lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
Rust
src/language.rs
Xetera/ginkou-api
48e8cea435b477d9365dc736407159f00321b1b0
use std::fs::File; use std::io; use std::io::Write; use std::path::{Path, PathBuf}; use std::string::FromUtf8Error; extern crate dirs; #[macro_use] use rusqlite::params; use rusqlite::Connection; use structopt; use structopt::StructOpt; use mecab; use mecab::Tagger; const DAKUTEN_BYTES: [u8; 3] = [227, 128, 130]; const SQL_ADD_SENTENCE: &'static str = include_str!("sql/add_sentence.sql"); const SQL_ADD_WORD_JUNCTION: &'static str = include_str!("sql/add_word_junction.sql"); const SQL_ADD_WORD: &'static str = include_str!("sql/add_word.sql"); const SQL_ALL_WORD_SENTENCES: &'static str = include_str!("sql/all_word_sentences.sql"); const SQL_BEST_WORD_SENTENCES: &'static str = include_str!("sql/best_word_sentences.sql"); const SQL_SETUP: &'static str = include_str!("sql/setup.sql"); #[derive(Debug)] enum SentenceError { Utf8(FromUtf8Error), IO(io::Error), } impl From<FromUtf8Error> for SentenceError { fn from(err: FromUtf8Error) -> Self { SentenceError::Utf8(err) } } impl From<io::Error> for SentenceError { fn from(err: io::Error) -> Self { SentenceError::IO(err) } } struct Sentences<R> { bytes: io::Bytes<R>, done: bool, } impl<B: io::BufRead> Iterator for Sentences<B> { type Item = Result<String, SentenceError>; fn next(&mut self) -> Option<Self::Item> { if self.done { return None; } let mut buf = Vec::new(); let mut match_index = 0; while match_index < 3 { let byte = match self.bytes.next() { None => break, Some(Err(e)) => return Some(Err(e.into())), Some(Ok(b)) => b, }; buf.push(byte); if byte == DAKUTEN_BYTES[match_index] { match_index += 1; } else { match_index = 0; } } if buf.len() == 0 { self.done = true; return None; } let next = String::from_utf8(buf).map_err(SentenceError::from); Some(next.map(|x| x.replace(|x: char| x.is_whitespace(), ""))) } } fn sentences<R: io::BufRead>(reader: R) -> Sentences<R> { Sentences { bytes: reader.bytes(), done: false, } } fn create_tables(conn: &Connection) -> rusqlite::Result<()> { conn.execute_batch(SQL_SETUP)} pub fn conn_from_disk<P: AsRef<Path>>(path: P) -> rusqlite::Result<Connection> { let existed = path.as_ref().exists(); let conn = Connection::open(path)?; if !existed { create_tables(&conn)?; } Ok(conn) } fn conn_from_memory() -> rusqlite::Result<Connection> { let conn = Connection::open_in_memory()?; create_tables(&conn)?; Ok(conn) } fn add_sentence(conn: &Connection, sentence: &str) -> rusqlite::Result<u32> { conn.execute(SQL_ADD_SENTENCE, params![sentence])?; Ok(conn.last_insert_rowid() as u32) } fn add_word(conn: &Connection, word: &str, sentence_id: u32) -> rusqlite::Result<()> { conn.execute(SQL_ADD_WORD, params![word])?; conn.execute(SQL_ADD_WORD_JUNCTION, params![word, sentence_id])?; Ok(()) } pub fn matching_word(conn: &Connection, word: &str, all: bool) -> rusqlite::Result<Vec<String>> { let query = if all { SQL_ALL_WORD_SENTENCES } else { SQL_BEST_WORD_SENTENCES }; let mut stmt = conn.prepare_cached(query)?; let mut buffer = Vec::new(); let results = stmt.query_map(params![word], |row| row.get(0))?; for r in results { let s: String = r?; buffer.push(s); } Ok(buffer) } fn print_matching_words(conn: &Connection, word: &str, all: bool) -> rusqlite::Result<()> { let query = if all { SQL_ALL_WORD_SENTENCES } else { SQL_BEST_WORD_SENTENCES }; let mut stmt = conn.prepare_cached(query)?; let results = stmt.query_map(params![word], |row| row.get(0))?; for r in results { let r: String = r?; if let Err(e) = write!(io::stdout(), "{}\n", r) { if e.kind() != io::ErrorKind::BrokenPipe { panic!(e); } } } Ok(()) } fn consume_trimmed(conn: &Connection, trimmed: &str) -> rusqlite::Result<()> { let sentence_id = add_sentence(conn, trimmed)?; let mut tagger = Tagger::new(""); tagger.parse_nbest_init(trimmed); let mecab_out = tagger.next().unwrap(); for l in mecab_out.lines() { if l == "EOS" { break; } let tab_index = l.find('\t').unwrap(); let (_, rest) = l.split_at(tab_index); let rest = &rest[1..]; let root = rest.split(',').skip(6).next().unwrap(); add_word(conn, root, sentence_id)?; } Ok(()) } fn consume_sentences<R: io::BufRead>(conn: &Connection, reader: R) -> rusqlite::Result<()> { let mut i = 0; for sentence in sentences(reader) { i += 1; if sentence.is_err() { println!("Err on #{}: {:?}", i, sentence); continue; }; let sentence = sentence.unwrap(); println!("#{}: {}", i, sentence); consume_trimmed(conn, &sentence)?; } Ok(()) } #[derive(Debug, StructOpt)] #[structopt(name = "ginkou", about = "Japanese sentence bank")] enum Ginkou { #[structopt(name = "add")] Add { #[structopt(long, short = "f", parse(from_os_str))] file: Option<PathBuf>, #[structopt(long = "database", short = "d", parse(from_os_str))] db: Option<PathBuf>, }, #[structopt(name = "get")] Get { word: String, #[structopt(long = "allwords", short = "a")] all: bool, #[structopt(long = "database", short = "d", parse(from_os_str))] db: Option<PathBuf>, }, } fn default_db_path() -> PathBuf { if let Some(mut pb) = dirs::home_dir() { pb.push(".ginkoudb"); pb } else { PathBuf::from(".ginkoudb") } } fn main() { } #[cfg(test)] mod tests { use super::*; #[test] fn sentences_works_correctly() { let string = "A。\n B。\n\n XXC。"; let mut iter = sentences(std::io::BufReader::new(string.as_bytes())); let a = iter.next(); assert_eq!(String::from("A。"), a.unwrap().unwrap()); let b = iter.next(); assert_eq!(String::from("B。"), b.unwrap().unwrap()); let c = iter.next(); assert_eq!(String::from("XXC。"), c.unwrap().unwrap()); } #[test] fn bank_lookup_works_correctly() -> rusqlite::Result<()> { let conn = conn_from_memory()?; let sentence1 = String::from("A B"); let sentence2 = String::from("A B C"); let s1 = add_sentence(&conn, &sentence1)?; add_word(&conn, "A", s1)?; add_word(&conn, "B", s1)?; let s2 = add_sentence(&conn, &sentence2)?; add_word(&conn, "A", s2)?; add_word(&conn, "B", s2)?; add_word(&conn, "C", s2)?; let a_sentences = vec![sentence1.clone(), sentence2.clone()]; assert_eq!(Ok(a_sentences), matching_word(&conn, "A")); let c_sentences = vec![sentence2.clone()]; assert_eq!(Ok(c_sentences), matching_word(&conn, "C")); Ok(()) } #[test] fn sentences_can_be_consumed() -> rusqlite::Result<()> { let conn = conn_from_memory()?; let sentence1 = "猫を見た"; let sentence2 = "犬を見る"; consume_trimmed(&conn, sentence1)?; consume_trimmed(&conn, sentence2)?; let a_sentences = vec![sentence1.into(), sentence2.into()]; assert_eq!(Ok(a_sentences), matching_word(&conn, "見る")); let b_sentences = vec![sentence2.into()]; assert_eq!(Ok(b_sentences), matching_word(&conn, "犬")); let c_sentences = vec![sentence1.into()]; assert_eq!(Ok(c_sentences), matching_word(&conn, "猫")); Ok(()) } }
use std::fs::File; use std::io; use std::io::Write; use std::path::{Path, PathBuf}; use std::string::FromUtf8Error; extern crate dirs; #[macro_use] use rusqlite::params; use rusqlite::Connection; use structopt; use structopt::StructOpt; use mecab; use mecab::Tagger; const DAKUTEN_BYTES: [u8; 3] = [227, 128, 130]; const SQL_ADD_SENTENCE: &'static str = include_str!("sql/add_sentence.sql"); const SQL_ADD_WORD_JUNCTION: &'static str = include_str!("sql/add_word_junction.sql"); const SQL_ADD_WORD: &'static str = include_str!("sql/add_word.sql"); const SQL_ALL_WORD_SENTENCES: &'static str = include_str!("sql/all_word_sentences.sql"); const SQL_BEST_WORD_SENTENCES: &'static str = include_str!("sql/best_word_sentences.sql"); const SQL_SETUP: &'static str = include_str!("sql/setup.sql"); #[derive(Debug)] enum SentenceError { Utf8(FromUtf8Error), IO(io::Error), } impl From<FromUtf8Error> for SentenceError { fn from(err: FromUtf8Error) -> Self { SentenceError::Utf8(err) } } impl From<io::Error> for SentenceError { fn from(err: io::Error) -> Self { SentenceError::IO(err) } } struct Sentences<R> { bytes: io::Bytes<R>, done: bool, } impl<B: io::BufRead> Iterator for Sentences<B> { type Item = Result<String, SentenceError>; fn next(&mut self) -> Option<Self::Item> { if self.done { return None; } let mut buf = Vec::new(); let mut match_index = 0; while match_index < 3 { let byte = match self.bytes.next() { None => break, Some(Err(e)) => return Some(Err(e.into())), Some(Ok(b)) => b, }; buf.push(byte); if byte == DAKUTEN_BYTES[match_index] { match_index += 1; } else { match_index = 0; } } if buf.len() == 0 { self.done = true; return None; } let next = String::from_utf8(buf).map_err(SentenceError::from); Some(next.map(|x| x.replace(|x: char| x.is_whitespace(), ""))) } } fn sentences<R: io::BufRead>(reader: R) -> Sentences<R> { Sentences { bytes: reader.bytes(), done: false, } } fn create_tables(conn: &Connection) -> rusqlite::Result<()> { conn.execute_batch(SQL_SETUP)} pub fn conn_from_disk<P: AsRef<Path>>(path: P) -> rusqlite::Result<Connection> { let existed = path.as_ref().exists(); let conn = Connection::open(path)?; if !existed { create_tables(&conn)?; } Ok(conn) } fn conn_from_memory() -> rusqlite::Result<Connection> { let conn = Connection::open_in_memory()?; create_tables(&conn)?; Ok(conn) } fn add_sentence(conn: &Connection, sentence: &str) -> rusqlite::Result<u32> { conn.execute(SQL_ADD_SENTENCE, params![sentence])?; Ok(conn.last_insert_rowid() as u32) } fn add_word(conn: &Connection, word: &str, sentence_id: u32) -> rusqlite::Result<()> { conn.execute(SQL_ADD_WORD, params![word])?; conn.execute(SQL_ADD_WORD_JUNCTION, params![word, sentence_id])?; Ok(()) } pub fn matching_word(conn: &Connection, word: &str, all: bool) -> rusqlite::Result<Vec<String>> { let query = if all { SQL_ALL_WORD_SENTENCES } else { SQL_BEST_WORD_SENTENCES }; let mut stmt = conn.prepare_cached(query)?; let mut buffer = Vec::new(); let results = stmt.query_map(params![word], |row| row.get(0))?; for r in results { let s: String = r?; buffer.push(s); } Ok(buffer) } fn print_matching_words(conn: &Connection, word: &str, all: bool) -> rusqlite::Result<()> { let query = if all { SQL_ALL_WORD_SENTENCES } else { SQL_BEST_WORD_SENTENCES }; let mut stmt = conn.prepare_cached(query)?; let results = stmt.query_map(params![word], |row| row.get(0))?; for r in results { let r: String = r?; if let Err(e) = write!(io::stdout(), "{}\n", r) { if e.kind() != io::ErrorKind::BrokenPipe { panic!(e); } } } Ok(()) } fn consume_trimmed(conn: &Connection, trimmed: &str) -> rusqlite::Result<()> { let sentence_id = add_sentence(conn, trimmed)?; let mut tagger = Tagger::new(""); tagger.parse_nbest_init(trimmed); let mecab_out = tagger.next().unwrap(); for l in mecab_out.lines() { if l == "EOS" { break; } let tab_index = l.find('\t').unwrap(); let (_, rest) = l.split_at(tab_index); let rest = &rest[1..]; let root = rest.split(',').skip(6).next().unwrap(); add_word(conn, root, sentence_id)?; } Ok(()) } fn consume_sentences<R: io::BufRead>(conn: &Connection, reader: R) -> rusqlite::Result<()> { let mut i = 0; for sentence in sentences(reader) { i += 1; if sentence.is_err() { println!("Err on #{}: {:?}", i, sentence); continue; }; let sentence = sentence.unwrap(); println!("#{}: {}", i, sentence); consume_trimmed(conn, &sentence)?; } Ok(()) } #[derive(Debug, StructOpt)] #[structopt(name = "ginkou", about = "Japanese sentence bank")] enum Ginkou { #[structopt(name = "add")] Add { #[structopt(long, short = "f", parse(from_os_str))] file: Option<PathBuf>, #[structopt(long = "database", short = "d", parse(from_os_str))] db: Option<PathBuf>, }, #[structopt(name = "get")] Get { word: String, #[structopt(long = "allwords", short = "a")] all: bool, #[structopt(long = "database", short = "d", parse(from_os_str))] db: Option<PathBuf>, }, } fn default_db_path() -> PathBuf { if let Some(mut pb) = dirs::home_dir() { pb.push(".ginkoudb"); pb } else { PathBuf::from(".ginkoudb") } } fn main() { } #[cfg(test)] mod tests { use super::*; #[test] fn sentences_works_correctly() { let string = "A。\n B。\n\n XXC。"; let mut iter = sentences(std::io::BufReader::new(string.as_bytes())); let a = iter.next(); assert_eq!(String::from("A。"), a.unwrap().unwrap()); let b = iter.next(); assert_eq!(String::from("B。"), b.unwrap().unwrap()); let c = iter.next(); assert_eq!(String::from("XXC。"), c.unwrap().unwrap()); } #[test]
#[test] fn sentences_can_be_consumed() -> rusqlite::Result<()> { let conn = conn_from_memory()?; let sentence1 = "猫を見た"; let sentence2 = "犬を見る"; consume_trimmed(&conn, sentence1)?; consume_trimmed(&conn, sentence2)?; let a_sentences = vec![sentence1.into(), sentence2.into()]; assert_eq!(Ok(a_sentences), matching_word(&conn, "見る")); let b_sentences = vec![sentence2.into()]; assert_eq!(Ok(b_sentences), matching_word(&conn, "犬")); let c_sentences = vec![sentence1.into()]; assert_eq!(Ok(c_sentences), matching_word(&conn, "猫")); Ok(()) } }
fn bank_lookup_works_correctly() -> rusqlite::Result<()> { let conn = conn_from_memory()?; let sentence1 = String::from("A B"); let sentence2 = String::from("A B C"); let s1 = add_sentence(&conn, &sentence1)?; add_word(&conn, "A", s1)?; add_word(&conn, "B", s1)?; let s2 = add_sentence(&conn, &sentence2)?; add_word(&conn, "A", s2)?; add_word(&conn, "B", s2)?; add_word(&conn, "C", s2)?; let a_sentences = vec![sentence1.clone(), sentence2.clone()]; assert_eq!(Ok(a_sentences), matching_word(&conn, "A")); let c_sentences = vec![sentence2.clone()]; assert_eq!(Ok(c_sentences), matching_word(&conn, "C")); Ok(()) }
function_block-full_function
[ { "content": "SELECT id, ?2 FROM WORDS WHERE word=?1 AND NOT EXISTS (SELECT 1 FROM WordSentence WHERE word_id=id AND sentence_id=?2);", "file_path": "src/sql/add_word_junction.sql", "rank": 11, "score": 86279.24715323126 }, { "content": "fn main() {\n\n rocket::ignite().mount(\"/\", routes![index, lookup]).launch();\n\n}", "file_path": "src/main.rs", "rank": 14, "score": 77333.59537415029 }, { "content": "#[get(\"/lookup/<name>\")]\n\nfn lookup(name: &RawStr) -> Result<JsonValue, rusqlite::Error> {\n\n let path = \".ginkoudb\";\n\n let conn = language::conn_from_disk(path)?;\n\n let search_target = &name.percent_decode()?;\n\n let result = language::matching_word(&conn, search_target, false)?;\n\n\n\n return Ok(\n\n json!({ \"sentences\": result })\n\n );\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 16, "score": 72287.78439996514 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct SearchResponse {\n\n sentence: Option<String>\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 18, "score": 57014.72236270094 }, { "content": "#[get(\"/\")]\n\nfn index() {\n\n \"Ginkou API\\nUsage: /lookup/<word>\\nExample: /lookup/僕\"\n\n}\n\n\n\n\n", "file_path": "src/main.rs", "rank": 19, "score": 51848.93050137071 }, { "content": "SELECT sentence FROM sentences\n\nLEFT JOIN wordsentence ON wordsentence.sentence_id = sentences.id \n\nLEFT JOIN words ON words.id = wordsentence.word_id\n\nWHERE word=?1;", "file_path": "src/sql/all_word_sentences.sql", "rank": 20, "score": 39396.79857898379 }, { "content": "SELECT sentence FROM sentences\n\nLEFT JOIN wordsentence ON wordsentence.sentence_id = sentences.id \n\nLEFT JOIN words ON words.id = wordsentence.word_id\n\nWHERE word=?1\n\nORDER BY length(sentence)\n\nLIMIT 200;", "file_path": "src/sql/best_word_sentences.sql", "rank": 21, "score": 38127.90097606159 }, { "content": "INSERT INTO Words(word)\n\nSELECT ?1 WHERE NOT EXISTS(SELECT 1 FROM words WHERE word=?1);", "file_path": "src/sql/add_word.sql", "rank": 22, "score": 35583.197031364616 }, { "content": "INSERT INTO Sentences(sentence) VALUES(?1);", "file_path": "src/sql/add_sentence.sql", "rank": 23, "score": 35173.72328688904 }, { "content": "INSERT INTO WordSentence\n", "file_path": "src/sql/add_word_junction.sql", "rank": 24, "score": 33817.85218212206 }, { "content": "CREATE TABLE WordSentence(\n\n word_id INTEGER NOT NULL,\n\n sentence_id INTEGER NOT NULL,\n\n PRIMARY KEY(word_id, sentence_id),\n\n FOREIGN KEY(word_id) REFERENCES Words(id),\n\n FOREIGN KEY(sentence_id) REFERENCES Sentences(id)\n\n);", "file_path": "src/sql/setup.sql", "rank": 25, "score": 31183.315591180242 }, { "content": "#![feature(proc_macro_hygiene, decl_macro)]\n\n\n\n#[macro_use]\n\nextern crate rocket;\n\n#[macro_use] \n\nextern crate serde_derive;\n\n#[macro_use] \n\nextern crate rocket_contrib;\n\n\n\nextern crate structopt;\n\nextern crate rusqlite;\n\nextern crate mecab;\n\nuse rocket::http::route::Error;\n\nuse rocket::http::RawStr;\n\n\n\nuse rocket_contrib::json::{Json, JsonValue};\n\n\n\nmod language;\n\n\n\n#[derive(Serialize, Deserialize)]\n", "file_path": "src/main.rs", "rank": 26, "score": 20279.84387705061 }, { "content": "CREATE TABLE Words(\n\n id INTEGER PRIMARY KEY,\n\n word TEXT UNIQUE NOT NULL\n\n);\n\n\n", "file_path": "src/sql/setup.sql", "rank": 27, "score": 16558.672164226195 }, { "content": "CREATE TABLE Sentences(\n\n id INTEGER PRIMARY KEY,\n\n sentence TEXT NOT NULL\n\n);\n\n\n", "file_path": "src/sql/setup.sql", "rank": 28, "score": 16171.396986590056 }, { "content": "# ginkou-api\n\n\n\nA REST api for interacting with [ginkou](https://github.com/cronokirby/ginkou).\n\nFind examples of sentences that use a specific Japanese word regardless of conjugation.\n\n\n\n## Usage\n\n\n\nhttps://japanese.hifumi.io/lookup/僕\n\n\n\n## Dependencies\n\n\n\nThis program depends on [mecab](http://taku910.github.io/mecab/) for the aforementioned\n\nmorphological splitting. For instructions on installing it, see the [mecab crate](https://github.com/tsurai/mecab-rs).\n\n\n\nYou can use the following packages if you use `apt`\n\n\n\n```\n\nsudo apt install libsqlite3-dev\n\nsudo apt install libmecab-dev\n\n```\n\n\n\nFor a CLI interface, download the original [ginkou](https://github.com/cronokirby/ginkou) app.\n", "file_path": "README.md", "rank": 29, "score": 12807.291757860829 } ]
Rust
corp_bot/src/main.rs
lholznagel/eve_online
7f8cc4aa46b06c0edf64f89ff5853216cf5597b7
mod asset; use appraisal::{Appraisal, Janice}; use caph_connector::{EveAuthClient, CorporationService}; use num_format::{Locale, ToFormattedString}; use reqwest::{header::{HeaderMap, HeaderValue}, Client}; use serde::Serialize; use sqlx::{PgPool, postgres::PgPoolOptions}; use tracing_subscriber::EnvFilter; const PG_ADDR: &str = "DATABASE_URL"; #[tokio::main] async fn main() -> Result<(), Box<dyn std::error::Error>> { dotenv::dotenv().ok(); tracing_subscriber::fmt() .pretty() .with_env_filter(EnvFilter::from_default_env()) .init(); let pg_addr = std::env::var(PG_ADDR) .expect("Expected that a DATABASE_URL ENV is set"); let pool = PgPoolOptions::new() .max_connections(10) .connect(&pg_addr) .await?; let journal = journal(&pool).await; let asset_worth = assets(&pool).await; let wallet = wallets(&pool).await; let mut headers = HeaderMap::new(); headers.insert( "Authorization", HeaderValue::from_static("Bot OTgwMjQ1MTcyNzM2NjIyNjIy.Go7wtl.vSkXQMxjtHMZGqTM-s0MCrG5lChsGg0wIXlSAg") ); let client = Client::builder() .default_headers(headers) .build() .unwrap(); #[derive(Debug, Serialize)] struct Message { content: String, allowed_mentions: AllowedMentions } #[derive(Debug, Serialize)] struct AllowedMentions { parse: Vec<String>, } impl Default for AllowedMentions { fn default() -> Self { Self { parse: vec!["users".into(), "roles".into()] } } } client.post( "https://discord.com/api/v10/channels/980171856768270387/messages" ) .json(&Message { content: format!(r#" <@318403897972621312> Ungefährerer derzeitiger Wert der Corp wallets + Assets. ``` Master Wallet {} ISK Moon Taxes {} ISK Alliance Taxes {} ISK Assets: {} ISK Total {} ISK ``` "#, (wallet.master as u64).to_formatted_string(&Locale::de), (wallet.moon as u64).to_formatted_string(&Locale::de), (wallet.alliance as u64).to_formatted_string(&Locale::de), (asset_worth as u64).to_formatted_string(&Locale::de), ((wallet.master + wallet.moon + wallet.moon + asset_worth) as u64).to_formatted_string(&Locale::de) ), allowed_mentions: AllowedMentions::default() }) .send() .await .unwrap() .text() .await .unwrap(); Ok(()) } async fn assets(pool: &PgPool) -> f32 { let info = sqlx::query!(r#" SELECT c.corporation_id AS "corporation_id!", l.refresh_token AS "refresh_token!" FROM characters c JOIN logins l ON l.character_id = c.character_id WHERE 'esi-assets.read_corporation_assets.v1' = ANY(esi_tokens) AND c.corporation_name = 'Rip0ff Industries' AND l.refresh_token IS NOT NULL AND c.corporation_id IS NOT NULL "# ) .fetch_one(pool) .await .unwrap(); let client = EveAuthClient::new(info.refresh_token).unwrap(); let corporation_service = CorporationService::new(info.corporation_id.into()); let assets = corporation_service .assets(&client) .await .unwrap() .into_iter() .filter(|x| !x.is_blueprint_copy) .collect::<Vec<_>>(); let mut entries = Vec::new(); for asset in assets { let name = sqlx::query!(" SELECT name FROM items WHERE type_id = $1 ", *asset.type_id ) .fetch_one(pool) .await .unwrap() .name; entries.push(format!("{} {}", name, asset.quantity)); } Janice::validate().unwrap(); let janice = Janice::init().unwrap(); janice .create(false, entries) .await .unwrap() .sell_price } async fn journal(pool: &PgPool) { let info = sqlx::query!(r#" SELECT c.corporation_id AS "corporation_id!", l.refresh_token AS "refresh_token!" FROM characters c JOIN logins l ON l.character_id = c.character_id WHERE 'esi-assets.read_corporation_assets.v1' = ANY(esi_tokens) AND c.corporation_name = 'Rip0ff Industries' AND l.refresh_token IS NOT NULL AND c.corporation_id IS NOT NULL "# ) .fetch_one(pool) .await .unwrap(); let client = EveAuthClient::new(info.refresh_token).unwrap(); let corporation_service = CorporationService::new(info.corporation_id.into()); let journal = corporation_service .wallet_journal(&client) .await .unwrap(); dbg!(journal); } async fn wallets(pool: &PgPool) -> Wallets { let info = sqlx::query!(r#" SELECT c.corporation_id AS "corporation_id!", l.refresh_token AS "refresh_token!" FROM characters c JOIN logins l ON l.character_id = c.character_id WHERE 'esi-assets.read_corporation_assets.v1' = ANY(esi_tokens) AND c.corporation_name = 'Rip0ff Industries' AND l.refresh_token IS NOT NULL AND c.corporation_id IS NOT NULL "# ) .fetch_one(pool) .await .unwrap(); let client = EveAuthClient::new(info.refresh_token).unwrap(); let corporation_service = CorporationService::new(info.corporation_id.into()); let wallets = corporation_service .wallets(&client) .await .unwrap(); Wallets { master: wallets[0].balance, moon: wallets[1].balance, alliance: wallets[2].balance, } } struct Wallets { master: f32, moon: f32, alliance: f32, }
mod asset; use appraisal::{Appraisal, Janice}; use caph_connector::{EveAuthClient, CorporationService}; use num_format::{Locale, ToFormattedString}; use reqwest::{header::{HeaderMap, HeaderValue}, Client}; use serde::Serialize; use sqlx::{PgPool, postgres::PgPoolOptions}; use tracing_subscriber::EnvFilter; const PG_ADDR: &str = "DATABASE_URL"; #[tokio::main] async fn main() -> Result<(), Box<dyn std::error::Error>> { dotenv::dotenv().ok(); tracing_subscriber::fmt() .pretty() .with_env_filter(EnvFilter::from_default_env()) .init(); let pg_addr = std::env::var(PG_ADDR) .expect("Expected that a DATABASE_URL ENV is set"); let pool = PgPoolOptions::new() .max_connections(10) .connect(&pg_addr) .await?; let journal = journal(&pool).await; let asset_worth = assets(&pool).await; let wallet = wallets(&pool).await; let mut headers = HeaderMap::new(); headers.insert( "Authorization", HeaderValue::from_static("Bot OTgwMjQ1MTcyNzM2NjIyNjIy.Go7wtl.vSkXQMxjtHMZGqTM-s0MCrG5lChsGg0wIXlSAg") ); let client = Client::builder() .default_headers(headers) .build() .unwrap(); #[derive(Debug, Serialize)] struct Message { content: String, allowed_mentions: AllowedMentions } #[derive(Debug, Serialize)] struct AllowedMentions { parse: Vec<String>, } impl Default for AllowedMentions { fn default() -> Self { Self { parse: vec!["users".into(), "roles".into()] } } } client.post( "https://discord.com/api/v10/channels/980171856768270387/messages" ) .json(&Message { content: format!(r#" <@318403897972621312> Ungefährerer derzeitiger Wert der Corp wallets + Assets. ``` Master Wallet {} ISK Moon Taxes {} ISK Alliance Taxes {} ISK Assets: {} ISK Total {} ISK ``` "#, (wallet.master as u64).to_formatted_string(&Locale::de), (wallet.moon as u64).to_formatted_string(&Locale::de), (wallet.alliance as u64).to_formatted_string(&Locale::de), (asset_worth as u64).to_formatted_string(&Locale::de), ((wallet.master + wallet.moon + wallet.moon + asset_worth) as u64).to_formatted_string(&Locale::de) ), allowed_mentions: AllowedMentions::default() }) .send() .await .unwrap() .text() .await .unwrap(); Ok(()) } async fn assets(pool: &PgPool) -> f32 { let info = sqlx::query!(r#" SELECT c.corporation_id AS "corporation_id!", l.refresh_token AS "refresh_token!" FROM characters c JOIN logins l ON l.character_id = c.character_id WHERE 'esi-assets.read_corporation_assets.v1' = ANY(esi_tokens) AND c.corporation_name = 'Rip0ff Industries' AND l.refresh_token IS NOT NULL AND c.corporation_id IS NOT NULL "# ) .fetch_one(pool) .await .unwrap(); let client = EveAuthClient::new(info.refresh_token).unwrap(); let corporation_service = CorporationService::new(info.corporation_id.into()); let assets = corporation_service .assets(&client) .await .unwrap() .into_iter() .filter(|x| !x.is_blueprint_copy) .collect::<Vec<_>>(); let mut entries = Vec::new(); for asset in assets { let name = sqlx::query!(" SELECT name FROM items WHERE type_id = $1 ", *asset.type_id ) .fetch_one(pool) .await .unwrap() .name; entries.push(format!("{} {}", name, asset.quantity)); } Janice::validate().unwrap(); let janice = Janice::init().unwrap(); janice .create(false, entries) .await .unwrap() .sell_price } async fn journal(pool: &PgPool) { let info = sqlx::query!(r#" SELECT c.corporation_id AS "corporation_id!", l.refresh_token AS "refresh_token!" FROM characters c JOIN logins l ON l.character_id = c.character_id WHERE 'esi-assets.read_corporation_assets.v1' = ANY(esi_tokens) AND c.corporation_name = 'Rip0ff Industries' AND l.refresh_token IS NOT NULL AND c.corporation_id IS NOT NULL "# ) .fetch_one(pool) .await .unwrap(); let client = EveAuthClient::new(info.refresh_token).unwrap(); let corporation_service = CorporationService::new(info.corporation_id.into()); let journal = corporation_service .wallet_journal(&client) .await .unwrap(); dbg!(journal); } async fn wallets(pool: &PgPool) -> Wallets { let info = sqlx::query!(r#" SELECT c.corporation_id AS "corporation_id!", l.refresh_token AS "refresh_token!" FROM characters c JOIN logins l ON l.character_id = c.character_id WHERE 'esi-assets.read_corporation_assets.v1' = ANY(esi_tokens) AND c.corporation_name = 'Rip0ff Industries' AND l.refresh_t
ration_service = CorporationService::new(info.corporation_id.into()); let wallets = corporation_service .wallets(&client) .await .unwrap(); Wallets { master: wallets[0].balance, moon: wallets[1].balance, alliance: wallets[2].balance, } } struct Wallets { master: f32, moon: f32, alliance: f32, }
oken IS NOT NULL AND c.corporation_id IS NOT NULL "# ) .fetch_one(pool) .await .unwrap(); let client = EveAuthClient::new(info.refresh_token).unwrap(); let corpo
function_block-random_span
[ { "content": "/// Generates the basic SQL-Query that is required for items.\n\n///\n\n/// # Returns\n\n///\n\n/// String containing the SQL-Query.\n\n///\n\nfn sql_header() -> String {\n\n r#\"DELETE FROM items CASCADE;\"#.into()\n\n}\n\n\n", "file_path": "sde_parser/src/items.rs", "rank": 1, "score": 169196.65506172966 }, { "content": "fn main() {\n\n println!(\"cargo:rerun-if-changed=migrations\");\n\n\n\n // Only git env variables when we do a release\n\n if std::env::var(\"PROFILE\").unwrap_or_else(|_| \"debug\".into()) == \"debug\" {\n\n return;\n\n }\n\n\n\n let git_head_short = Command::new(\"git\")\n\n .args(&[\"rev-parse\", \"--short\", \"HEAD\"])\n\n .output()\n\n .unwrap();\n\n let git_head_short = String::from_utf8(git_head_short.stdout).unwrap();\n\n\n\n println!(\"cargo:rustc-env=GIT_HEAD_SHORT={}\", git_head_short);\n\n}\n", "file_path": "server/build.rs", "rank": 2, "score": 161109.9993815293 }, { "content": "/// Generates the basic SQL-Query that is required for blueprints.\n\n///\n\n/// # Returns\n\n///\n\n/// String containing the SQL-Query.\n\n///\n\nfn sql_header() -> String {\n\n r#\"DELETE FROM blueprint_raw CASCADE;\n", "file_path": "sde_parser/src/blueprints.rs", "rank": 3, "score": 135648.4951447268 }, { "content": "/// Generates a secure token that can be used as a cookie token\n\n/// \n\n/// # Errors\n\n/// \n\n/// When either the ENV `SECRET_KEY` is not available or if generating\n\n/// the HMAC key fails.\n\n/// \n\n/// # Returns\n\n/// \n\n/// Base64 encoded key and a base64 encoded hashed version of the key\n\n/// \n\npub fn generate_secure_token() -> Result<(String, String), Error> {\n\n let secret = std::env::var(\"SECRET_KEY\")\n\n .map_err(Error::MissingEnvSecretKey)?;\n\n\n\n let mut rng = ChaCha20Rng::from_entropy();\n\n let mut key: Vec<u8> = (0..255).collect::<Vec<_>>();\n\n key.shuffle(&mut rng);\n\n let token = base64::encode(key.clone());\n\n\n\n let mut mac = Hmac::<Sha256>::new_from_slice(secret.as_bytes())\n\n .map_err(Error::HmacInitError)?;\n\n mac.update(&key);\n\n let result = mac.finalize();\n\n let result = result.into_bytes();\n\n let hashed = base64::encode(result);\n\n Ok((token, hashed))\n\n}\n\n\n", "file_path": "server/src/utils.rs", "rank": 4, "score": 134956.64525908724 }, { "content": "/// Parses the input file and exports it as SQL\n\npub fn run() -> Result<String, Box<dyn std::error::Error>> {\n\n tracing::info!(\"Starting asset parsing\");\n\n\n\n let current = std::env::current_dir()?;\n\n let current = current\n\n .to_str()\n\n .unwrap_or_default();\n\n let path_type_ids = format!(\n\n \"{}/{}/type_ids.yaml\",\n\n current,\n\n FOLDER_INPUT\n\n );\n\n let file_type_ids = File::open(&path_type_ids)?;\n\n\n\n let path_group_ids = format!(\n\n \"{}/{}/group_ids.yaml\",\n\n current,\n\n FOLDER_INPUT\n\n );\n\n let file_group_ids = File::open(&path_group_ids)?;\n", "file_path": "sde_parser/src/items.rs", "rank": 5, "score": 132353.0915031213 }, { "content": "#[derive(Clone, Debug)]\n\nstruct Item {\n\n /// TypeId of the item\n\n type_id: TypeId,\n\n /// CategoryId of the item\n\n category_id: TypeId,\n\n /// GroupId of the item\n\n group_id: TypeId,\n\n /// MetaGroupId of the item\n\n meta_group_id: Option<TypeId>,\n\n /// Volume\n\n volume: f32,\n\n /// English name of the item\n\n name: String\n\n}\n\n\n\nimpl Item {\n\n /// Converts the struct into a SQL-Insert Query.\n\n ///\n\n /// # Example\n\n ///\n", "file_path": "sde_parser/src/items.rs", "rank": 6, "score": 125898.16195900497 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let config = File::open(\"skillplans/skillplans.json\")?;\n\n let config: Vec<ConfigFile> = serde_json::from_reader(config)?;\n\n\n\n let mut all = Vec::new();\n\n for c in config {\n\n if let Some(x) = c.core {\n\n let mut file = File::open(format!(\"skillplans/Core/{}\", x))?;\n\n let mut content = String::new();\n\n file.read_to_string(&mut content)?;\n\n let plan: EvemonPlan = quick_xml::de::from_str(&content)?;\n\n\n\n all.push(Output {\n\n name: x,\n\n skills: plan.entries\n\n });\n\n }\n\n\n\n for s in c.plans {\n\n let mut file = File::open(format!(\"skillplans/{}/{}\", c.folder, s))?;\n", "file_path": "evemon_to_json/src/main.rs", "rank": 7, "score": 125080.42849752502 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n tracing_subscriber::fmt()\n\n .pretty()\n\n .with_env_filter(EnvFilter::from_default_env())\n\n .init();\n\n\n\n let current_dir = std::env::current_dir()?;\n\n if !Path::new(\n\n &format!(\n\n \"{}/{}/blueprints.yaml\",\n\n current_dir.to_str().unwrap_or_default(),\n\n FOLDER_INPUT\n\n )\n\n ).exists() {\n\n\n\n tracing::error!(\n\n \"File 'blueprints.yaml' is not in {}/blueprints.yaml\",\n\n FOLDER_INPUT\n\n );\n\n }\n", "file_path": "sde_parser/src/main.rs", "rank": 8, "score": 125080.42849752502 }, { "content": "/// Parses the input file and exports it as SQL\n\npub fn run() -> Result<String, Box<dyn std::error::Error>> {\n\n tracing::info!(\"Starting blueprint parsing\");\n\n\n\n let current = std::env::current_dir()?;\n\n let current = current\n\n .to_str()\n\n .unwrap_or_default();\n\n let path = format!(\n\n \"{}/{}/blueprints.yaml\",\n\n current,\n\n FOLDER_INPUT\n\n );\n\n let file = File::open(&path)?;\n\n\n\n // Map with the blueprint as key\n\n let blueprints: HashMap<TypeId, Blueprint> = serde_yaml::from_reader(file)?;\n\n\n\n // Map with the product as key\n\n let products = blueprints\n\n .clone()\n", "file_path": "sde_parser/src/blueprints.rs", "rank": 9, "score": 104731.25650070992 }, { "content": "/// Generates a SQL-Query containing all game items\n\n/// \n\n/// # Returns\n\n/// \n\n/// String containing the value-tuple\n\n/// \n\nfn sql_items(\n\n type_ids: &HashMap<TypeId, TypeEntry>,\n\n group_ids: &HashMap<GroupId, GroupEntry>,\n\n) -> String {\n\n let mut items = Vec::new();\n\n\n\n for (type_id, entry) in type_ids {\n\n let type_id = *type_id;\n\n let group_id = entry.group_id;\n\n let category_id = group_ids\n\n .get(&group_id)\n\n .map(|x| x.category_id)\n\n .expect(\"Every entry should have a categroy id\");\n\n let volume = entry.volume.unwrap_or(0f32);\n\n let meta_group_id = entry.meta_group_id;\n\n let name = entry\n\n .name()\n\n .unwrap_or(\n\n format!(\"Unknown name {}\", type_id)\n\n );\n", "file_path": "sde_parser/src/items.rs", "rank": 10, "score": 101557.06459628316 }, { "content": " public static async info(cid: number): Promise<ICharacter> {\n\n // Ignore what typescript says\n\n if (<any>this.character_name_req[cid]) {\n\n return this.character_name_req[cid];\n\n }\n\n\n\n if (this.character_names[cid]) {\n\n return this.character_names[cid];\n\n } else {\n\n this.character_name_req[cid] = axios\n\n .get(`${BASE_ADDR}/${cid}/info`)\n\n .then(x => x.data)\n\n .then(x => {\n\n this.character_names[cid] = x;\n\n delete this.character_name_req[cid];\n\n return x;\n\n });\n\n return this.character_name_req[cid];\n\n }\n", "file_path": "web/src/services/character.ts", "rank": 11, "score": 98783.35190376967 }, { "content": "pub fn router() -> Router {\n\n Router::new()\n\n .nest(\n\n \"/pulls\",\n\n Router::new()\n\n .route(\n\n \"/\",\n\n get(pulls)\n\n .post(create)\n\n )\n\n .route(\n\n \"/:id\",\n\n get(pull)\n\n .put(update)\n\n )\n\n )\n\n}\n\n\n\nasync fn pull(\n\n user: AuthUser,\n", "file_path": "server/src/moon/api.rs", "rank": 12, "score": 81640.8389638782 }, { "content": "", "file_path": "corp_bot/src/asset.rs", "rank": 13, "score": 75626.25827728261 }, { "content": "/// Generates the SQL-Code for inserting all blueprint research entries.\n\n///\n\n/// Combines the activities [ActivityName::ResearchMaterial],\n\n/// [ActivityName::ResearchTime] and [ActivityName::Copying].\n\n///\n\n/// # Params\n\n///\n\n/// * `bps` -> Map of the parsed `blueprint.yaml` file\n\n///\n\n/// # Returns\n\n///\n\n/// String containing the SQL-Query.\n\n///\n\nfn sql_research(bps: &HashMap<TypeId, Blueprint>) -> String {\n\n let mut entries = Vec::new();\n\n\n\n for (btype_id, entry) in bps {\n\n if !entry.has_job() {\n\n continue;\n\n }\n\n\n\n let btype_id = *btype_id;\n\n\n\n let ptype_id = if let Some(x) = entry.product() {\n\n x\n\n } else {\n\n continue;\n\n };\n\n\n\n let material = if let Some(x) = entry\n\n .research_time(ActivityName::ResearchMaterial) {\n\n x\n\n } else {\n", "file_path": "sde_parser/src/blueprints.rs", "rank": 25, "score": 74548.79221000988 }, { "content": "/// Generates the SQL-Code for inserting all blueprint invention entries.\n\n///\n\n/// Contains the activity [ActivityName::Invention].\n\n///\n\n/// # Params\n\n///\n\n/// * `bps` -> Map of the parsed `blueprint.yaml` file\n\n///\n\n/// # Returns\n\n///\n\n/// String containing the SQL-Query.\n\n///\n\nfn sql_invention(bps: &HashMap<TypeId, Blueprint>) -> String {\n\n let mut inventions = Vec::new();\n\n let mut materials = Vec::new();\n\n\n\n for (btype_id, entry) in bps {\n\n let ptype_id = if let Some(x) = entry.product() {\n\n x\n\n } else {\n\n continue;\n\n };\n\n\n\n let activity = if let Some(x) = entry.activities\n\n .get(&ActivityName::Invention) {\n\n x\n\n } else {\n\n continue;\n\n };\n\n let time = if let Some(x) = entry.activities\n\n .get(&ActivityName::Invention) {\n\n if !x.materials.is_empty() {\n", "file_path": "sde_parser/src/blueprints.rs", "rank": 26, "score": 74548.16860924287 }, { "content": "-- Contains every character that ever tried to login, if the login was not\n\n-- successful and the user tried again, the user will be here multiple times\n\nCREATE TABLE logins(\n\n expire_date TIMESTAMPTZ,\n\n\n\n character_id INTEGER,\n\n character_main INTEGER,\n\n\n\n -- token so that we can verify the user\n\n token VARCHAR,\n\n -- EVE tokens\n\n refresh_token VARCHAR,\n\n access_token VARCHAR,\n\n\n\n PRIMARY KEY(token)\n\n);\n\n\n", "file_path": "server/migrations/20220326201841_init.sql", "rank": 27, "score": 72600.59875470909 }, { "content": "CREATE TABLE items(\n\n type_id INTEGER NOT NULL,\n\n category_id INTEGER NOT NULL,\n\n group_id INTEGER NOT NULL,\n\n\n\n meta_group_id INTEGER,\n\n\n\n volume REAL NOT NULL,\n\n\n\n name VARCHAR NOT NULL,\n\n\n\n PRIMARY KEY(type_id)\n\n);\n", "file_path": "server/migrations/20220326201841_init.sql", "rank": 28, "score": 72297.54773777632 }, { "content": "-- Contains all characters that successfully logged in\n\nCREATE TABLE characters(\n\n character_id INTEGER NOT NULL,\n\n corporation_id INTEGER NOT NULL,\n\n\n\n character_main INTEGER,\n\n\n\n character_name VARCHAR(50) NOT NULL,\n\n corporation_name VARCHAR(50) NOT NULL,\n\n\n\n alliance_id INTEGER,\n\n alliance_name VARCHAR(50),\n\n\n\n esi_tokens VARCHAR[],\n\n\n\n PRIMARY KEY (character_id),\n\n\n\n FOREIGN KEY (character_main)\n\n REFERENCES characters (character_id)\n\n ON DELETE CASCADE\n\n);\n\n\n", "file_path": "server/migrations/20220326201841_init.sql", "rank": 29, "score": 72126.57305908736 }, { "content": "CREATE INDEX project_asset_project ON project_assets(project);\n", "file_path": "server/migrations/20220326201841_init.sql", "rank": 30, "score": 71831.3964334853 }, { "content": "-- Assets that are stored in a container\n\nCREATE TABLE project_assets(\n\n project UUID NOT NULL,\n\n\n\n -- material efficiency, only set if its a bp, bpc or formula\n\n meff INTEGER,\n\n -- time efficiency, only set if its a bp, bpc or formula\n\n teff INTEGER,\n\n -- remaining runs, only set if its a bpc\n\n runs INTEGER,\n\n\n\n type_id INTEGER NOT NULL,\n\n quantity BIGINT NOT NULL,\n\n\n\n PRIMARY KEY (project, type_id),\n\n\n\n FOREIGN KEY (project)\n\n REFERENCES projects (project)\n\n ON DELETE CASCADE\n\n);\n\n\n", "file_path": "server/migrations/20220326201841_init.sql", "rank": 31, "score": 69888.30056211227 }, { "content": " public static async blueprints_total(): Promise<any[]> {\n\n return (await axios.get(`${BASE_ADDR}/blueprints/total`)).data;\n", "file_path": "web/src/services/character.ts", "rank": 32, "score": 67951.96001680184 }, { "content": " public static async resolve_id_from_name_bulk(names: string[], params: { [key: string]: any }): Promise<number[]> {\n\n return (await axios.post('/api/v1/items/resolve', names, { params })).data;\n", "file_path": "web/src/services/item.ts", "rank": 33, "score": 64904.59026693631 }, { "content": " public async init(): Promise<void> {\n\n return this.load_promise\n\n ? this.load_promise\n\n : new Promise<void>((r, _) => r());\n", "file_path": "web/src/project/project.ts", "rank": 34, "score": 59441.19542669445 }, { "content": "#[derive(Clone, Debug, Deserialize)]\n\nstruct Material {\n\n /// Quantity that is required\n\n quantity: i32,\n\n /// TypeId of the material that is required\n\n #[serde(rename = \"typeID\")]\n\n type_id: TypeId,\n\n\n\n /// This field is only set when the activity is an invention and there only\n\n /// for products\n\n probability: Option<f32>,\n\n}\n", "file_path": "sde_parser/src/blueprints.rs", "rank": 35, "score": 57922.60997228269 }, { "content": "#[derive(Clone, Debug, Deserialize)]\n\nstruct Blueprint {\n\n /// Holds all activities that are possible with that blueprint\n\n activities: HashMap<ActivityName, Activity>,\n\n}\n\n\n\nimpl Blueprint {\n\n /// Checks if the activity has reaction.\n\n ///\n\n /// # Returns\n\n ///\n\n /// * `true` -> If the entry is a reaction\n\n /// * `false` -> If there are not reactions\n\n ///\n\n pub fn is_reaction(&self) -> bool {\n\n self.activities.get(&ActivityName::Reaction).is_some()\n\n }\n\n\n\n /// Checks if the blueprint has a manufacture or reaction job.\n\n ///\n\n /// # Returns\n", "file_path": "sde_parser/src/blueprints.rs", "rank": 36, "score": 57922.60997228269 }, { "content": "#[derive(Clone, Debug, Deserialize)]\n\nstruct Activity {\n\n /// Time it takes to perform the activity\n\n time: i32,\n\n /// Required materials for the activity, will be an empty Vector if not\n\n /// materials are required\n\n #[serde(default)]\n\n materials: Vec<Material>,\n\n /// Products that are produced by this blueprint, will be an empty Vec if\n\n /// nothing is produced by this activity\n\n #[serde(default)]\n\n products: Vec<Material>,\n\n}\n\n\n\n/// Represents a material required for an activity\n", "file_path": "sde_parser/src/blueprints.rs", "rank": 37, "score": 57922.60997228269 }, { "content": "#[derive(Clone, Debug, Default)]\n\nstruct BlueprintRaw {\n\n /// Uniqe id\n\n bp_id: Uuid,\n\n /// Blueprint type id\n\n btype_id: TypeId,\n\n /// Product type id\n\n ptype_id: TypeId,\n\n /// Quantity that is produced with each run\n\n quantity: i32\n\n}\n\n\n\nimpl BlueprintRaw {\n\n /// Converts the struct into a SQL-Insert Query.\n\n ///\n\n /// # Returns\n\n ///\n\n /// SQL-Value tuple for inserting.\n\n ///\n\n pub fn into_sql(self) -> String {\n\n format!(\n\n \"('{}', {}, {}, {})\",\n\n self.bp_id,\n\n self.btype_id,\n\n self.ptype_id,\n\n self.quantity\n\n )\n\n }\n\n}\n\n\n\n/// Represetns a single manufacture job\n", "file_path": "sde_parser/src/blueprints.rs", "rank": 38, "score": 56580.62849798671 }, { "content": "#[derive(Clone, Debug, Default)]\n\nstruct BlueprintInvention {\n\n /// Unique id of the invention\n\n bp_id: Uuid,\n\n /// Blueprint type id\n\n btype_id: TypeId,\n\n /// Tier 1 product type id\n\n ttype_id: TypeId,\n\n /// Product type id\n\n ptype_id: TypeId,\n\n /// TypeId of the invented blueprint\n\n itype_id: TypeId,\n\n\n\n /// Time it takes to invent\n\n time: i32,\n\n /// Probability that the invention works\n\n probability: f32\n\n}\n\n\n\nimpl BlueprintInvention {\n\n /// Converts the struct into a SQL-Insert Query.\n", "file_path": "sde_parser/src/blueprints.rs", "rank": 39, "score": 56580.62849798671 }, { "content": "#[derive(Clone, Debug, Default)]\n\nstruct BlueprintMaterial {\n\n /// Unique id that references to [BlueprintInvention]\n\n bp_id: Uuid,\n\n /// Required quantity\n\n quantity: i32,\n\n /// TypeId of the material\n\n mtype_id: TypeId,\n\n /// Quantity that is produced by the product\n\n produces: i32,\n\n /// Time to research time efficiency\n\n time: i32,\n\n}\n\n\n\nimpl BlueprintMaterial {\n\n /// Converts the struct into a SQL-Insert Query.\n\n ///\n\n /// # Returns\n\n ///\n\n /// SQL-Value tuple for inserting.\n\n ///\n", "file_path": "sde_parser/src/blueprints.rs", "rank": 40, "score": 56580.62849798671 }, { "content": "#[derive(Clone, Debug, Default)]\n\nstruct BlueprintManufacture {\n\n /// Uniqe id\n\n bp_id: Uuid,\n\n /// Blueprint type id\n\n btype_id: TypeId,\n\n /// Product type id\n\n ptype_id: TypeId,\n\n /// Quantity that is produced with each run\n\n quantity: i32,\n\n /// Time it takes to construct\n\n time: i32,\n\n /// Determines if this entry is a reaction\n\n reaction: bool\n\n}\n\n\n\nimpl BlueprintManufacture {\n\n /// Converts the struct into a SQL-Insert Query.\n\n ///\n\n /// # Returns\n\n ///\n", "file_path": "sde_parser/src/blueprints.rs", "rank": 41, "score": 56580.62849798671 }, { "content": "#[derive(Clone, Debug, Default)]\n\nstruct BlueprintResearch {\n\n /// Blueprint type id\n\n btype_id: TypeId,\n\n /// Product type id\n\n ptype_id: TypeId,\n\n\n\n /// Time to research material efficiency\n\n material: i32,\n\n /// Time to research time efficiency\n\n time: i32,\n\n /// Time to make a blueprint copy\n\n copy: i32,\n\n}\n\n\n\nimpl BlueprintResearch {\n\n /// Converts the struct into a SQL-Insert Query.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n", "file_path": "sde_parser/src/blueprints.rs", "rank": 42, "score": 56580.62849798671 }, { "content": "/// Generates the SQL-Code for inserting all raw entries that are required for\n\n/// a blueprint or reaction.\n\n///\n\n/// # Params\n\n///\n\n/// * `bps` -> Map of the parsed `blueprint.yaml` file\n\n///\n\n/// # Returns\n\n///\n\n/// String containing the SQL-Query.\n\n///\n\nfn sql_raw(\n\n blueprints: &HashMap<TypeId, Blueprint>,\n\n products: &HashMap<TypeId, Blueprint>,\n\n) -> String {\n\n let mut entries = Vec::new();\n\n let mut materials = Vec::new();\n\n\n\n for (bp_id, bp) in blueprints {\n\n let mut raw = HashMap::new();\n\n\n\n let mut todo = VecDeque::new();\n\n todo.extend(bp.materials());\n\n\n\n while let Some(e) = todo.pop_front() {\n\n if let Some(x) = products.get(&e.type_id) {\n\n todo.extend(x.materials());\n\n } else {\n\n raw\n\n .entry(e.type_id)\n\n .and_modify(|r: &mut Material| r.quantity += e.quantity)\n", "file_path": "sde_parser/src/blueprints.rs", "rank": 43, "score": 56412.04425219765 }, { "content": "/// Generates the SQL-Code for inserting all raw entries that are required for\n\n/// a blueprint or reaction.\n\n///\n\n/// # Params\n\n///\n\n/// * `bps` -> Map of the parsed `blueprint.yaml` file\n\n///\n\n/// # Returns\n\n///\n\n/// String containing the SQL-Query.\n\n///\n\nfn sql_json(\n\n blueprints: &HashMap<TypeId, Blueprint>,\n\n products: &HashMap<TypeId, Blueprint>,\n\n) -> String {\n\n #[derive(Clone, Debug, Serialize)]\n\n enum DependencyType {\n\n Blueprint,\n\n Reaction,\n\n\n\n Material,\n\n }\n\n #[derive(Clone, Debug, Serialize)]\n\n struct Dependency {\n\n btype_id: TypeId,\n\n ptype_id: TypeId,\n\n time: u32,\n\n quantity: u32,\n\n typ: DependencyType,\n\n components: Vec<Dependency>,\n\n }\n", "file_path": "sde_parser/src/blueprints.rs", "rank": 44, "score": 56412.04425219765 }, { "content": "/// Generates a SQL-Query containing all blueprints\n\n/// \n\n/// # Returns\n\n/// \n\n/// String containing the value-tuple\n\n/// \n\nfn sql_manufacture(\n\n blueprints: &HashMap<TypeId, Blueprint>,\n\n) -> String {\n\n let excluded_type_ids = vec![\n\n 2738, 2742, 2743, 2744, 2745, 2746, 2747, 2748, 2749, 2751, 2753, 2754,\n\n 2756, 2758, 2760, 2762, 2764, 2765, 2766, 2767, 2768, 2769, 2770, 2771,\n\n 2772, 2773, 2786, 2788, 2789, 2790, 2791, 2793, 2795, 2797, 2800, 2820,\n\n 2821, 21943, 21944, 21945, 21946, 28605, 32800, 32802, 32804, 33515,\n\n 33582, 33584, 33868, 34222, 42134, 42135,\n\n ];\n\n\n\n let mut bps = Vec::new();\n\n let mut materials = Vec::new();\n\n\n\n for (btype_id, entry) in blueprints {\n\n let bp_id = Uuid::new_v4();\n\n\n\n if excluded_type_ids.contains(btype_id) {\n\n continue;\n\n }\n", "file_path": "sde_parser/src/blueprints.rs", "rank": 45, "score": 56409.22038526903 }, { "content": "#[derive(Clone, Debug, Default)]\n\nstruct BlueprintManufactureComponent {\n\n /// Uniqe id\n\n bp_id: Uuid,\n\n /// Blueprint type id\n\n btype_id: TypeId,\n\n /// Product type id\n\n ptype_id: TypeId,\n\n /// Quantity that is produced with each run\n\n quantity: i32,\n\n}\n\n\n\nimpl BlueprintManufactureComponent {\n\n /// Converts the struct into a SQL-Insert Query.\n\n ///\n\n /// # Returns\n\n ///\n\n /// SQL-Value tuple for inserting.\n\n ///\n\n pub fn into_sql(self) -> String {\n\n format!(\n\n \"('{}', {}, {}, {})\",\n\n self.bp_id,\n\n self.btype_id,\n\n self.ptype_id,\n\n self.quantity,\n\n )\n\n }\n\n}\n\n\n\n/// Represetns a single invention\n", "file_path": "sde_parser/src/blueprints.rs", "rank": 46, "score": 55330.44048673961 }, { "content": "/// Generates a SQL-Query containing all blueprints components\n\n/// \n\n/// # Returns\n\n/// \n\n/// String containing the value-tuple\n\n/// \n\nfn sql_manufacture_components(\n\n blueprints: &HashMap<TypeId, Blueprint>,\n\n products: &HashMap<TypeId, Blueprint>,\n\n) -> String {\n\n let mut bps = Vec::new();\n\n let mut materials = Vec::new();\n\n\n\n for (btype_id, entry) in blueprints {\n\n let bp_id = Uuid::new_v4();\n\n\n\n let mut queue = VecDeque::from(entry.materials());\n\n let mut components = HashMap::new();\n\n\n\n while let Some(e) = queue.pop_front() {\n\n let product = if let Some(x) = products.get(&e.type_id) {\n\n x\n\n } else {\n\n continue;\n\n };\n\n\n", "file_path": "sde_parser/src/blueprints.rs", "rank": 47, "score": 55161.40578006348 }, { "content": "/// TODO: validate\n\npub fn recreate_secure_token(\n\n token: String,\n\n) -> Result<String, Error> {\n\n let secret = std::env::var(\"SECRET_KEY\")\n\n .map_err(Error::MissingEnvSecretKey)?;\n\n\n\n let token = base64::decode(token)\n\n .map_err(Error::InvalidBase64)?;\n\n\n\n let mut mac = Hmac::<Sha256>::new_from_slice(secret.as_bytes())\n\n .map_err(Error::HmacInitError)?;\n\n mac.update(&token);\n\n\n\n let result = mac.finalize();\n\n let result = result.into_bytes();\n\n\n\n Ok(base64::encode(result))\n\n}\n", "file_path": "server/src/utils.rs", "rank": 48, "score": 50807.603487909335 }, { "content": "pub fn dependency(\n\n name: String,\n\n ptype_id: TypeId,\n\n products: i64,\n\n components: Vec<Dependency>\n\n) -> Dependency {\n\n Dependency {\n\n name: name,\n\n blueprint_name: \"\".into(),\n\n btype_id: 0.into(),\n\n ptype_id: ptype_id,\n\n category_id: 0.into(),\n\n group_id: 0.into(),\n\n products: products,\n\n products_base: 1i64,\n\n products_per_run: 1i64,\n\n time: 1i64,\n\n time_per_run: 1i64,\n\n dependency_type: DependencyType::Material,\n\n components: components,\n\n }\n\n}\n\n\n", "file_path": "server/src/project/dependency/test_utils.rs", "rank": 49, "score": 49645.94604035406 }, { "content": "pub fn fuel_block(\n\n name: String,\n\n type_id: TypeId,\n\n) -> Dependency {\n\n let mut isotope = Dependency {\n\n name: \"\".into(),\n\n blueprint_name: \"\".into(),\n\n btype_id: 0.into(),\n\n ptype_id: 0.into(),\n\n category_id: 4.into(),\n\n group_id: 423.into(),\n\n products_per_run: 0,\n\n products: 450i64,\n\n products_base: 450i64,\n\n time: 0i64,\n\n time_per_run: 0i64,\n\n dependency_type: DependencyType::Material,\n\n components: Vec::new()\n\n };\n\n\n", "file_path": "server/src/project/dependency/test_utils.rs", "rank": 50, "score": 48561.77423777027 }, { "content": "CREATE TABLE moon_pulls (\n\n id UUID NOT NULL DEFAULT uuid_generate_v4(),\n\n character_id INTEGER NOT NULL,\n\n\n\n -- TypeId of the material\n\n material_1 INTEGER NOT NULL,\n\n material_2 INTEGER NOT NULL,\n\n material_3 INTEGER,\n\n material_4 INTEGER,\n\n\n\n -- Amount that is pulled\n\n material_1_amount INTEGER NOT NULL,\n\n material_2_amount INTEGER NOT NULL,\n\n material_3_amount INTEGER,\n\n material_4_amount INTEGER,\n\n\n\n -- Time the pull ends\n\n extraction_time TIMESTAMPTZ NOT NULL DEFAULT NOW(),\n\n\n\n -- Appraisal for the mined volume and the waste volume\n\n appraisal_mined VARCHAR,\n\n appraisal_waste VARCHAR\n\n);\n\n\n", "file_path": "server/migrations/20220529200024_moons.sql", "rank": 51, "score": 45378.51066465372 }, { "content": "CREATE TABLE moon_materials (\n\n -- Reference to the moon pull\n\n moon UUID NOT NULL,\n\n\n\n type_id INTEGER NOT NULL,\n\n amount INTEGER NOT NULL\n\n);\n", "file_path": "server/migrations/20220529200024_moons.sql", "rank": 52, "score": 45378.51066465372 }, { "content": " public static async buildable_items(): Promise<IItem[]> {\n\n return (\n\n await axios.get('/api/v1/items/buildable')\n\n ).data;\n", "file_path": "web/src/services/item.ts", "rank": 53, "score": 45282.20670877789 }, { "content": "#[async_trait]\n\npub trait RequestClient {\n\n /// Makes a single request to the given path and returns parses the result\n\n /// the given struct.\n\n ///\n\n /// # Params\n\n ///\n\n /// * `T` -> Model that represents the resulting json\n\n /// * `path` -> Path of the request\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns an error if eiher the request failed or the parsing failed\n\n ///\n\n /// # Returns\n\n ///\n\n /// Parsed json data\n\n ///\n\n async fn fetch<T>(\n\n &self,\n\n path: &str,\n", "file_path": "connector/src/client.rs", "rank": 54, "score": 45010.83104681678 }, { "content": "export interface IItem {\n\n type_id: number;\n\n name: string;\n", "file_path": "web/src/services/item.ts", "rank": 55, "score": 42106.47253368907 }, { "content": "export interface ICharacter {\n\n character: string;\n\n character_id: number;\n\n character_icon: string;\n\n alliance: string;\n\n alliance_id: number;\n\n alliance_icon: string;\n\n corporation: string;\n\n corporation_icon: string;\n\n corporation_id: number;\n", "file_path": "web/src/services/character.ts", "rank": 56, "score": 41902.210092707704 }, { "content": "export class ItemService {\n\n public static async components(): Promise<IItem[]> {\n\n return (\n\n await axios.get('/api/v1/items/components')\n\n ).data;\n\n }\n\n\n\n public static async buildable_items(): Promise<IItem[]> {\n\n return (\n\n await axios.get('/api/v1/items/buildable')\n\n ).data;\n\n }\n\n\n\n public static async resolve_id_from_name_bulk(names: string[], params: { [key: string]: any }): Promise<number[]> {\n\n return (await axios.post('/api/v1/items/resolve', names, { params })).data;\n\n }\n", "file_path": "web/src/services/item.ts", "rank": 57, "score": 41182.81642206532 }, { "content": "//! Adds the `git rev` as environment variable `GIT_HEAD_SHORT` during the build\n\n//! process, if hthe build environment variable `PROFILE` is not `debug` during\n\n//! the build process.\n\n\n\nuse std::process::Command;\n\n\n", "file_path": "server/build.rs", "rank": 58, "score": 41171.950888037405 }, { "content": "export class CharacterService {\n\n private static character_name_req: { [key: number]: Promise<ICharacter> } = { };\n\n private static character_names: { [key: number]: ICharacter } = { };\n\n\n\n // TODO: move to auth service\n\n public static async whoami(): Promise<ICharacter> {\n\n return (await axios.get(`/api/v1/auth/whoami`)).data;\n\n }\n\n\n\n // TODO: move to auth service\n\n public static add() {\n\n window.location.href = `/api/v1/auth/login/alt`;\n\n }\n\n\n\n public static async alts(): Promise<ICharacter[]> {\n\n return (await axios.get(`${BASE_ADDR}/alts`)).data;\n\n }\n\n\n\n public static async remove(cid: number): Promise<void> {\n\n return (await axios.delete(`${BASE_ADDR}/${cid}`)).data;\n\n }\n\n\n\n public static async info(cid: number): Promise<ICharacter> {\n\n // Ignore what typescript says\n\n if (<any>this.character_name_req[cid]) {\n\n return this.character_name_req[cid];\n\n }\n\n\n\n if (this.character_names[cid]) {\n\n return this.character_names[cid];\n\n } else {\n\n this.character_name_req[cid] = axios\n\n .get(`${BASE_ADDR}/${cid}/info`)\n\n .then(x => x.data)\n\n .then(x => {\n\n this.character_names[cid] = x;\n\n delete this.character_name_req[cid];\n\n return x;\n\n });\n\n return this.character_name_req[cid];\n\n }\n\n }\n\n\n\n public static async refresh(cid: number): Promise<void> {\n\n return await axios.get(`${BASE_ADDR}/${cid}/refresh`);\n\n }\n\n\n\n public static async blueprints_total(): Promise<any[]> {\n\n return (await axios.get(`${BASE_ADDR}/blueprints/total`)).data;\n\n }\n", "file_path": "web/src/services/character.ts", "rank": 59, "score": 40983.03472336976 }, { "content": " public static async components(): Promise<IItem[]> {\n\n return (\n\n await axios.get('/api/v1/items/components')\n\n ).data;\n", "file_path": "web/src/services/item.ts", "rank": 60, "score": 40298.81348552604 }, { "content": " public static async whoami(): Promise<ICharacter> {\n\n return (await axios.get(`/api/v1/auth/whoami`)).data;\n", "file_path": "web/src/services/character.ts", "rank": 61, "score": 40103.32016785095 }, { "content": " public static async remove(cid: number): Promise<void> {\n\n return (await axios.delete(`${BASE_ADDR}/${cid}`)).data;\n", "file_path": "web/src/services/character.ts", "rank": 62, "score": 40103.32016785095 }, { "content": " public static add() {\n\n window.location.href = `/api/v1/auth/login/alt`;\n", "file_path": "web/src/services/character.ts", "rank": 63, "score": 40103.32016785095 }, { "content": " public static async alts(): Promise<ICharacter[]> {\n\n return (await axios.get(`${BASE_ADDR}/alts`)).data;\n", "file_path": "web/src/services/character.ts", "rank": 64, "score": 40103.32016785095 }, { "content": " public static async refresh(cid: number): Promise<void> {\n\n return await axios.get(`${BASE_ADDR}/${cid}/refresh`);\n", "file_path": "web/src/services/character.ts", "rank": 65, "score": 40103.32016785095 }, { "content": "\n\n let mut headers = HeaderMap::new();\n\n headers.insert(\n\n \"X-ApiKey\",\n\n HeaderValue::from_str(&api_key).unwrap()\n\n );\n\n headers.insert(\n\n \"Content-Type\",\n\n HeaderValue::from_static(\"text/plain\")\n\n );\n\n\n\n let client = Client::builder()\n\n .user_agent(user_agent)\n\n .default_headers(headers)\n\n .build()\n\n .map_err(Error::CouldNotConstructClient)?;\n\n\n\n Ok(Self(client))\n\n }\n\n\n", "file_path": "appraisal/src/janice.rs", "rank": 66, "score": 39383.136357130395 }, { "content": "use async_trait::*;\n\nuse reqwest::Client;\n\nuse reqwest::header::{HeaderMap, HeaderValue};\n\nuse serde::Deserialize;\n\nuse std::collections::HashMap;\n\n\n\nuse crate::{Appraisal, AppraisalInformation, AppraisalItem, Error};\n\n\n\n/// Implementation for [janice](https://janice.e-351.com/)\n\n/// \n\n/// Additional documentation [swagger](https://janice.e-351.com/api/rest/docs/index.html)\n\npub struct Janice(reqwest::Client);\n\n\n\nimpl Janice {\n\n /// Name of the UserAgent ENV\n\n const USER_AGENT: &'static str = \"JANICE_USER_AGENT\";\n\n /// Name of the ApiKey ENV\n\n const API_KEY: &'static str = \"JANICE_API_KEY\";\n\n /// Url for creating appraisals\n\n const APPRAISAL_URL: &'static str = \"https://janice.e-351.com/api/rest/v2/appraisal\";\n", "file_path": "appraisal/src/janice.rs", "rank": 67, "score": 39382.1751650647 }, { "content": "}\n\n\n\n#[async_trait]\n\nimpl Appraisal for Janice where Self: Sized {\n\n /// Validates that all required Environment variables are set\n\n /// \n\n /// # Error\n\n /// \n\n /// Fails when a Environment-Variable is missing\n\n /// \n\n /// # Returns\n\n /// \n\n /// `Ok` -> If all Environment-Variables are set\n\n /// `Err` -> Not all Environment-Variables are set, contains the missing ENV-Name\n\n /// \n\n fn validate() -> Result<(), Error> {\n\n std::env::var(Self::USER_AGENT)\n\n .map_err(|_| Error::MissingEnv(format!(\"JANICE_{}\", Self::USER_AGENT)))\n\n .map(drop)?;\n\n std::env::var(Self::API_KEY)\n", "file_path": "appraisal/src/janice.rs", "rank": 68, "score": 39379.897819662125 }, { "content": " .map_err(|_| Error::MissingEnv(format!(\"JANICE_{}\", Self::API_KEY)))\n\n .map(drop)?;\n\n Ok(())\n\n }\n\n\n\n /// Creates a new janice appraisal instance.\n\n /// \n\n /// # Error\n\n /// \n\n /// If not all required Environment-Variables are set.\n\n /// \n\n /// # Returns\n\n /// \n\n /// Appraisal instance\n\n /// \n\n fn init() -> Result<Self, Error> {\n\n let user_agent = std::env::var(Self::USER_AGENT)\n\n .map_err(|_| Error::MissingEnv(format!(\"JANICE_{}\", Self::USER_AGENT)))?;\n\n let api_key = std::env::var(Self::API_KEY)\n\n .map_err(|_| Error::MissingEnv(format!(\"JANICE_{}\", Self::API_KEY)))?;\n", "file_path": "appraisal/src/janice.rs", "rank": 69, "score": 39374.16124833488 }, { "content": " entries: Vec<String>\n\n ) -> Result<AppraisalInformation, Error> {\n\n let mut params = HashMap::new();\n\n params.insert(\"persist\", persist.to_string());\n\n params.insert(\"designation\", \"appraisal\".into());\n\n params.insert(\"pricing\", \"split\".into());\n\n params.insert(\"pricingVariant\", \"immediate\".into());\n\n\n\n self.0\n\n .post(Self::APPRAISAL_URL)\n\n .query(&params)\n\n .body(entries.join(\"\\n\"))\n\n .send()\n\n .await\n\n .map_err(Error::RequestError)?\n\n .json::<ApprisalResponse>()\n\n .await\n\n .map_err(Error::RequestError)\n\n .map(Into::into)\n\n }\n", "file_path": "appraisal/src/janice.rs", "rank": 70, "score": 39373.63125080074 }, { "content": "\n\n/// Represents an janice Appraisal Value\n\n#[derive(Debug, Deserialize)]\n\npub struct ItemType {\n\n /// TypeId of the item\n\n pub eid: u32,\n\n /// Name of the item\n\n pub name: String,\n\n}\n\n\n\n/// Represents an janice Appraisal Value\n\n#[derive(Debug, Deserialize)]\n\npub struct AppraisalValue {\n\n /// Buy price for all items\n\n #[serde(rename = \"totalBuyPrice\")]\n\n pub buy_price: f32,\n\n /// Split price for all items\n\n #[serde(rename = \"totalSplitPrice\")]\n\n pub split_price: f32,\n\n /// Sell price for all items\n", "file_path": "appraisal/src/janice.rs", "rank": 71, "score": 39372.430679575635 }, { "content": " #[serde(rename = \"totalSellPrice\")]\n\n pub sell_price: f32\n\n}\n\n\n\n/// Represents an janice Appraisal Item Value\n\n#[derive(Debug, Deserialize)]\n\npub struct AppraisalValueItem {\n\n /// Buy price for one items\n\n #[serde(rename = \"buyPrice\")]\n\n pub buy_price: f32,\n\n /// Split price for one items\n\n #[serde(rename = \"splitPrice\")]\n\n pub split_price: f32,\n\n /// Sell price for one items\n\n #[serde(rename = \"sellPrice\")]\n\n pub sell_price: f32,\n\n\n\n /// Buy price for all items\n\n #[serde(rename = \"buyPriceTotal\")]\n\n pub buy_price_total: f32,\n\n /// Split price for all items\n\n #[serde(rename = \"splitPriceTotal\")]\n\n pub split_price_total: f32,\n\n /// Sell price for all items\n\n #[serde(rename = \"sellPriceTotal\")]\n\n pub sell_price_total: f32,\n\n}\n", "file_path": "appraisal/src/janice.rs", "rank": 72, "score": 39367.27657479613 }, { "content": "}\n\n\n\nimpl Into<AppraisalItem> for Item {\n\n fn into(self) -> AppraisalItem {\n\n AppraisalItem {\n\n type_id: self.item_type.eid,\n\n name: self.item_type.name,\n\n\n\n amount: self.amount,\n\n\n\n sell_price: self.average_price.sell_price,\n\n split_price: self.average_price.split_price,\n\n buy_price: self.average_price.buy_price,\n\n\n\n sell_price_total: self.average_price.sell_price_total,\n\n split_price_total: self.average_price.split_price_total,\n\n buy_price_total: self.average_price.buy_price_total,\n\n }\n\n }\n\n}\n", "file_path": "appraisal/src/janice.rs", "rank": 73, "score": 39366.827701900846 }, { "content": " /// Creates a new apprisal\n\n /// \n\n /// # Params\n\n /// \n\n /// * `persist` -> Determines if the apprisal should be stored\n\n /// * `entries` -> List of entries to create a apprisal for\n\n /// Format: `item_name quantity`\n\n /// \n\n /// # Errors\n\n /// \n\n /// - When the server is not reachable\n\n /// - Invalid Format\n\n /// \n\n /// # Returns\n\n /// \n\n /// Appraisal information\n\n /// \n\n async fn create(\n\n &self,\n\n persist: bool,\n", "file_path": "appraisal/src/janice.rs", "rank": 74, "score": 39364.85136174646 }, { "content": " /// Optional code to share the appraisal\n\n pub code: Option<String>,\n\n}\n\n\n\nimpl Into<AppraisalInformation> for ApprisalResponse {\n\n fn into(self) -> AppraisalInformation {\n\n let uri = self.code\n\n .as_ref()\n\n .map(|x| format!(\"https://janice.e-351.com/a/{}\", x));\n\n\n\n AppraisalInformation {\n\n sell_price: self.average_price.sell_price,\n\n split_price: self.average_price.split_price,\n\n buy_price: self.average_price.buy_price,\n\n items: self.items.into_iter().map(Into::into).collect::<Vec<_>>(),\n\n code: self.code,\n\n uri: uri,\n\n }\n\n }\n\n}\n", "file_path": "appraisal/src/janice.rs", "rank": 75, "score": 39362.491367813505 }, { "content": "\n\n/// Represents an janice Appraisal Value\n\n#[derive(Debug, Deserialize)]\n\npub struct Item {\n\n /// Effective price\n\n #[serde(rename = \"effectivePrices\")]\n\n pub effective_price: AppraisalValueItem,\n\n /// Price if sold immidiate\n\n #[serde(rename = \"immediatePrices\")]\n\n pub immidiate_price: AppraisalValueItem,\n\n /// Average price of the top 5\n\n #[serde(rename = \"top5AveragePrices\")]\n\n pub average_price: AppraisalValueItem,\n\n\n\n /// Given amount of the item\n\n pub amount: u64,\n\n\n\n /// Information about the item\n\n #[serde(rename = \"itemType\")]\n\n pub item_type: ItemType,\n", "file_path": "appraisal/src/janice.rs", "rank": 76, "score": 39358.936194495436 }, { "content": "}\n\n\n\n/// Represents the response from janice\n\n/// \n\n/// Not all fields are represented\n\n#[derive(Debug, Deserialize)]\n\npub struct ApprisalResponse {\n\n /// Effective price\n\n #[serde(rename = \"effectivePrices\")]\n\n pub effective_price: AppraisalValue,\n\n /// Price if sold immidiate\n\n #[serde(rename = \"immediatePrices\")]\n\n pub immidiate_price: AppraisalValue,\n\n /// Average price of the top 5\n\n #[serde(rename = \"top5AveragePrices\")]\n\n pub average_price: AppraisalValue,\n\n\n\n /// Breakdown of all items\n\n pub items: Vec<Item>,\n\n\n", "file_path": "appraisal/src/janice.rs", "rank": 77, "score": 39358.16320307697 }, { "content": "mod api;\n\nmod service;\n\n\n\npub use self::api::*;\n\npub use self::service::*;\n", "file_path": "server/src/moon.rs", "rank": 78, "score": 39351.790448099535 }, { "content": "use axum::{Extension, Router};\n\nuse caph_server::*;\n\nuse sqlx::postgres::PgPoolOptions;\n\nuse tracing::Level;\n\nuse tracing_subscriber::EnvFilter;\n\n\n\n/// ENV variable for the database URL\n\nconst PG_ADDR: &str = \"DATABASE_URL\";\n\n/// ENV variable for the address the server should bind to\n\nconst SERVER_BIND_ADDR: &str = \"SERVER_BIND_ADDR\";\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n dotenv::dotenv().ok();\n\n\n\n if std::env::var(\"GIT_HEAD_SHORT\").is_ok() {\n\n tracing_subscriber::fmt()\n\n .with_max_level(Level::WARN)\n\n .init();\n\n } else {\n", "file_path": "server/src/main.rs", "rank": 79, "score": 39224.22536026447 }, { "content": " tracing_subscriber::fmt()\n\n .pretty()\n\n .with_env_filter(EnvFilter::from_default_env())\n\n .init();\n\n }\n\n\n\n let pg_addr = std::env::var(PG_ADDR)\n\n .expect(\"Expected that a DATABASE_URL ENV is set\");\n\n let pool = PgPoolOptions::new()\n\n .max_connections(10)\n\n .connect(&pg_addr)\n\n .await?;\n\n sqlx::migrate!()\n\n .run(&pool)\n\n .await?;\n\n\n\n let auth_service = AuthService::new(pool.clone());\n\n let character_service = CharacterService::new(\n\n pool.clone(),\n\n auth_service.clone()\n", "file_path": "server/src/main.rs", "rank": 80, "score": 39223.17846660583 }, { "content": " .layer(Extension(pool))\n\n .into_make_service();\n\n\n\n let bind = std::env::var(SERVER_BIND_ADDR)\n\n .unwrap_or_else(|_| String::from(\"127.0.0.1:8080\"))\n\n .parse()\n\n .map_err(|_| Error::CouldNotParseServerListenAddr)?;\n\n tracing::info!(\"Starting server\");\n\n axum::Server::bind(&bind)\n\n .serve(app)\n\n .await\n\n .map_err(|_| Error::CouldNotStartServer)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "server/src/main.rs", "rank": 81, "score": 39217.05655615748 }, { "content": " );\n\n let item_service = ItemService::new(pool.clone());\n\n\n\n let dependency_cache = DependencyCache::new(pool.clone()).await?;\n\n let project_blueprint_service = ProjectBlueprintService::new(\n\n pool.clone(),\n\n character_service.clone(),\n\n );\n\n let project_storage_service = ProjectStorageService::new(\n\n pool.clone()\n\n );\n\n let project_service = ProjectService::new(\n\n pool.clone(),\n\n\n\n project_blueprint_service.clone(),\n\n\n\n dependency_cache\n\n );\n\n\n\n let moon_service = MoonService::new(\n", "file_path": "server/src/main.rs", "rank": 82, "score": 39213.85595933593 }, { "content": " pool.clone()\n\n );\n\n\n\n let app = Router::new()\n\n .nest(\n\n \"/api/v1\",\n\n Router::new()\n\n .nest(\"/auth\", crate::AuthApi::router())\n\n .nest(\"/character\", crate::CharacterApi::router())\n\n .nest(\"/items\", crate::ItemApi::router())\n\n .nest(\"/projects\", crate::ProjectApi::router())\n\n .nest(\"/moons\", crate::moon::router())\n\n )\n\n .layer(Extension(auth_service))\n\n .layer(Extension(character_service))\n\n .layer(Extension(item_service))\n\n .layer(Extension(project_service))\n\n .layer(Extension(project_blueprint_service))\n\n .layer(Extension(project_storage_service))\n\n .layer(Extension(moon_service))\n", "file_path": "server/src/main.rs", "rank": 83, "score": 39210.919614422935 }, { "content": "--------------------------------------------------------------------------------\n", "file_path": "server/migrations/20220326201841_init.sql", "rank": 84, "score": 39187.70846007871 }, { "content": "mod api;\n\nmod service;\n\n\n\npub use self::api::*;\n\npub use self::service::*;\n", "file_path": "server/src/item.rs", "rank": 85, "score": 39148.44522028321 }, { "content": " if let Some(x) = headers.get(\"x-pages\") {\n\n x.to_str()\n\n .unwrap_or_default()\n\n .parse::<u8>()\n\n .unwrap_or_default()\n\n } else {\n\n 0u8\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for EveAuthClient {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_struct(\"EveAuthClient\")\n\n .finish()\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl RequestClient for EveAuthClient {\n", "file_path": "connector/src/client.rs", "rank": 86, "score": 39052.35958174856 }, { "content": " _path: &str,\n\n ) -> Result<T, ConnectError>\n\n where\n\n R: Debug + Serialize + Send + Sync,\n\n T: DeserializeOwned {\n\n\n\n unimplemented!()\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for EveClient {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_struct(\"EveClient\")\n\n .finish()\n\n }\n\n}\n\n\n\n/// Client for communicating with the EVE-API using an authenticated character.\n\n///\n\n/// After constructing it provides helper functions for performing a\n", "file_path": "connector/src/client.rs", "rank": 87, "score": 39048.31681940329 }, { "content": " }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl RequestClient for EveClient {\n\n #[tracing::instrument(level = \"debug\")]\n\n async fn fetch<T>(\n\n &self,\n\n path: &str,\n\n ) -> Result<T, ConnectError>\n\n where T: DeserializeOwned {\n\n let json = self.send(path)\n\n .await?\n\n .json::<T>()\n\n .await\n\n .map_err(ConnectError::ReqwestError)?;\n\n Ok(json)\n\n }\n\n\n", "file_path": "connector/src/client.rs", "rank": 88, "score": 39044.985890513446 }, { "content": " Client::new()\n\n .post(Self::EVE_TOKEN_URL)\n\n .basic_auth(client_id, Some(secret_key))\n\n .header(\"Content-Type\", \"application/x-www-form-urlencoded\")\n\n .header(\"Host\", \"login.eveonline.com\")\n\n .form(&form)\n\n .send()\n\n .await\n\n .map_err(ConnectError::ReqwestError)?\n\n .json::<EveOAuthToken>()\n\n .await\n\n .map_err(ConnectError::ReqwestError)\n\n }\n\n\n\n /// Consutructs a new [EveAuthClient].\n\n ///\n\n /// # Requirements\n\n ///\n\n /// The ENV `EVE_USER_AGENT` must be set.\n\n ///\n", "file_path": "connector/src/client.rs", "rank": 89, "score": 39044.86896601711 }, { "content": " #[allow(clippy::unwrap_in_result)]\n\n pub fn with_access_token(\n\n access_token: String,\n\n refresh_token: String\n\n ) -> Result<Self, ConnectError> {\n\n let s = Self::new(refresh_token)?;\n\n #[allow(clippy::unwrap_used)]\n\n {\n\n *s.access_token.lock().unwrap() = Some(access_token);\n\n }\n\n Ok(s)\n\n }\n\n\n\n /// Generates a url for authenticationg a character against the EVE-API.\n\n ///\n\n /// # Params\n\n ///\n\n /// * `state` -> Unique key, used for extra security\n\n /// * `scope` -> Required scope, musst be a lost of space seperated entries\n\n ///\n", "file_path": "connector/src/client.rs", "rank": 90, "score": 39044.69057650832 }, { "content": " /// Parsed json data\n\n ///\n\n async fn post<R, T>(\n\n &self,\n\n data: R,\n\n path: &str,\n\n ) -> Result<T, ConnectError>\n\n where\n\n R: Debug + Serialize + Send + Sync,\n\n T: DeserializeOwned;\n\n}\n\n\n\n/// Client for communicating with the EVE-API.\n\n///\n\n/// # Required ENV\n\n///\n\n/// If not all required ENVs are set, an error will be returned\n\n///\n\n/// * `EVE_USER_AGENT` -> Name of the user agent that is send with every request\n\n///\n", "file_path": "connector/src/client.rs", "rank": 91, "score": 39044.28518313795 }, { "content": "#[derive(Clone)]\n\npub struct EveAuthClient {\n\n /// Client for communicating with EVE\n\n client: Client,\n\n /// Token to get a new `access_token`\n\n refresh_token: String,\n\n /// Token needed to get data that is behind auth\n\n access_token: Arc<Mutex<Option<String>>>,\n\n}\n\n\n\nimpl EveAuthClient {\n\n /// URL to the EVE-API\n\n const EVE_API_URL: &'static str = \"https://esi.evetech.net\";\n\n /// URL to the EVE-API oauth login page\n\n const EVE_LOGIN_URL: &'static str = \"https://login.eveonline.com/v2/oauth/authorize\";\n\n /// URL to the EVE-API oauth token\n\n const EVE_TOKEN_URL: &'static str = \"https://login.eveonline.com/v2/oauth/token\";\n\n /// Name of the ENV of the application callback\n\n const ENV_CALLBACK: &'static str = \"EVE_CALLBACK\";\n\n /// Name of the ENV of the application client id\n", "file_path": "connector/src/client.rs", "rank": 92, "score": 39043.803561062115 }, { "content": " pub fn auth_uri(state: &str, scope: Option<&str>) -> Result<Url, ConnectError> {\n\n let scope = if let Some(x) = scope {\n\n x\n\n } else {\n\n Self::DEFAULT_SCOPE\n\n };\n\n\n\n let mut url = Url::parse(Self::EVE_LOGIN_URL)\n\n .map_err(|_| ConnectError::UrlParseError)?;\n\n\n\n let callback = std::env::var(Self::ENV_CALLBACK)\n\n .map_err(|_| ConnectError::env_callback())?;\n\n let client_id = std::env::var(Self::ENV_CLIENT_ID)\n\n .map_err(|_| ConnectError::env_client_id())?;\n\n let _ = std::env::var(Self::ENV_SECRET_KEY)\n\n .map_err(|_| ConnectError::env_secret_key())?;\n\n\n\n url.query_pairs_mut()\n\n .append_pair(\"response_type\", \"code\")\n\n .append_pair(\"redirect_uri\", &callback)\n", "file_path": "connector/src/client.rs", "rank": 93, "score": 39043.66843086673 }, { "content": " /// # Errors\n\n ///\n\n /// If the retrieving of an `access_token` fails the function will return\n\n /// an error\n\n ///\n\n pub async fn access_token(\n\n code: &str\n\n ) -> Result<EveOAuthToken, ConnectError> {\n\n let mut map = HashMap::new();\n\n map.insert(\"grant_type\", \"authorization_code\");\n\n map.insert(\"code\", code);\n\n\n\n let token = Self::get_token(map).await?;\n\n Ok(token)\n\n }\n\n\n\n /// Makes a request to the token interface and sets necessary headers to\n\n /// retrieve a new `access_token`.\n\n ///\n\n /// # Params\n", "file_path": "connector/src/client.rs", "rank": 94, "score": 39043.50572314323 }, { "content": " const ENV_CLIENT_ID: &'static str = \"EVE_CLIENT_ID\";\n\n /// Name of the ENV of the application secret key\n\n const ENV_SECRET_KEY: &'static str = \"EVE_SECRET_KEY\";\n\n /// Name of the ENV of the user agent\n\n const ENV_USER_AGENT: &'static str = \"EVE_USER_AGENT\";\n\n /// Default scope that is used\n\n const DEFAULT_SCOPE: &'static str = \"publicData\";\n\n\n\n /// Gets the initial access token,\n\n ///\n\n /// [More information](https://docs.esi.evetech.net/docs/sso/web_based_sso_flow.html)\n\n ///\n\n /// # Params\n\n ///\n\n /// * `code` -> Code send by the EVE-API as query parameter\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics if the [Mutex] is not exclusive.\n\n ///\n", "file_path": "connector/src/client.rs", "rank": 95, "score": 39041.937931239045 }, { "content": " #[tracing::instrument(level = \"debug\")]\n\n async fn fetch<T>(\n\n &self,\n\n path: &str,\n\n ) -> Result<T, ConnectError>\n\n where T: DeserializeOwned {\n\n let path = format!(\"{}/{}\", Self::EVE_API_URL, path);\n\n let json = self.send(&path)\n\n .await?\n\n .json::<T>()\n\n .await\n\n .map_err(ConnectError::ReqwestError)?;\n\n Ok(json)\n\n }\n\n\n\n #[tracing::instrument(level = \"debug\")]\n\n async fn fetch_page<T>(\n\n &self,\n\n path: &str,\n\n ) -> Result<Vec<T>, ConnectError>\n", "file_path": "connector/src/client.rs", "rank": 96, "score": 39040.558854318515 }, { "content": " /// See the returned error for more details.\n\n ///\n\n /// # Returns\n\n ///\n\n /// New instance of the [EveClient]\n\n ///\n\n pub fn new() -> Result<Self, ConnectError> {\n\n let user_agent = std::env::var(Self::ENV_USER_AGENT)\n\n .map_err(|_| ConnectError::env_user_agent())?;\n\n\n\n let client = Client::builder()\n\n .user_agent(user_agent)\n\n .build()\n\n .map_err(ConnectError::CouldNotConstructClient)?;\n\n\n\n Ok(Self(client))\n\n }\n\n\n\n /// Deconstructs the struct and returns the underlying [reqwest::Client].\n\n ///\n", "file_path": "connector/src/client.rs", "rank": 97, "score": 39040.14826761966 }, { "content": "#[derive(Clone)]\n\npub struct EveClient(Client);\n\n\n\nimpl EveClient {\n\n /// URL to the EVE-API\n\n const EVE_API_URL: &'static str = \"https://esi.evetech.net\";\n\n /// Name of the ENV of the user agent\n\n const ENV_USER_AGENT: &'static str = \"EVE_USER_AGENT\";\n\n\n\n /// Consutructs a new [EveClient].\n\n ///\n\n /// # Requirements\n\n ///\n\n /// The ENV `EVE_USER_AGENT` must be set.\n\n ///\n\n /// # Errors\n\n ///\n\n /// The function will return an error if the ENV `EVE_USER_AGENT` is not set.\n\n /// Besides that it will return an error if the client could not be\n\n /// constructed.\n", "file_path": "connector/src/client.rs", "rank": 98, "score": 39040.022293991766 }, { "content": "\n\n let client = Client::builder()\n\n .user_agent(user_agent)\n\n .pool_idle_timeout(None)\n\n .build()\n\n .map_err(ConnectError::CouldNotConstructClient)?;\n\n\n\n Ok(Self {\n\n client: client,\n\n refresh_token: refresh_token,\n\n access_token: Arc::new(Mutex::new(None))\n\n })\n\n }\n\n\n\n /// Consutructs a new [EveAuthClient] with an existing `access_token`.\n\n ///\n\n /// # Requirements\n\n ///\n\n /// The ENV `EVE_USER_AGENT` must be set.\n\n ///\n", "file_path": "connector/src/client.rs", "rank": 99, "score": 39039.946913896594 } ]
Rust
sdk/src/system_transaction.rs
sunnygleason/solana
1fb9ed98372dfaf8f9a6691e7ec6d5bc5577bd2a
use crate::hash::Hash; use crate::pubkey::Pubkey; use crate::signature::Keypair; use crate::system_instruction::SystemInstruction; use crate::system_program; use crate::transaction::{Instruction, Transaction}; pub struct SystemTransaction {} impl SystemTransaction { pub fn new_program_account( from_keypair: &Keypair, to: Pubkey, last_id: Hash, tokens: u64, space: u64, program_id: Pubkey, fee: u64, ) -> Transaction { let create = SystemInstruction::CreateAccount { tokens, space, program_id, }; Transaction::new( from_keypair, &[to], system_program::id(), &create, last_id, fee, ) } pub fn new_account( from_keypair: &Keypair, to: Pubkey, tokens: u64, last_id: Hash, fee: u64, ) -> Transaction { let program_id = system_program::id(); Self::new_program_account(from_keypair, to, last_id, tokens, 0, program_id, fee) } pub fn new_assign( from_keypair: &Keypair, last_id: Hash, program_id: Pubkey, fee: u64, ) -> Transaction { let assign = SystemInstruction::Assign { program_id }; Transaction::new( from_keypair, &[], system_program::id(), &assign, last_id, fee, ) } pub fn new_move( from_keypair: &Keypair, to: Pubkey, tokens: u64, last_id: Hash, fee: u64, ) -> Transaction { let move_tokens = SystemInstruction::Move { tokens }; Transaction::new( from_keypair, &[to], system_program::id(), &move_tokens, last_id, fee, ) } pub fn new_move_many( from: &Keypair, moves: &[(Pubkey, u64)], last_id: Hash, fee: u64, ) -> Transaction { let instructions: Vec<_> = moves .iter() .enumerate() .map(|(i, (_, amount))| { let spend = SystemInstruction::Move { tokens: *amount }; Instruction::new(0, &spend, vec![0, i as u8 + 1]) }) .collect(); let to_keys: Vec<_> = moves.iter().map(|(to_key, _)| *to_key).collect(); Transaction::new_with_instructions( &[from], &to_keys, last_id, fee, vec![system_program::id()], instructions, ) } pub fn new_spawn(from_keypair: &Keypair, last_id: Hash, fee: u64) -> Transaction { let spawn = SystemInstruction::Spawn; Transaction::new( from_keypair, &[], system_program::id(), &spawn, last_id, fee, ) } } #[cfg(test)] mod tests { use super::*; use crate::signature::KeypairUtil; #[test] fn test_move_many() { let from = Keypair::new(); let t1 = Keypair::new(); let t2 = Keypair::new(); let moves = vec![(t1.pubkey(), 1), (t2.pubkey(), 2)]; let tx = SystemTransaction::new_move_many(&from, &moves, Hash::default(), 0); assert_eq!(tx.account_keys[0], from.pubkey()); assert_eq!(tx.account_keys[1], t1.pubkey()); assert_eq!(tx.account_keys[2], t2.pubkey()); assert_eq!(tx.instructions.len(), 2); assert_eq!(tx.instructions[0].accounts, vec![0, 1]); assert_eq!(tx.instructions[1].accounts, vec![0, 2]); } }
use crate::hash::Hash; use crate::pubkey::Pubkey; use crate::signature::Keypair; use crate::system_instruction::SystemInstruction; use crate::system_program; use crate::transaction::{Instruction, Transaction}; pub struct SystemTransaction {} impl SystemTransaction { pub fn new_program_account( from_keypair: &Keypair, to: Pubkey, last_id: Hash, tokens: u64, space: u64, program_id: Pubkey, fee: u64, ) -> Transaction { let create = SystemInstruction::CreateAccount { tokens, space, program_id, }; Transaction::new( from_keypair, &[to], system_program::id(), &create, last_id, fee, ) } pub fn new_account( from_keypair: &Keypair, to: Pubkey, tokens: u64, last_id: Hash, fee: u64, ) -> Transaction { let program_id = system_program::id(); Self::new_program_account(from_keypair, to, last_id, tokens, 0, program_id, fee) } pub fn new_assign( from_keypair: &Keypair, last_id: Hash, program_id: Pubkey, fee: u64, ) -> Transaction { let assign = SystemInstruction::Assign { program_id }; Transaction::new( from_keypair, &[], system_program::id(), &assign, last_id, fee, ) } pub fn new_move( from_keypair: &Keypair, to: Pubkey, tokens: u64,
pub fn new_move_many( from: &Keypair, moves: &[(Pubkey, u64)], last_id: Hash, fee: u64, ) -> Transaction { let instructions: Vec<_> = moves .iter() .enumerate() .map(|(i, (_, amount))| { let spend = SystemInstruction::Move { tokens: *amount }; Instruction::new(0, &spend, vec![0, i as u8 + 1]) }) .collect(); let to_keys: Vec<_> = moves.iter().map(|(to_key, _)| *to_key).collect(); Transaction::new_with_instructions( &[from], &to_keys, last_id, fee, vec![system_program::id()], instructions, ) } pub fn new_spawn(from_keypair: &Keypair, last_id: Hash, fee: u64) -> Transaction { let spawn = SystemInstruction::Spawn; Transaction::new( from_keypair, &[], system_program::id(), &spawn, last_id, fee, ) } } #[cfg(test)] mod tests { use super::*; use crate::signature::KeypairUtil; #[test] fn test_move_many() { let from = Keypair::new(); let t1 = Keypair::new(); let t2 = Keypair::new(); let moves = vec![(t1.pubkey(), 1), (t2.pubkey(), 2)]; let tx = SystemTransaction::new_move_many(&from, &moves, Hash::default(), 0); assert_eq!(tx.account_keys[0], from.pubkey()); assert_eq!(tx.account_keys[1], t1.pubkey()); assert_eq!(tx.account_keys[2], t2.pubkey()); assert_eq!(tx.instructions.len(), 2); assert_eq!(tx.instructions[0].accounts, vec![0, 1]); assert_eq!(tx.instructions[1].accounts, vec![0, 2]); } }
last_id: Hash, fee: u64, ) -> Transaction { let move_tokens = SystemInstruction::Move { tokens }; Transaction::new( from_keypair, &[to], system_program::id(), &move_tokens, last_id, fee, ) }
function_block-function_prefix_line
[ { "content": "pub fn create_ticks(num_ticks: u64, mut hash: Hash) -> Vec<Entry> {\n\n let mut ticks = Vec::with_capacity(num_ticks as usize);\n\n for _ in 0..num_ticks {\n\n let new_tick = next_entry_mut(&mut hash, 1, vec![]);\n\n ticks.push(new_tick);\n\n }\n\n\n\n ticks\n\n}\n\n\n", "file_path": "src/entry.rs", "rank": 0, "score": 319733.81711837044 }, { "content": "/// Creates the next Tick or Transaction Entry `num_hashes` after `start_hash`.\n\npub fn next_entry(prev_id: &Hash, num_hashes: u64, transactions: Vec<Transaction>) -> Entry {\n\n assert!(num_hashes > 0 || transactions.is_empty());\n\n Entry {\n\n tick_height: 0,\n\n num_hashes,\n\n id: next_hash(prev_id, num_hashes, &transactions),\n\n transactions,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::entry::Entry;\n\n use crate::packet::{to_blobs, BLOB_DATA_SIZE, PACKET_DATA_SIZE};\n\n use solana_sdk::hash::hash;\n\n use solana_sdk::signature::{Keypair, KeypairUtil};\n\n use solana_sdk::system_transaction::SystemTransaction;\n\n use std::net::{IpAddr, Ipv4Addr, SocketAddr};\n\n\n", "file_path": "src/entry.rs", "rank": 1, "score": 310190.85814791045 }, { "content": "pub fn next_entry_mut(start: &mut Hash, num_hashes: u64, transactions: Vec<Transaction>) -> Entry {\n\n let entry = Entry::new(&start, 0, num_hashes, transactions);\n\n *start = entry.id;\n\n entry\n\n}\n\n\n", "file_path": "src/entry.rs", "rank": 2, "score": 302626.80597998225 }, { "content": "/// Send loopback payment of 0 tokens and confirm the network processed it\n\npub fn send_barrier_transaction(barrier_client: &mut ThinClient, last_id: &mut Hash, id: &Keypair) {\n\n let transfer_start = Instant::now();\n\n\n\n let mut poll_count = 0;\n\n loop {\n\n if poll_count > 0 && poll_count % 8 == 0 {\n\n println!(\n\n \"polling for barrier transaction confirmation, attempt {}\",\n\n poll_count\n\n );\n\n }\n\n\n\n *last_id = barrier_client.get_last_id();\n\n let signature = barrier_client\n\n .transfer(0, &id, id.pubkey(), last_id)\n\n .expect(\"Unable to send barrier transaction\");\n\n\n\n let confirmatiom = barrier_client.poll_for_signature(&signature);\n\n let duration_ms = duration_as_ms(&transfer_start.elapsed());\n\n if confirmatiom.is_ok() {\n", "file_path": "bench-tps/src/bench.rs", "rank": 3, "score": 296376.1674656783 }, { "content": "/// fund the dests keys by spending all of the source keys into MAX_SPENDS_PER_TX\n\n/// on every iteration. This allows us to replay the transfers because the source is either empty,\n\n/// or full\n\npub fn fund_keys(client: &mut ThinClient, source: &Keypair, dests: &[Keypair], tokens: u64) {\n\n let total = tokens * dests.len() as u64;\n\n let mut funded: Vec<(&Keypair, u64)> = vec![(source, total)];\n\n let mut notfunded: Vec<&Keypair> = dests.iter().collect();\n\n\n\n println!(\"funding keys {}\", dests.len());\n\n while !notfunded.is_empty() {\n\n let mut new_funded: Vec<(&Keypair, u64)> = vec![];\n\n let mut to_fund = vec![];\n\n println!(\"creating from... {}\", funded.len());\n\n for f in &mut funded {\n\n let max_units = cmp::min(notfunded.len(), MAX_SPENDS_PER_TX);\n\n if max_units == 0 {\n\n break;\n\n }\n\n let start = notfunded.len() - max_units;\n\n let per_unit = f.1 / (max_units as u64);\n\n let moves: Vec<_> = notfunded[start..]\n\n .iter()\n\n .map(|k| (k.pubkey(), per_unit))\n", "file_path": "bench-tps/src/bench.rs", "rank": 4, "score": 296039.42531073646 }, { "content": "/// Creates the hash `num_hashes` after `start_hash`. If the transaction contains\n\n/// a signature, the final hash will be a hash of both the previous ID and\n\n/// the signature. If num_hashes is zero and there's no transaction data,\n\n/// start_hash is returned.\n\nfn next_hash(start_hash: &Hash, num_hashes: u64, transactions: &[Transaction]) -> Hash {\n\n if num_hashes == 0 && transactions.is_empty() {\n\n return *start_hash;\n\n }\n\n\n\n let mut poh = Poh::new(*start_hash, 0);\n\n\n\n for _ in 1..num_hashes {\n\n poh.hash();\n\n }\n\n\n\n if transactions.is_empty() {\n\n poh.tick().id\n\n } else {\n\n poh.record(Transaction::hash(transactions)).id\n\n }\n\n}\n\n\n", "file_path": "src/entry.rs", "rank": 5, "score": 284211.3710460027 }, { "content": "pub fn id() -> Pubkey {\n\n Pubkey::new(&TOKEN_PROGRAM_ID)\n\n}\n", "file_path": "sdk/src/token_program.rs", "rank": 6, "score": 277831.8151774457 }, { "content": "pub fn metrics_submit_token_balance(token_balance: u64) {\n\n println!(\"Token balance: {}\", token_balance);\n\n solana_metrics::submit(\n\n influxdb::Point::new(\"bench-tps\")\n\n .add_tag(\"op\", influxdb::Value::String(\"token_balance\".to_string()))\n\n .add_field(\"balance\", influxdb::Value::Integer(token_balance as i64))\n\n .to_owned(),\n\n );\n\n}\n\n\n", "file_path": "bench-tps/src/bench.rs", "rank": 7, "score": 264219.691036675 }, { "content": "pub fn sample_file(in_path: &Path, sample_offsets: &[u64]) -> io::Result<Hash> {\n\n let in_file = File::open(in_path)?;\n\n let metadata = in_file.metadata()?;\n\n let mut buffer_file = BufReader::new(in_file);\n\n\n\n let mut hasher = Hasher::default();\n\n let sample_size = size_of::<Hash>();\n\n let sample_size64 = sample_size as u64;\n\n let mut buf = vec![0; sample_size];\n\n\n\n let file_len = metadata.len();\n\n if file_len < sample_size64 {\n\n return Err(Error::new(ErrorKind::Other, \"file too short!\"));\n\n }\n\n for offset in sample_offsets {\n\n if *offset > (file_len - sample_size64) / sample_size64 {\n\n return Err(Error::new(ErrorKind::Other, \"offset too large\"));\n\n }\n\n buffer_file.seek(SeekFrom::Start(*offset * sample_size64))?;\n\n trace!(\"sampling @ {} \", *offset);\n", "file_path": "src/replicator.rs", "rank": 8, "score": 257965.1331772106 }, { "content": "// First transfer 3/4 of the tokens to the dest accounts\n\n// then ping-pong 1/4 of the tokens back to the other account\n\n// this leaves 1/4 token buffer in each account\n\npub fn should_switch_directions(num_tokens_per_account: u64, i: u64) -> bool {\n\n i % (num_tokens_per_account / 4) == 0 && (i >= (3 * num_tokens_per_account) / 4)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n #[test]\n\n fn test_switch_directions() {\n\n assert_eq!(should_switch_directions(20, 0), false);\n\n assert_eq!(should_switch_directions(20, 1), false);\n\n assert_eq!(should_switch_directions(20, 14), false);\n\n assert_eq!(should_switch_directions(20, 15), true);\n\n assert_eq!(should_switch_directions(20, 16), false);\n\n assert_eq!(should_switch_directions(20, 19), false);\n\n assert_eq!(should_switch_directions(20, 20), true);\n\n assert_eq!(should_switch_directions(20, 21), false);\n\n assert_eq!(should_switch_directions(20, 99), false);\n\n assert_eq!(should_switch_directions(20, 100), true);\n\n assert_eq!(should_switch_directions(20, 101), false);\n\n }\n\n}\n", "file_path": "bench-tps/src/bench.rs", "rank": 9, "score": 255223.46737248794 }, { "content": "/// Return a Sha256 hash for the given data.\n\npub fn hash(val: &[u8]) -> Hash {\n\n hashv(&[val])\n\n}\n\n\n", "file_path": "sdk/src/hash.rs", "rank": 10, "score": 248036.01398365363 }, { "content": "pub fn check_id(program_id: &Pubkey) -> bool {\n\n program_id.as_ref() == NATIVE_LOADER_PROGRAM_ID\n\n}\n\n\n", "file_path": "sdk/src/native_loader.rs", "rank": 11, "score": 244270.59580223297 }, { "content": "pub fn check_id(program_id: &Pubkey) -> bool {\n\n program_id.as_ref() == BUDGET_PROGRAM_ID\n\n}\n", "file_path": "sdk/src/budget_program.rs", "rank": 12, "score": 244270.59580223297 }, { "content": "pub fn check_id(program_id: &Pubkey) -> bool {\n\n program_id.as_ref() == SYSTEM_PROGRAM_ID\n\n}\n", "file_path": "sdk/src/system_program.rs", "rank": 13, "score": 244270.59580223297 }, { "content": "pub fn check_id(program_id: &Pubkey) -> bool {\n\n program_id.as_ref() == VOTE_PROGRAM_ID\n\n}\n\n\n", "file_path": "sdk/src/vote_program.rs", "rank": 14, "score": 244270.59580223297 }, { "content": "pub fn check_id(program_id: &Pubkey) -> bool {\n\n program_id.as_ref() == STORAGE_PROGRAM_ID\n\n}\n\n\n", "file_path": "sdk/src/storage_program.rs", "rank": 15, "score": 244270.59580223297 }, { "content": "pub fn timestamp() -> u64 {\n\n let now = SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .expect(\"create timestamp in timing\");\n\n duration_as_ms(&now)\n\n}\n", "file_path": "sdk/src/timing.rs", "rank": 16, "score": 236682.44683230144 }, { "content": "/// Return the hash of the given hash extended with the given value.\n\npub fn extend_and_hash(id: &Hash, val: &[u8]) -> Hash {\n\n let mut hash_data = id.as_ref().to_vec();\n\n hash_data.extend_from_slice(val);\n\n hash(&hash_data)\n\n}\n", "file_path": "sdk/src/hash.rs", "rank": 17, "score": 235934.99855841257 }, { "content": "pub fn id() -> Pubkey {\n\n Pubkey::new(&BUDGET_PROGRAM_ID)\n\n}\n\n\n", "file_path": "sdk/src/budget_program.rs", "rank": 18, "score": 232885.45559252056 }, { "content": "pub fn id() -> Pubkey {\n\n Pubkey::new(&STORAGE_PROGRAM_ID)\n\n}\n\n\n", "file_path": "sdk/src/storage_program.rs", "rank": 19, "score": 232885.45559252053 }, { "content": "pub fn id() -> Pubkey {\n\n Pubkey::new(&NATIVE_LOADER_PROGRAM_ID)\n\n}\n\n\n", "file_path": "sdk/src/native_loader.rs", "rank": 20, "score": 232885.45559252053 }, { "content": "pub fn id() -> Pubkey {\n\n Pubkey::new(&SYSTEM_PROGRAM_ID)\n\n}\n\n\n", "file_path": "sdk/src/system_program.rs", "rank": 21, "score": 232885.45559252053 }, { "content": "pub fn id() -> Pubkey {\n\n Pubkey::new(&BPF_LOADER_PROGRAM_ID)\n\n}\n", "file_path": "sdk/src/bpf_loader.rs", "rank": 22, "score": 232885.45559252053 }, { "content": "pub fn id() -> Pubkey {\n\n Pubkey::new(&VOTE_PROGRAM_ID)\n\n}\n\n\n\n// Maximum number of votes to keep around\n\nconst MAX_VOTE_HISTORY: usize = 32;\n\n\n\n#[derive(Serialize, Default, Deserialize, Debug, PartialEq, Eq, Clone)]\n\npub struct Vote {\n\n // TODO: add signature of the state here as well\n\n /// A vote for height tick_height\n\n pub tick_height: u64,\n\n}\n\n\n\nimpl Vote {\n\n pub fn new(tick_height: u64) -> Self {\n\n Self { tick_height }\n\n }\n\n}\n\n\n", "file_path": "sdk/src/vote_program.rs", "rank": 23, "score": 232885.45559252056 }, { "content": "/// Return a Sha256 hash for the given data.\n\npub fn hashv(vals: &[&[u8]]) -> Hash {\n\n let mut hasher = Hasher::default();\n\n hasher.hashv(vals);\n\n hasher.result()\n\n}\n\n\n", "file_path": "sdk/src/hash.rs", "rank": 24, "score": 232751.71282647736 }, { "content": "pub fn get_segment_from_entry(entry_height: u64) -> u64 {\n\n entry_height / ENTRIES_PER_SEGMENT\n\n}\n\n\n", "file_path": "src/storage_stage.rs", "rank": 25, "score": 232740.695213195 }, { "content": "pub fn system_id() -> Pubkey {\n\n Pubkey::new(&STORAGE_SYSTEM_ACCOUNT_ID)\n\n}\n\n\n\npub struct StorageTransaction {}\n\n\n\nimpl StorageTransaction {\n\n pub fn new_mining_proof(\n\n from_keypair: &Keypair,\n\n sha_state: Hash,\n\n last_id: Hash,\n\n entry_height: u64,\n\n signature: Signature,\n\n ) -> Transaction {\n\n let program = StorageProgram::SubmitMiningProof {\n\n sha_state,\n\n entry_height,\n\n signature,\n\n };\n\n Transaction::new(\n", "file_path": "sdk/src/storage_program.rs", "rank": 26, "score": 229266.65654161078 }, { "content": "pub fn create_keyed_accounts(accounts: &mut [(Pubkey, Account)]) -> Vec<KeyedAccount> {\n\n accounts.iter_mut().map(Into::into).collect()\n\n}\n", "file_path": "sdk/src/account.rs", "rank": 27, "score": 228995.12763107294 }, { "content": "fn check_balance(expected_balance: u64, client: &RpcClient, pubkey: Pubkey) {\n\n let balance = client.retry_get_balance(1, pubkey, 1).unwrap().unwrap();\n\n assert_eq!(balance, expected_balance);\n\n}\n\n\n", "file_path": "wallet/tests/pay.rs", "rank": 28, "score": 222668.66915167798 }, { "content": "pub fn duration_as_us(d: &Duration) -> u64 {\n\n (d.as_secs() * 1000 * 1000) + (u64::from(d.subsec_nanos()) / 1_000)\n\n}\n\n\n", "file_path": "sdk/src/timing.rs", "rank": 29, "score": 220261.3188307904 }, { "content": "pub fn duration_as_ms(d: &Duration) -> u64 {\n\n (d.as_secs() * 1000) + (u64::from(d.subsec_nanos()) / 1_000_000)\n\n}\n\n\n", "file_path": "sdk/src/timing.rs", "rank": 30, "score": 220261.3188307904 }, { "content": "pub fn helper_sol_log_u64(arg1: u64, arg2: u64, arg3: u64, arg4: u64, arg5: u64) -> u64 {\n\n info!(\n\n \"sol_log_u64: {:#x}, {:#x}, {:#x}, {:#x}, {:#x}\",\n\n arg1, arg2, arg3, arg4, arg5\n\n );\n\n 0\n\n}\n\n\n", "file_path": "programs/native/bpf_loader/src/lib.rs", "rank": 31, "score": 217013.75398123736 }, { "content": "pub fn helper_sol_log(addr: u64, _arg2: u64, _arg3: u64, _arg4: u64, _arg5: u64) -> u64 {\n\n let c_buf: *const c_char = addr as *const c_char;\n\n let c_str: &CStr = unsafe { CStr::from_ptr(c_buf) };\n\n match c_str.to_str() {\n\n Ok(slice) => info!(\"sol_log: {:?}\", slice),\n\n Err(e) => warn!(\"Error: Cannot print invalid string: {}\", e),\n\n };\n\n 0\n\n}\n\n\n", "file_path": "programs/native/bpf_loader/src/lib.rs", "rank": 32, "score": 216875.45709206985 }, { "content": "/// Helper function that prints a 64 bit values represented in hexadecimal\n\n/// to stdout\n\npub fn sol_log_64(arg1: u64, arg2: u64, arg3: u64, arg4: u64, arg5: u64) {\n\n unsafe {\n\n sol_log_64_(arg1, arg2, arg3, arg4, arg5);\n\n }\n\n}\n\n\n\n/// Prints the hexadecimal representation of a public key\n\n///\n\n/// @param key The public key to print\n", "file_path": "programs/bpf/rust/noop/src/solana_sdk.rs", "rank": 33, "score": 216559.543893964 }, { "content": "fn slice_hash(slice: &[u8], hash_index: u64) -> u64 {\n\n let mut hasher = FnvHasher::with_key(hash_index);\n\n hasher.write(slice);\n\n hasher.finish()\n\n}\n\n\n\nimpl<T: AsRef<[u8]>> BloomHashIndex for T {\n\n fn hash_at_index(&self, hash_index: u64) -> u64 {\n\n slice_hash(self.as_ref(), hash_index)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use solana_sdk::hash::{hash, Hash};\n\n\n\n #[test]\n\n fn test_bloom_filter() {\n\n //empty\n", "file_path": "src/bloom.rs", "rank": 34, "score": 215246.0779514464 }, { "content": "pub fn verify_funding_transfer(client: &mut ThinClient, tx: &Transaction, amount: u64) -> bool {\n\n for a in &tx.account_keys[1..] {\n\n if client.get_balance(a).unwrap_or(0) >= amount {\n\n return true;\n\n }\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "bench-tps/src/bench.rs", "rank": 35, "score": 213224.52369020486 }, { "content": "pub fn index_blobs(blobs: &[SharedBlob], blob_index: &mut u64, slots: &[u64]) {\n\n // enumerate all the blobs, those are the indices\n\n for (blob, slot) in blobs.iter().zip(slots) {\n\n let mut blob = blob.write().unwrap();\n\n\n\n blob.set_index(*blob_index);\n\n blob.set_slot(*slot);\n\n blob.forward(true);\n\n *blob_index += 1;\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::packet::{\n\n to_packets, Blob, Meta, Packet, Packets, SharedBlob, SharedPackets, NUM_PACKETS,\n\n PACKET_DATA_SIZE,\n\n };\n\n use solana_sdk::hash::Hash;\n\n use solana_sdk::signature::{Keypair, KeypairUtil};\n", "file_path": "src/packet.rs", "rank": 36, "score": 211499.79260462313 }, { "content": "// For integration tests. Listens on random open port and reports port to Sender.\n\npub fn run_local_drone(mint_keypair: Keypair, sender: Sender<SocketAddr>) {\n\n thread::spawn(move || {\n\n let drone_addr = socketaddr!(0, 0);\n\n let drone = Arc::new(Mutex::new(Drone::new(mint_keypair, None, None)));\n\n let socket = TcpListener::bind(&drone_addr).unwrap();\n\n sender.send(socket.local_addr().unwrap()).unwrap();\n\n info!(\"Drone started. Listening on: {}\", drone_addr);\n\n let done = socket\n\n .incoming()\n\n .map_err(|e| debug!(\"failed to accept socket; error = {:?}\", e))\n\n .for_each(move |socket| {\n\n let drone2 = drone.clone();\n\n let framed = BytesCodec::new().framed(socket);\n\n let (writer, reader) = framed.split();\n\n\n\n let processor = reader.and_then(move |bytes| {\n\n let response_bytes = drone2\n\n .lock()\n\n .unwrap()\n\n .process_drone_request(&bytes)\n", "file_path": "drone/src/drone.rs", "rank": 37, "score": 208002.7744305836 }, { "content": "pub fn get_segment_from_entry(entry_height: u64) -> usize {\n\n (entry_height / ENTRIES_PER_SEGMENT) as usize\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub enum ProofStatus {\n\n Valid,\n\n NotValid,\n\n Skipped,\n\n}\n\n\n\n#[derive(Default, Debug, Serialize, Deserialize, Clone)]\n\npub struct ProofInfo {\n\n pub id: Pubkey,\n\n pub signature: Signature,\n\n pub sha_state: Hash,\n\n}\n\n\n\n#[derive(Default, Debug, Serialize, Deserialize, Clone)]\n\npub struct ValidationInfo {\n", "file_path": "sdk/src/storage_program.rs", "rank": 38, "score": 207552.39720214324 }, { "content": "pub fn get_balance(account: &Account) -> u64 {\n\n if let Ok(program) = deserialize(&account.userdata) {\n\n let program: BudgetProgram = program;\n\n if program.is_pending() {\n\n 0\n\n } else {\n\n account.tokens\n\n }\n\n } else {\n\n account.tokens\n\n }\n\n}\n\n*/\n\n\n\nimpl BudgetProgram {\n\n fn is_pending(&self) -> bool {\n\n self.pending_budget != None\n\n }\n\n /// Process a Witness Signature. Any payment plans waiting on this signature\n\n /// will progress one step.\n", "file_path": "programs/native/budget/src/budget_program.rs", "rank": 39, "score": 207552.39720214324 }, { "content": "#[cfg(test)]\n\npub fn verify(initial: Hash, entries: &[PohEntry]) -> bool {\n\n let mut id = initial;\n\n\n\n for entry in entries {\n\n assert!(entry.num_hashes != 0);\n\n\n\n for _ in 1..entry.num_hashes {\n\n id = hash(&id.as_ref());\n\n }\n\n id = match entry.mixin {\n\n Some(mixin) => hashv(&[&id.as_ref(), &mixin.as_ref()]),\n\n None => hash(&id.as_ref()),\n\n };\n\n if id != entry.id {\n\n return false;\n\n }\n\n }\n\n\n\n true\n\n}\n", "file_path": "src/poh.rs", "rank": 40, "score": 206267.25776558265 }, { "content": "fn load_program(bank: &Bank, from: &Keypair, loader_id: Pubkey, program: Vec<u8>) -> Pubkey {\n\n let program_account = Keypair::new();\n\n\n\n let tx = SystemTransaction::new_program_account(\n\n from,\n\n program_account.pubkey(),\n\n bank.last_id(),\n\n 1,\n\n program.len() as u64,\n\n loader_id,\n\n 0,\n\n );\n\n bank.process_transaction(&tx).unwrap();\n\n\n\n let chunk_size = 256; // Size of chunk just needs to fit into tx\n\n let mut offset = 0;\n\n for chunk in program.chunks(chunk_size) {\n\n let tx = LoaderTransaction::new_write(\n\n &program_account,\n\n loader_id,\n", "file_path": "programs/tests/programs.rs", "rank": 41, "score": 206108.24755171663 }, { "content": "pub fn read_keypair(path: &str) -> Result<Keypair, Box<error::Error>> {\n\n let pkcs8 = read_pkcs8(path)?;\n\n let keypair = Ed25519KeyPair::from_pkcs8(Input::from(&pkcs8))?;\n\n Ok(keypair)\n\n}\n\n\n", "file_path": "sdk/src/signature.rs", "rank": 42, "score": 204229.3522450581 }, { "content": "#[allow(clippy::trivially_copy_pass_by_ref)]\n\npub fn create_new_ledger(\n\n ledger_path: &str,\n\n genesis_block: &GenesisBlock,\n\n config: &BlocktreeConfig,\n\n) -> Result<(u64, u64, Hash)> {\n\n Blocktree::destroy(ledger_path)?;\n\n genesis_block.write(&ledger_path)?;\n\n\n\n // Add a single tick linked back to the genesis_block to bootstrap the ledger\n\n let blocktree = Blocktree::open_config(ledger_path, config)?;\n\n let entries = crate::entry::create_ticks(1, genesis_block.last_id());\n\n blocktree.write_entries(DEFAULT_SLOT_HEIGHT, 0, 0, &entries)?;\n\n\n\n Ok((1, 1, entries[0].id))\n\n}\n\n\n", "file_path": "src/blocktree.rs", "rank": 43, "score": 203170.0398207039 }, { "content": "/// Execute a transaction.\n\n/// This method calls each instruction in the transaction over the set of loaded Accounts\n\n/// The accounts are committed back to the bank only if every instruction succeeds\n\npub fn execute_transaction(\n\n tx: &Transaction,\n\n loaders: &mut [Vec<(Pubkey, Account)>],\n\n tx_accounts: &mut [Account],\n\n tick_height: u64,\n\n) -> Result<(), RuntimeError> {\n\n for (instruction_index, instruction) in tx.instructions.iter().enumerate() {\n\n let executable_accounts = &mut (&mut loaders[instruction.program_ids_index as usize]);\n\n let mut program_accounts = get_subset_unchecked_mut(tx_accounts, &instruction.accounts);\n\n execute_instruction(\n\n tx,\n\n instruction_index,\n\n executable_accounts,\n\n &mut program_accounts,\n\n tick_height,\n\n )\n\n .map_err(|err| RuntimeError::ProgramError(instruction_index as u8, err))?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "runtime/src/lib.rs", "rank": 44, "score": 203016.1246946562 }, { "content": "#[allow(clippy::trivially_copy_pass_by_ref)]\n\npub fn create_tmp_sample_ledger(\n\n name: &str,\n\n num_tokens: u64,\n\n num_extra_ticks: u64,\n\n bootstrap_leader_id: Pubkey,\n\n bootstrap_leader_tokens: u64,\n\n config: &BlocktreeConfig,\n\n) -> (Keypair, String, u64, u64, Hash, Hash) {\n\n let (genesis_block, mint_keypair) =\n\n GenesisBlock::new_with_leader(num_tokens, bootstrap_leader_id, bootstrap_leader_tokens);\n\n let ledger_path = get_tmp_ledger_path(name);\n\n let (mut entry_height, mut tick_height, mut last_entry_id) =\n\n create_new_ledger(&ledger_path, &genesis_block, config).unwrap();\n\n\n\n let mut last_id = genesis_block.last_id();\n\n if num_extra_ticks > 0 {\n\n let entries = crate::entry::create_ticks(num_extra_ticks, last_entry_id);\n\n\n\n let blocktree = Blocktree::open_config(&ledger_path, config).unwrap();\n\n\n", "file_path": "src/blocktree.rs", "rank": 45, "score": 199343.26630108812 }, { "content": "pub fn airdrop_tokens(\n\n client: &mut ThinClient,\n\n drone_addr: &SocketAddr,\n\n id: &Keypair,\n\n tx_count: u64,\n\n) {\n\n let starting_balance = client.poll_get_balance(&id.pubkey()).unwrap_or(0);\n\n metrics_submit_token_balance(starting_balance);\n\n println!(\"starting balance {}\", starting_balance);\n\n\n\n if starting_balance < tx_count {\n\n let airdrop_amount = tx_count - starting_balance;\n\n println!(\n\n \"Airdropping {:?} tokens from {} for {}\",\n\n airdrop_amount,\n\n drone_addr,\n\n id.pubkey(),\n\n );\n\n\n\n let last_id = client.get_last_id();\n", "file_path": "bench-tps/src/bench.rs", "rank": 46, "score": 199293.434477838 }, { "content": "pub fn request_airdrop_transaction(\n\n _drone_addr: &SocketAddr,\n\n _id: &Pubkey,\n\n tokens: u64,\n\n _last_id: Hash,\n\n) -> Result<Transaction, Error> {\n\n if tokens == 0 {\n\n Err(Error::new(ErrorKind::Other, \"Airdrop failed\"))?\n\n }\n\n let key = Keypair::new();\n\n let to = Keypair::new().pubkey();\n\n let last_id = Hash::default();\n\n let tx = SystemTransaction::new_account(&key, to, 50, last_id, 0);\n\n Ok(tx)\n\n}\n", "file_path": "src/rpc_mock.rs", "rank": 47, "score": 199187.71447779477 }, { "content": "pub fn request_airdrop_transaction(\n\n drone_addr: &SocketAddr,\n\n id: &Pubkey,\n\n tokens: u64,\n\n last_id: Hash,\n\n) -> Result<Transaction, Error> {\n\n info!(\n\n \"request_airdrop_transaction: drone_addr={} id={} tokens={} last_id={}\",\n\n drone_addr, id, tokens, last_id\n\n );\n\n // TODO: make this async tokio client\n\n let mut stream = TcpStream::connect_timeout(drone_addr, Duration::new(3, 0))?;\n\n stream.set_read_timeout(Some(Duration::new(10, 0)))?;\n\n let req = DroneRequest::GetAirdrop {\n\n tokens,\n\n last_id,\n\n to: *id,\n\n };\n\n let req = serialize(&req).expect(\"serialize drone request\");\n\n stream.write_all(&req)?;\n", "file_path": "drone/src/drone.rs", "rank": 48, "score": 199187.71447779477 }, { "content": "pub fn reconstruct_entries_from_blobs<I>(blobs: I) -> Result<(Vec<Entry>, u64)>\n\nwhere\n\n I: IntoIterator,\n\n I::Item: Borrow<Blob>,\n\n{\n\n let mut entries: Vec<Entry> = vec![];\n\n let mut num_ticks = 0;\n\n\n\n for blob in blobs.into_iter() {\n\n let entry: Entry = {\n\n let msg_size = blob.borrow().size();\n\n deserialize(&blob.borrow().data()[..msg_size])?\n\n };\n\n\n\n if entry.is_tick() {\n\n num_ticks += 1\n\n }\n\n entries.push(entry)\n\n }\n\n Ok((entries, num_ticks))\n\n}\n\n\n", "file_path": "src/entry.rs", "rank": 49, "score": 196669.87939748075 }, { "content": "pub fn test_tx() -> Transaction {\n\n let keypair1 = Keypair::new();\n\n let pubkey1 = keypair1.pubkey();\n\n let zero = Hash::default();\n\n SystemTransaction::new_account(&keypair1, pubkey1, 42, zero, 0)\n\n}\n", "file_path": "src/test_tx.rs", "rank": 50, "score": 195923.61712987148 }, { "content": "#[cfg(test)]\n\npub fn make_packet_from_transaction(tx: Transaction) -> Packet {\n\n use bincode::serialize;\n\n\n\n let tx_bytes = serialize(&tx).unwrap();\n\n let mut packet = Packet::default();\n\n packet.meta.size = tx_bytes.len();\n\n packet.data[..packet.meta.size].copy_from_slice(&tx_bytes);\n\n return packet;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::packet::{Packet, SharedPackets};\n\n use crate::sigverify;\n\n use crate::test_tx::test_tx;\n\n use bincode::{deserialize, serialize};\n\n use solana_sdk::budget_program;\n\n use solana_sdk::hash::Hash;\n\n use solana_sdk::signature::{Keypair, KeypairUtil};\n\n use solana_sdk::system_instruction::SystemInstruction;\n", "file_path": "src/sigverify.rs", "rank": 51, "score": 195702.5558286028 }, { "content": "pub fn make_tiny_test_entries_from_id(start: &Hash, num: usize) -> Vec<Entry> {\n\n let keypair = Keypair::new();\n\n\n\n let mut id = *start;\n\n let mut num_hashes = 0;\n\n (0..num)\n\n .map(|_| {\n\n Entry::new_mut(\n\n &mut id,\n\n &mut num_hashes,\n\n vec![BudgetTransaction::new_timestamp(\n\n &keypair,\n\n keypair.pubkey(),\n\n keypair.pubkey(),\n\n Utc::now(),\n\n *start,\n\n )],\n\n )\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/entry.rs", "rank": 52, "score": 191470.0357977643 }, { "content": "pub fn recv_batch(recvr: &PacketReceiver) -> Result<(Vec<SharedPackets>, usize, u64)> {\n\n let timer = Duration::new(1, 0);\n\n let msgs = recvr.recv_timeout(timer)?;\n\n let recv_start = Instant::now();\n\n trace!(\"got msgs\");\n\n let mut len = msgs.read().unwrap().packets.len();\n\n let mut batch = vec![msgs];\n\n while let Ok(more) = recvr.try_recv() {\n\n trace!(\"got more msgs\");\n\n len += more.read().unwrap().packets.len();\n\n batch.push(more);\n\n\n\n if len > 100_000 {\n\n break;\n\n }\n\n }\n\n trace!(\"batch len {}\", batch.len());\n\n Ok((batch, len, duration_as_ms(&recv_start.elapsed())))\n\n}\n\n\n", "file_path": "src/streamer.rs", "rank": 53, "score": 188263.91013868962 }, { "content": "pub fn new_fullnode(ledger_name: &'static str) -> (Fullnode, NodeInfo, Keypair, String) {\n\n use crate::blocktree::create_tmp_sample_ledger;\n\n use crate::cluster_info::Node;\n\n use crate::fullnode::Fullnode;\n\n use crate::voting_keypair::VotingKeypair;\n\n use solana_sdk::signature::KeypairUtil;\n\n\n\n let node_keypair = Arc::new(Keypair::new());\n\n let node = Node::new_localhost_with_pubkey(node_keypair.pubkey());\n\n let node_info = node.info.clone();\n\n\n\n let fullnode_config = &FullnodeConfig::default();\n\n let (mint_keypair, ledger_path, _tick_height, _last_entry_height, _last_id, _last_entry_id) =\n\n create_tmp_sample_ledger(\n\n ledger_name,\n\n 10_000,\n\n 0,\n\n node_info.id,\n\n 42,\n\n &fullnode_config.ledger_config(),\n", "file_path": "src/thin_client.rs", "rank": 54, "score": 185963.94939006615 }, { "content": "pub fn poll_gossip_for_leader(leader_gossip: SocketAddr, timeout: Option<u64>) -> Result<NodeInfo> {\n\n let exit = Arc::new(AtomicBool::new(false));\n\n let (node, gossip_socket) = ClusterInfo::spy_node();\n\n let my_addr = gossip_socket.local_addr().unwrap();\n\n let cluster_info = Arc::new(RwLock::new(ClusterInfo::new(node)));\n\n let gossip_service =\n\n GossipService::new(&cluster_info.clone(), None, gossip_socket, exit.clone());\n\n\n\n let leader_entry_point = NodeInfo::new_entry_point(&leader_gossip);\n\n cluster_info\n\n .write()\n\n .unwrap()\n\n .insert_info(leader_entry_point);\n\n\n\n sleep(Duration::from_millis(100));\n\n\n\n let deadline = match timeout {\n\n Some(timeout) => Duration::new(timeout, 0),\n\n None => Duration::new(std::u64::MAX, 0),\n\n };\n", "file_path": "src/thin_client.rs", "rank": 55, "score": 180955.0538604485 }, { "content": "pub fn make_spy_node(leader: &NodeInfo) -> (GossipService, Arc<RwLock<ClusterInfo>>, Pubkey) {\n\n let keypair = Keypair::new();\n\n let exit = Arc::new(AtomicBool::new(false));\n\n let mut spy = Node::new_localhost_with_pubkey(keypair.pubkey());\n\n let id = spy.info.id;\n\n let daddr = \"0.0.0.0:0\".parse().unwrap();\n\n spy.info.tvu = daddr;\n\n spy.info.rpc = daddr;\n\n let mut spy_cluster_info = ClusterInfo::new_with_keypair(spy.info, Arc::new(keypair));\n\n spy_cluster_info.insert_info(leader.clone());\n\n spy_cluster_info.set_leader(leader.id);\n\n let spy_cluster_info_ref = Arc::new(RwLock::new(spy_cluster_info));\n\n let gossip_service = GossipService::new(\n\n &spy_cluster_info_ref,\n\n None,\n\n spy.sockets.gossip,\n\n exit.clone(),\n\n );\n\n\n\n (gossip_service, spy_cluster_info_ref, id)\n", "file_path": "src/gossip_service.rs", "rank": 56, "score": 178748.4455674465 }, { "content": "/// Create an executable account with the given shared object name.\n\npub fn create_program_account(name: &str) -> Account {\n\n Account {\n\n tokens: 1,\n\n owner: id(),\n\n userdata: name.as_bytes().to_vec(),\n\n executable: true,\n\n loader: id(),\n\n }\n\n}\n", "file_path": "sdk/src/native_loader.rs", "rank": 57, "score": 177051.52248481085 }, { "content": "struct RpcSolPubSubImpl {\n\n uid: Arc<atomic::AtomicUsize>,\n\n bank: Arc<RwLock<RpcPubSubBank>>,\n\n subscription: Arc<RpcSubscriptions>,\n\n}\n\n\n\nimpl RpcSolPubSubImpl {\n\n fn new(bank: Arc<RwLock<RpcPubSubBank>>) -> Self {\n\n RpcSolPubSubImpl {\n\n uid: Arc::new(atomic::AtomicUsize::default()),\n\n bank,\n\n subscription: Arc::new(RpcSubscriptions::default()),\n\n }\n\n }\n\n\n\n fn subscribe_to_account_updates(&self, subscriber: Subscriber<Account>, pubkey_str: String) {\n\n let pubkey_vec = bs58::decode(pubkey_str).into_vec().unwrap();\n\n if pubkey_vec.len() != mem::size_of::<Pubkey>() {\n\n subscriber\n\n .reject(Error {\n", "file_path": "src/rpc_pubsub.rs", "rank": 58, "score": 172065.19967579682 }, { "content": "#[allow(dead_code)]\n\npub fn sol_log_key(key: &SolPubkey) {\n\n for (i, k) in key.key.iter().enumerate() {\n\n sol_log_64(0, 0, 0, i as u64, u64::from(*k));\n\n }\n\n}\n\n\n\n/// Prints the hexadecimal representation of a slice\n\n///\n\n/// @param slice The array to print\n", "file_path": "programs/bpf/rust/noop/src/solana_sdk.rs", "rank": 59, "score": 171294.72267479007 }, { "content": "pub fn receiver(\n\n sock: Arc<UdpSocket>,\n\n exit: Arc<AtomicBool>,\n\n packet_sender: PacketSender,\n\n sender_tag: &'static str,\n\n) -> JoinHandle<()> {\n\n let res = sock.set_read_timeout(Some(Duration::new(1, 0)));\n\n if res.is_err() {\n\n panic!(\"streamer::receiver set_read_timeout error\");\n\n }\n\n Builder::new()\n\n .name(\"solana-receiver\".to_string())\n\n .spawn(move || {\n\n let _ = recv_loop(&sock, &exit, &packet_sender, sender_tag);\n\n })\n\n .unwrap()\n\n}\n\n\n", "file_path": "src/streamer.rs", "rank": 60, "score": 162429.66451311563 }, { "content": "#[cfg(feature = \"cuda\")]\n\npub fn init() {\n\n unsafe {\n\n ed25519_set_verbose(true);\n\n if !ed25519_init() {\n\n panic!(\"ed25519_init() failed\");\n\n }\n\n ed25519_set_verbose(false);\n\n }\n\n}\n\n\n", "file_path": "src/sigverify.rs", "rank": 61, "score": 162429.66451311563 }, { "content": "// Recover the missing data and coding blobs from the input ledger. Returns a vector\n\n// of the recovered missing data blobs and a vector of the recovered coding blobs\n\npub fn recover(\n\n blocktree: &Arc<Blocktree>,\n\n slot: u64,\n\n start_idx: u64,\n\n) -> Result<(Vec<SharedBlob>, Vec<SharedBlob>)> {\n\n let block_start_idx = start_idx - (start_idx % NUM_DATA as u64);\n\n\n\n debug!(\"block_start_idx: {}\", block_start_idx);\n\n\n\n let coding_start_idx = block_start_idx + NUM_DATA as u64 - NUM_CODING as u64;\n\n let block_end_idx = block_start_idx + NUM_DATA as u64;\n\n trace!(\n\n \"recover: coding_start_idx: {} block_end_idx: {}\",\n\n coding_start_idx,\n\n block_end_idx\n\n );\n\n\n\n let data_missing = blocktree\n\n .find_missing_data_indexes(slot, block_start_idx, block_end_idx, NUM_DATA)\n\n .len();\n", "file_path": "src/erasure.rs", "rank": 62, "score": 162429.66451311563 }, { "content": "//pub fn gen_keypair_file(outfile: String) -> Result<String, Box<dyn error::Error>> {\n\npub fn gen_keypair_file(outfile: String) -> Result<String, Box<error::Error>> {\n\n let serialized = serde_json::to_string(&gen_pkcs8()?)?;\n\n\n\n if outfile != \"-\" {\n\n if let Some(outdir) = Path::new(&outfile).parent() {\n\n fs::create_dir_all(outdir)?;\n\n }\n\n let mut f = File::create(outfile)?;\n\n f.write_all(&serialized.clone().into_bytes())?;\n\n }\n\n Ok(serialized)\n\n}\n", "file_path": "sdk/src/signature.rs", "rank": 63, "score": 159964.42569904358 }, { "content": "/// Creates the next Entries for given transactions\n\npub fn next_entries(\n\n start_hash: &Hash,\n\n num_hashes: u64,\n\n transactions: Vec<Transaction>,\n\n) -> Vec<Entry> {\n\n let mut id = *start_hash;\n\n let mut num_hashes = num_hashes;\n\n next_entries_mut(&mut id, &mut num_hashes, transactions)\n\n}\n\n\n", "file_path": "src/entry.rs", "rank": 64, "score": 159588.02310325307 }, { "content": "/// Blocks until all pending points from previous calls to `submit` have been\n\n/// transmitted.\n\npub fn flush() {\n\n let agent_mutex = get_singleton_agent();\n\n let agent = agent_mutex.lock().unwrap();\n\n agent.flush();\n\n}\n\n\n", "file_path": "metrics/src/metrics.rs", "rank": 65, "score": 159576.85238240485 }, { "content": "// Recover data + coding blocks into data blocks\n\n// data: array of blocks to recover into\n\n// coding: arry of coding blocks\n\n// erasures: list of indices in data where blocks should be recovered\n\npub fn decode_blocks(\n\n data: &mut [&mut [u8]],\n\n coding: &mut [&mut [u8]],\n\n erasures: &[i32],\n\n) -> Result<()> {\n\n if data.is_empty() {\n\n return Ok(());\n\n }\n\n let block_len = data[0].len();\n\n let matrix: Vec<i32> = get_matrix(coding.len() as i32, data.len() as i32, ERASURE_W);\n\n\n\n // generate coding pointers, blocks should be the same size\n\n let mut coding_arg: Vec<*mut u8> = Vec::new();\n\n for x in coding.iter_mut() {\n\n if x.len() != block_len {\n\n return Err(Error::ErasureError(ErasureError::InvalidBlockSize));\n\n }\n\n coding_arg.push(x.as_mut_ptr());\n\n }\n\n\n", "file_path": "src/erasure.rs", "rank": 66, "score": 159576.85238240485 }, { "content": "/// Setup function that is only run once, even if called multiple times.\n\npub fn setup() {\n\n INIT.call_once(|| {\n\n env_logger::Builder::from_default_env()\n\n .default_format_timestamp_nanos(true)\n\n .init();\n\n });\n\n}\n", "file_path": "logger/src/lib.rs", "rank": 67, "score": 159576.85238240485 }, { "content": "pub fn create_vm(prog: &[u8]) -> Result<EbpfVmRaw, Error> {\n\n let mut vm = EbpfVmRaw::new(None)?;\n\n vm.set_verifier(bpf_verifier::check)?;\n\n vm.set_max_instruction_count(36000)?; // 36000 is a wag, need to tune\n\n vm.set_elf(&prog)?;\n\n vm.register_helper_ex(\"sol_log\", Some(helper_sol_log_verify), helper_sol_log)?;\n\n vm.register_helper_ex(\"sol_log_\", Some(helper_sol_log_verify), helper_sol_log)?;\n\n vm.register_helper_ex(\"sol_log_64\", None, helper_sol_log_u64)?;\n\n vm.register_helper_ex(\"sol_log_64_\", None, helper_sol_log_u64)?;\n\n Ok(vm)\n\n}\n\n\n", "file_path": "programs/native/bpf_loader/src/lib.rs", "rank": 68, "score": 158952.22337129366 }, { "content": "fn verify_pubkey(input: String) -> Result<Pubkey> {\n\n let pubkey_vec = bs58::decode(input).into_vec().map_err(|err| {\n\n info!(\"verify_pubkey: invalid input: {:?}\", err);\n\n Error::invalid_request()\n\n })?;\n\n if pubkey_vec.len() != mem::size_of::<Pubkey>() {\n\n info!(\n\n \"verify_pubkey: invalid pubkey_vec length: {}\",\n\n pubkey_vec.len()\n\n );\n\n Err(Error::invalid_request())\n\n } else {\n\n Ok(Pubkey::new(&pubkey_vec))\n\n }\n\n}\n\n\n", "file_path": "src/rpc.rs", "rank": 69, "score": 158061.48737186717 }, { "content": "/// Creates the next entries for given transactions, outputs\n\n/// updates start_hash to id of last Entry, sets num_hashes to 0\n\npub fn next_entries_mut(\n\n start_hash: &mut Hash,\n\n num_hashes: &mut u64,\n\n transactions: Vec<Transaction>,\n\n) -> Vec<Entry> {\n\n // TODO: ?? find a number that works better than |?\n\n // V\n\n if transactions.is_empty() || transactions.len() == 1 {\n\n vec![Entry::new_mut(start_hash, num_hashes, transactions)]\n\n } else {\n\n let mut chunk_start = 0;\n\n let mut entries = Vec::new();\n\n\n\n while chunk_start < transactions.len() {\n\n let mut chunk_end = transactions.len();\n\n let mut upper = chunk_end;\n\n let mut lower = chunk_start;\n\n let mut next = chunk_end; // be optimistic that all will fit\n\n\n\n // binary search for how many transactions will fit in an Entry (i.e. a BLOB)\n", "file_path": "src/entry.rs", "rank": 70, "score": 156903.80944837307 }, { "content": "pub fn parse_command(\n\n pubkey: Pubkey,\n\n matches: &ArgMatches<'_>,\n\n) -> Result<WalletCommand, Box<dyn error::Error>> {\n\n let response = match matches.subcommand() {\n\n (\"address\", Some(_address_matches)) => Ok(WalletCommand::Address),\n\n (\"airdrop\", Some(airdrop_matches)) => {\n\n let tokens = airdrop_matches.value_of(\"tokens\").unwrap().parse()?;\n\n Ok(WalletCommand::Airdrop(tokens))\n\n }\n\n (\"balance\", Some(_balance_matches)) => Ok(WalletCommand::Balance),\n\n (\"cancel\", Some(cancel_matches)) => {\n\n let pubkey_vec = bs58::decode(cancel_matches.value_of(\"process-id\").unwrap())\n\n .into_vec()\n\n .expect(\"base58-encoded public key\");\n\n\n\n if pubkey_vec.len() != mem::size_of::<Pubkey>() {\n\n eprintln!(\"{}\", cancel_matches.usage());\n\n Err(WalletError::BadParameter(\"Invalid public key\".to_string()))?;\n\n }\n", "file_path": "wallet/src/wallet.rs", "rank": 71, "score": 156887.19142305982 }, { "content": "/// Process a blob: Add blob to the ledger window.\n\npub fn process_blob(\n\n leader_scheduler: &Arc<RwLock<LeaderScheduler>>,\n\n blocktree: &Arc<Blocktree>,\n\n blob: &SharedBlob,\n\n) -> Result<()> {\n\n let is_coding = blob.read().unwrap().is_coding();\n\n\n\n // Check if the blob is in the range of our known leaders. If not, we return.\n\n let (slot, pix) = {\n\n let r_blob = blob.read().unwrap();\n\n (r_blob.slot(), r_blob.index())\n\n };\n\n let leader = leader_scheduler.read().unwrap().get_leader_for_slot(slot);\n\n\n\n // TODO: Once the original leader signature is added to the blob, make sure that\n\n // the blob was originally generated by the expected leader for this slot\n\n if leader.is_none() {\n\n warn!(\"No leader for slot {}, blob dropped\", slot);\n\n return Ok(()); // Occurs as a leader is rotating into a validator\n\n }\n", "file_path": "src/db_window.rs", "rank": 72, "score": 156887.19142305982 }, { "content": "pub fn retransmit_blobs(\n\n dq: &[SharedBlob],\n\n leader_scheduler: &Arc<RwLock<LeaderScheduler>>,\n\n retransmit: &BlobSender,\n\n id: &Pubkey,\n\n) -> Result<()> {\n\n let mut retransmit_queue: Vec<SharedBlob> = Vec::new();\n\n for b in dq {\n\n // Don't add blobs generated by this node to the retransmit queue\n\n let slot = b.read().unwrap().slot();\n\n if let Some(leader_id) = leader_scheduler.read().unwrap().get_leader_for_slot(slot) {\n\n if leader_id != *id {\n\n retransmit_queue.push(b.clone());\n\n }\n\n }\n\n }\n\n\n\n //todo maybe move this to where retransmit is actually happening\n\n submit(\n\n influxdb::Point::new(\"retransmit-queue\")\n", "file_path": "src/db_window.rs", "rank": 73, "score": 156887.19142305982 }, { "content": "#[allow(clippy::trivially_copy_pass_by_ref)]\n\npub fn new_bank_from_ledger(\n\n ledger_path: &str,\n\n ledger_config: &BlocktreeConfig,\n\n leader_scheduler_config: &LeaderSchedulerConfig,\n\n) -> (Bank, u64, Hash, Blocktree, SyncSender<bool>, Receiver<bool>) {\n\n let (blocktree, ledger_signal_sender, ledger_signal_receiver) =\n\n Blocktree::open_with_config_signal(ledger_path, ledger_config)\n\n .expect(\"Expected to successfully open database ledger\");\n\n let genesis_block =\n\n GenesisBlock::load(ledger_path).expect(\"Expected to successfully open genesis block\");\n\n let mut bank = Bank::new_with_leader_scheduler_config(&genesis_block, leader_scheduler_config);\n\n\n\n let now = Instant::now();\n\n let entries = blocktree.read_ledger().expect(\"opening ledger\");\n\n info!(\"processing ledger...\");\n\n let (entry_height, last_entry_id) = bank.process_ledger(entries).expect(\"process_ledger\");\n\n info!(\n\n \"processed {} ledger entries in {}ms, tick_height={}...\",\n\n entry_height,\n\n duration_as_ms(&now.elapsed()),\n", "file_path": "src/fullnode.rs", "rank": 74, "score": 156887.19142305982 }, { "content": "#[cfg(test)]\n\npub fn make_consecutive_blobs(\n\n num_blobs_to_make: u64,\n\n start_height: u64,\n\n start_hash: Hash,\n\n addr: &std::net::SocketAddr,\n\n) -> Vec<SharedBlob> {\n\n let entries = create_ticks(num_blobs_to_make, start_hash);\n\n\n\n let blobs = entries.to_shared_blobs();\n\n let mut index = start_height;\n\n for blob in &blobs {\n\n let mut blob = blob.write().unwrap();\n\n blob.set_index(index);\n\n blob.forward(true);\n\n blob.meta.set_addr(addr);\n\n index += 1;\n\n }\n\n blobs\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/entry.rs", "rank": 75, "score": 156887.19142305982 }, { "content": "pub fn make_listening_node(\n\n leader: &NodeInfo,\n\n) -> (GossipService, Arc<RwLock<ClusterInfo>>, Node, Pubkey) {\n\n let keypair = Keypair::new();\n\n let exit = Arc::new(AtomicBool::new(false));\n\n let new_node = Node::new_localhost_with_pubkey(keypair.pubkey());\n\n let new_node_info = new_node.info.clone();\n\n let id = new_node.info.id;\n\n let mut new_node_cluster_info = ClusterInfo::new_with_keypair(new_node_info, Arc::new(keypair));\n\n new_node_cluster_info.insert_info(leader.clone());\n\n new_node_cluster_info.set_leader(leader.id);\n\n let new_node_cluster_info_ref = Arc::new(RwLock::new(new_node_cluster_info));\n\n let gossip_service = GossipService::new(\n\n &new_node_cluster_info_ref,\n\n None,\n\n new_node\n\n .sockets\n\n .gossip\n\n .try_clone()\n\n .expect(\"Failed to clone gossip\"),\n\n exit.clone(),\n\n );\n\n\n\n (gossip_service, new_node_cluster_info_ref, new_node, id)\n\n}\n\n\n", "file_path": "src/gossip_service.rs", "rank": 76, "score": 154347.07489291285 }, { "content": "pub fn request_and_confirm_airdrop(\n\n rpc_client: &RpcClient,\n\n drone_addr: &SocketAddr,\n\n signer: &Keypair,\n\n tokens: u64,\n\n) -> Result<(), Box<dyn error::Error>> {\n\n let last_id = get_last_id(rpc_client)?;\n\n let mut tx = request_airdrop_transaction(drone_addr, &signer.pubkey(), tokens, last_id)?;\n\n send_and_confirm_tx(rpc_client, &mut tx, signer)?;\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use clap::{App, Arg, SubCommand};\n\n use serde_json::Value;\n\n use solana::rpc_mock::{PUBKEY, SIGNATURE};\n\n use solana::socketaddr;\n\n use solana_sdk::signature::{gen_keypair_file, read_keypair, read_pkcs8, Keypair, KeypairUtil};\n", "file_path": "wallet/src/wallet.rs", "rank": 77, "score": 154347.07489291285 }, { "content": "pub fn do_tx_transfers(\n\n exit_signal: &Arc<AtomicBool>,\n\n shared_txs: &SharedTransactions,\n\n leader: &NodeInfo,\n\n shared_tx_thread_count: &Arc<AtomicIsize>,\n\n total_tx_sent_count: &Arc<AtomicUsize>,\n\n thread_batch_sleep_ms: usize,\n\n) {\n\n let client = mk_client(&leader);\n\n loop {\n\n if thread_batch_sleep_ms > 0 {\n\n sleep(Duration::from_millis(thread_batch_sleep_ms as u64));\n\n }\n\n let txs;\n\n {\n\n let mut shared_txs_wl = shared_txs.write().unwrap();\n\n txs = shared_txs_wl.pop_front();\n\n }\n\n if let Some(txs0) = txs {\n\n shared_tx_thread_count.fetch_add(1, Ordering::Relaxed);\n", "file_path": "bench-tps/src/bench.rs", "rank": 78, "score": 154347.07489291285 }, { "content": "pub fn retry_get_balance(\n\n client: &mut ThinClient,\n\n bob_pubkey: &Pubkey,\n\n expected_balance: Option<u64>,\n\n) -> Option<u64> {\n\n const LAST: usize = 30;\n\n for run in 0..LAST {\n\n let balance_result = client.poll_get_balance(bob_pubkey);\n\n if expected_balance.is_none() {\n\n return balance_result.ok();\n\n }\n\n trace!(\n\n \"retry_get_balance[{}] {:?} {:?}\",\n\n run,\n\n balance_result,\n\n expected_balance\n\n );\n\n if let (Some(expected_balance), Ok(balance_result)) = (expected_balance, balance_result) {\n\n if expected_balance == balance_result {\n\n return Some(balance_result);\n\n }\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/thin_client.rs", "rank": 79, "score": 154347.07489291285 }, { "content": "pub fn generate_txs(\n\n shared_txs: &SharedTransactions,\n\n source: &[Keypair],\n\n dest: &[Keypair],\n\n threads: usize,\n\n reclaim: bool,\n\n leader: &NodeInfo,\n\n) {\n\n let mut client = mk_client(leader);\n\n let last_id = client.get_last_id();\n\n let tx_count = source.len();\n\n println!(\"Signing transactions... {} (reclaim={})\", tx_count, reclaim);\n\n let signing_start = Instant::now();\n\n\n\n let pairs: Vec<_> = if !reclaim {\n\n source.iter().zip(dest.iter()).collect()\n\n } else {\n\n dest.iter().zip(source.iter()).collect()\n\n };\n\n let transactions: Vec<_> = pairs\n", "file_path": "bench-tps/src/bench.rs", "rank": 80, "score": 154347.07489291285 }, { "content": "pub fn entrypoint(\n\n _program_id: &Pubkey,\n\n keyed_accounts: &mut [KeyedAccount],\n\n data: &[u8],\n\n _tick_height: u64,\n\n) -> Result<(), ProgramError> {\n\n if let Ok(syscall) = bincode::deserialize(data) {\n\n trace!(\"process_instruction: {:?}\", syscall);\n\n trace!(\"keyed_accounts: {:?}\", keyed_accounts);\n\n let from = 0;\n\n\n\n // all system instructions require that accounts_keys[0] be a signer\n\n if keyed_accounts[from].signer_key().is_none() {\n\n info!(\"account[from] is unsigned\");\n\n Err(ProgramError::InvalidArgument)?;\n\n }\n\n\n\n match syscall {\n\n SystemInstruction::CreateAccount {\n\n tokens,\n", "file_path": "programs/native/system/src/lib.rs", "rank": 81, "score": 154347.07489291285 }, { "content": "pub fn chacha_cbc_encrypt_ledger(\n\n blocktree: &Arc<Blocktree>,\n\n slice: u64,\n\n out_path: &Path,\n\n ivec: &mut [u8; CHACHA_BLOCK_SIZE],\n\n) -> io::Result<usize> {\n\n let mut out_file =\n\n BufWriter::new(File::create(out_path).expect(\"Can't open ledger encrypted data file\"));\n\n const BUFFER_SIZE: usize = 8 * 1024;\n\n let mut buffer = [0; BUFFER_SIZE];\n\n let mut encrypted_buffer = [0; BUFFER_SIZE];\n\n let key = [0; CHACHA_KEY_SIZE];\n\n let mut total_entries = 0;\n\n let mut total_size = 0;\n\n let mut entry = slice;\n\n\n\n loop {\n\n match blocktree.read_blobs_bytes(\n\n entry,\n\n ENTRIES_PER_SEGMENT - total_entries,\n", "file_path": "src/chacha.rs", "rank": 82, "score": 154347.07489291285 }, { "content": "pub fn default_window() -> Window {\n\n (0..2048).map(|_| WindowSlot::default()).collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::packet::{Blob, Packet, Packets, SharedBlob, PACKET_DATA_SIZE};\n\n use crate::streamer::{receiver, responder, PacketReceiver};\n\n use crate::window::{calculate_max_repair, new_window, Window, WindowUtil};\n\n use solana_sdk::pubkey::Pubkey;\n\n use std::io;\n\n use std::io::Write;\n\n use std::net::UdpSocket;\n\n use std::sync::atomic::{AtomicBool, Ordering};\n\n use std::sync::mpsc::channel;\n\n use std::sync::Arc;\n\n use std::time::Duration;\n\n\n\n fn get_msgs(r: PacketReceiver, num: &mut usize) {\n\n for _t in 0..5 {\n", "file_path": "src/window.rs", "rank": 83, "score": 153623.09407513653 }, { "content": "// Create entries such the node identified by active_keypair\n\n// will be added to the active set for leader selection:\n\n// 1) Give the node a nonzero number of tokens,\n\n// 2) A vote from the validator\n\npub fn make_active_set_entries(\n\n active_keypair: &Arc<Keypair>,\n\n token_source: &Keypair,\n\n stake: u64,\n\n tick_height_to_vote_on: u64,\n\n last_entry_id: &Hash,\n\n last_tick_id: &Hash,\n\n num_ending_ticks: u64,\n\n) -> (Vec<Entry>, VotingKeypair) {\n\n // 1) Assume the active_keypair node has no tokens staked\n\n let transfer_tx = SystemTransaction::new_account(\n\n &token_source,\n\n active_keypair.pubkey(),\n\n stake,\n\n *last_tick_id,\n\n 0,\n\n );\n\n let mut last_entry_id = *last_entry_id;\n\n let transfer_entry = next_entry_mut(&mut last_entry_id, 1, vec![transfer_tx]);\n\n\n", "file_path": "src/leader_scheduler.rs", "rank": 84, "score": 151959.7145279934 }, { "content": "pub fn sample_tx_count(\n\n exit_signal: &Arc<AtomicBool>,\n\n maxes: &Arc<RwLock<Vec<(SocketAddr, NodeStats)>>>,\n\n first_tx_count: u64,\n\n v: &NodeInfo,\n\n sample_period: u64,\n\n) {\n\n let mut client = mk_client(&v);\n\n let mut now = Instant::now();\n\n let mut initial_tx_count = client.transaction_count();\n\n let mut max_tps = 0.0;\n\n let mut total;\n\n\n\n let log_prefix = format!(\"{:21}:\", v.tpu.to_string());\n\n\n\n loop {\n\n let tx_count = client.transaction_count();\n\n assert!(\n\n tx_count >= initial_tx_count,\n\n \"expected tx_count({}) >= initial_tx_count({})\",\n", "file_path": "bench-tps/src/bench.rs", "rank": 85, "score": 151944.36818702123 }, { "content": "pub fn compute_and_report_stats(\n\n maxes: &Arc<RwLock<Vec<(SocketAddr, NodeStats)>>>,\n\n sample_period: u64,\n\n tx_send_elapsed: &Duration,\n\n total_tx_send_count: usize,\n\n) {\n\n // Compute/report stats\n\n let mut max_of_maxes = 0.0;\n\n let mut max_tx_count = 0;\n\n let mut nodes_with_zero_tps = 0;\n\n let mut total_maxes = 0.0;\n\n println!(\" Node address | Max TPS | Total Transactions\");\n\n println!(\"---------------------+---------------+--------------------\");\n\n\n\n for (sock, stats) in maxes.read().unwrap().iter() {\n\n let maybe_flag = match stats.tx {\n\n 0 => \"!!!!!\",\n\n _ => \"\",\n\n };\n\n\n", "file_path": "bench-tps/src/bench.rs", "rank": 86, "score": 151944.36818702123 }, { "content": "pub fn entrypoint(\n\n program_id: &Pubkey,\n\n keyed_accounts: &mut [KeyedAccount],\n\n ix_userdata: &[u8],\n\n tick_height: u64,\n\n) -> Result<(), ProgramError> {\n\n if keyed_accounts[0].account.executable {\n\n // dispatch it\n\n let (names, params) = keyed_accounts.split_at_mut(1);\n\n let name = &names[0].account.userdata;\n\n let name = match str::from_utf8(name) {\n\n Ok(v) => v,\n\n Err(e) => {\n\n warn!(\"Invalid UTF-8 sequence: {}\", e);\n\n return Err(ProgramError::GenericError);\n\n }\n\n };\n\n trace!(\"Call native {:?}\", name);\n\n let path = create_path(&name);\n\n // TODO linux tls bug can cause crash on dlclose(), workaround by never unloading\n", "file_path": "programs/native/native_loader/src/lib.rs", "rank": 87, "score": 151944.36818702123 }, { "content": "fn verify_signature(sig: &Signature, pubkey: &Pubkey, msg: &[u8]) -> Result<()> {\n\n if sig.verify(pubkey.as_ref(), msg) {\n\n Ok(())\n\n } else {\n\n Err(Error::invalid_request())\n\n }\n\n}\n\n\n", "file_path": "vote-signer/src/rpc.rs", "rank": 88, "score": 151422.34175904037 }, { "content": "pub fn deserialize_vec_with<T>(\n\n mut reader: &mut Cursor<&[u8]>,\n\n deser_fn: fn(&mut Cursor<&[u8]>) -> Result<T, Error>,\n\n) -> Result<Vec<T>, Error> {\n\n let vec_len = decode_len(&mut reader)?;\n\n let mut vec: Vec<T> = Vec::with_capacity(vec_len);\n\n for _ in 0..vec_len {\n\n let t: T = deser_fn(&mut reader)?;\n\n vec.push(t);\n\n }\n\n Ok(vec)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use bincode::{deserialize, serialize, serialized_size};\n\n use serde::ser::Serializer;\n\n use serde::Deserialize;\n\n use std::fmt;\n", "file_path": "sdk/src/shortvec.rs", "rank": 89, "score": 151082.97754498955 }, { "content": "pub fn serialize_vec_with<T>(\n\n mut writer: &mut Cursor<&mut [u8]>,\n\n input: &[T],\n\n ser_fn: fn(&mut Cursor<&mut [u8]>, &T) -> Result<(), Error>,\n\n) -> Result<(), Error> {\n\n encode_len(&mut writer, input.len())?;\n\n input.iter().for_each(|e| ser_fn(&mut writer, &e).unwrap());\n\n Ok(())\n\n}\n\n\n", "file_path": "sdk/src/shortvec.rs", "rank": 90, "score": 151082.97754498955 }, { "content": "/// Budget DSL contract interface\n\n/// * accounts[0] - The source of the tokens\n\n/// * accounts[1] - The contract context. Once the contract has been completed, the tokens can\n\n/// be spent from this account .\n\npub fn process_instruction(\n\n keyed_accounts: &mut [KeyedAccount],\n\n data: &[u8],\n\n) -> Result<(), BudgetError> {\n\n if let Ok(instruction) = deserialize(data) {\n\n trace!(\"process_instruction: {:?}\", instruction);\n\n apply_debits(keyed_accounts, &instruction)\n\n } else {\n\n info!(\"Invalid transaction userdata: {:?}\", data);\n\n Err(BudgetError::UserdataDeserializeFailure)\n\n }\n\n}\n\n\n\n// TODO: Re-instate budget_program special case in bank.rs?\n\n/*\n", "file_path": "programs/native/budget/src/budget_program.rs", "rank": 91, "score": 149674.42538386866 }, { "content": "struct TransactionVisitor;\n\nimpl<'a> serde::de::Visitor<'a> for TransactionVisitor {\n\n type Value = Transaction;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"Expecting Instruction\")\n\n }\n\n fn visit_bytes<E>(self, data: &[u8]) -> Result<Transaction, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n use serde::de::Error;\n\n let mut rd = Cursor::new(&data[..]);\n\n let signatures: Vec<Signature> =\n\n deserialize_vec_with(&mut rd, Transaction::deserialize_signature)\n\n .map_err(Error::custom)?;\n\n let account_keys: Vec<Pubkey> =\n\n deserialize_vec_with(&mut rd, Transaction::deserialize_pubkey)\n\n .map_err(Error::custom)?;\n\n let mut buf = [0; size_of::<Hash>()];\n", "file_path": "sdk/src/transaction.rs", "rank": 92, "score": 147660.59229186949 }, { "content": "// Encrypt a file with multiple starting IV states, determined by ivecs.len()\n\n//\n\n// Then sample each block at the offsets provided by samples argument with sha256\n\n// and return the vec of sha states\n\npub fn chacha_cbc_encrypt_file_many_keys(\n\n blocktree: &Arc<Blocktree>,\n\n segment: u64,\n\n ivecs: &mut [u8],\n\n samples: &[u64],\n\n) -> io::Result<Vec<Hash>> {\n\n if ivecs.len() % CHACHA_BLOCK_SIZE != 0 {\n\n return Err(io::Error::new(\n\n io::ErrorKind::Other,\n\n format!(\n\n \"bad IV length({}) not divisible by {} \",\n\n ivecs.len(),\n\n CHACHA_BLOCK_SIZE,\n\n ),\n\n ));\n\n }\n\n\n\n let mut buffer = [0; 8 * 1024];\n\n let num_keys = ivecs.len() / CHACHA_BLOCK_SIZE;\n\n let mut sha_states = vec![0; num_keys * size_of::<Hash>()];\n", "file_path": "src/chacha_cuda.rs", "rank": 93, "score": 147508.87326995336 }, { "content": "fn process_cancel(rpc_client: &RpcClient, config: &WalletConfig, pubkey: Pubkey) -> ProcessResult {\n\n let last_id = get_last_id(&rpc_client)?;\n\n let mut tx = BudgetTransaction::new_signature(&config.id, pubkey, config.id.pubkey(), last_id);\n\n let signature_str = send_and_confirm_tx(&rpc_client, &mut tx, &config.id)?;\n\n Ok(signature_str.to_string())\n\n}\n\n\n", "file_path": "wallet/src/wallet.rs", "rank": 94, "score": 146540.71563192038 }, { "content": "pub fn get_max_size() -> usize {\n\n // Upper limit on the size of the Vote State. Equal to\n\n // sizeof(VoteState) when votes.len() is MAX_VOTE_HISTORY\n\n let mut vote_program = VoteState::default();\n\n vote_program.votes = VecDeque::from(vec![Vote::default(); MAX_VOTE_HISTORY]);\n\n serialized_size(&vote_program).unwrap() as usize\n\n}\n\n\n\nimpl VoteState {\n\n pub fn new(node_id: Pubkey, staker_id: Pubkey) -> Self {\n\n let votes = VecDeque::new();\n\n let credits = 0;\n\n Self {\n\n votes,\n\n node_id,\n\n staker_id,\n\n credits,\n\n }\n\n }\n\n\n", "file_path": "sdk/src/vote_program.rs", "rank": 95, "score": 146404.11764156213 }, { "content": "#[test]\n\npub fn cluster_info_retransmit() -> result::Result<()> {\n\n solana_logger::setup();\n\n let exit = Arc::new(AtomicBool::new(false));\n\n trace!(\"c1:\");\n\n let (c1, dr1, tn1) = test_node(exit.clone());\n\n trace!(\"c2:\");\n\n let (c2, dr2, tn2) = test_node(exit.clone());\n\n trace!(\"c3:\");\n\n let (c3, dr3, tn3) = test_node(exit.clone());\n\n let c1_data = c1.read().unwrap().my_data().clone();\n\n c1.write().unwrap().set_leader(c1_data.id);\n\n\n\n c2.write().unwrap().insert_info(c1_data.clone());\n\n c3.write().unwrap().insert_info(c1_data.clone());\n\n\n\n c2.write().unwrap().set_leader(c1_data.id);\n\n c3.write().unwrap().set_leader(c1_data.id);\n\n let num = 3;\n\n\n\n //wait to converge\n", "file_path": "tests/gossip.rs", "rank": 96, "score": 145725.98361435506 }, { "content": "#[allow(unused_variables)]\n\npub fn helper_sol_log_verify(\n\n addr: u64,\n\n unused2: u64,\n\n unused3: u64,\n\n unused4: u64,\n\n unused5: u64,\n\n ro_regions: &[MemoryRegion],\n\n unused7: &[MemoryRegion],\n\n) -> Result<(()), Error> {\n\n for region in ro_regions.iter() {\n\n if region.addr <= addr && (addr as u64) < region.addr + region.len {\n\n let c_buf: *const c_char = addr as *const c_char;\n\n let max_size = region.addr + region.len - addr;\n\n unsafe {\n\n for i in 0..max_size {\n\n if std::ptr::read(c_buf.offset(i as isize)) == 0 {\n\n return Ok(());\n\n }\n\n }\n\n }\n\n return Err(Error::new(ErrorKind::Other, \"Error, Unterminated string\"));\n\n }\n\n }\n\n Err(Error::new(\n\n ErrorKind::Other,\n\n \"Error: Load segfault, bad string pointer\",\n\n ))\n\n}\n\n\n", "file_path": "programs/native/bpf_loader/src/lib.rs", "rank": 97, "score": 145457.5758982533 }, { "content": "pub fn duration_as_s(d: &Duration) -> f32 {\n\n d.as_secs() as f32 + (d.subsec_nanos() as f32 / 1_000_000_000.0)\n\n}\n\n\n", "file_path": "sdk/src/timing.rs", "rank": 98, "score": 145442.1034300988 } ]
Rust
kvserver/src/server.rs
GITHUBear/KV_Server
a3532a91f50604c6d757891ceed0f457a34f2afd
extern crate protobuf; extern crate futures; extern crate grpcio; pub mod kvprotos; use std::io::Read; use std::sync::{Arc, RwLock}; use std::{io, thread}; use std::collections::{BTreeMap, HashMap}; use futures::sync::oneshot; use futures::Future; use grpcio::{Environment, RpcContext, ServerBuilder, UnarySink}; use kvprotos::kvserver::{ResponseStatus, GetRequest, GetResponse, PutRequest, PutResponse, DeleteRequest, DeleteResponse, ScanRequest, ScanResponse}; use kvprotos::kvserver_grpc::{self, Kvdb}; #[derive(Clone)] struct KvService{ engine: Arc<RwLock<BTreeMap<String,String>>>, } impl KvService { pub fn new() -> Self { println!("new KvService"); KvService{ engine: Arc::new(RwLock::new(BTreeMap::new())), } } } impl Kvdb for KvService { fn get(&mut self, ctx: RpcContext, req: GetRequest, sink: UnarySink<GetResponse>) { let mut response = GetResponse::new(); println!("Received GetRequest {{ {:?} }}", req); let mutengine = &self.engine; let req_key = &req.key; let tmp = mutengine.read().unwrap(); let res = tmp.get(req_key); println!("====>getcheck {:?}", self.engine); match res { Some(s) => { response.set_status(ResponseStatus::kSuccess); response.set_value(s.clone()); }, None => response.set_status(ResponseStatus::kNotFound), } let f = sink.success(response.clone()) .map(move |_| println!("Responded with {{ {:?} }}", response)) .map_err(move |err| eprintln!("Failed to reply: {:?}", err)); ctx.spawn(f) } fn put(&mut self, ctx: RpcContext, req: PutRequest, sink: UnarySink<PutResponse>){ let mut response = PutResponse::new(); println!("Received PutRequest {{ {:?} }}", req); let mutengine = &mut self.engine; let req_key = &req.key; let req_val = &req.value; let _res = mutengine.write().unwrap().insert(req_key.clone(), req_val.clone()); println!("====>putcheck {:?}", self.engine); response.set_status(ResponseStatus::kSuccess); let f = sink.success(response.clone()) .map(move |_| println!("Responded with {{ {:?} }}", response)) .map_err(move |err| eprintln!("Failed to reply: {:?}", err)); ctx.spawn(f) } fn delete(&mut self, ctx: RpcContext, req: DeleteRequest, sink: UnarySink<DeleteResponse>){ let mut response = DeleteResponse::new(); println!("Received DeleteRequest {{ {:?} }}", req); let mutengine = &mut self.engine; let req_key = &req.key; let res = mutengine.write().unwrap().remove(req_key); match res { Some(_) => response.set_status(ResponseStatus::kSuccess), None => response.set_status(ResponseStatus::kNotFound), } let f = sink.success(response.clone()) .map(move |_| println!("Responded with {{ {:?} }}", response)) .map_err(move |err| eprintln!("Failed to reply: {:?}", err)); ctx.spawn(f) } fn scan(&mut self, ctx: RpcContext, req: ScanRequest, sink: UnarySink<ScanResponse>){ let mut response = ScanResponse::new(); println!("Received ScanRequest {{ {:?} }}", req); let mutengine = &self.engine; let key_start = &req.key_start; let key_end = &req.key_end; let mut resmap = HashMap::new(); for (k, v) in mutengine.read().unwrap().range(key_start.clone()..key_end.clone()){ resmap.insert(k.clone(), v.clone()); } if resmap.len() != 0 { response.set_status(ResponseStatus::kSuccess); response.set_key_value(resmap); }else{ response.set_status(ResponseStatus::kNotFound); } let f = sink.success(response.clone()) .map(move |_| println!("Responded with {{ {:?} }}", response)) .map_err(move |err| eprintln!("Failed to reply: {:?}", err)); ctx.spawn(f) } } fn main(){ let env = Arc::new(Environment::new(1)); let service = kvserver_grpc::create_kvdb(KvService::new()); let mut server = ServerBuilder::new(env) .register_service(service) .bind("127.0.0.1", 20001) .build() .unwrap(); server.start(); let (tx, rx) = oneshot::channel(); thread::spawn(move || { println!("Press ENTER to exit..."); let _ = io::stdin().read(&mut [0]).unwrap(); tx.send(()) }); let _ = rx.wait(); let _ = server.shutdown().wait(); }
extern crate protobuf; extern crate futures; extern crate grpcio; pub mod kvprotos; use std::io::Read; use std::sync::{Arc, RwLock}; use std::{io, thread}; use std::collections::{BTreeMap, HashMap}; use futures::sync::oneshot; use futures::Future; use grpcio::{Environment, RpcContext, ServerBuilder, UnarySink}; use kvprotos::kvserver::{ResponseStatus, GetRequest, GetResponse, PutRequest, PutResponse, DeleteRequest, DeleteResponse, ScanRequest, ScanResponse}; use kvprotos::kvserver_grpc::{self, Kvdb}; #[derive(Clone)] struct KvService{ engine: Arc<RwLock<BTreeMap<String,String>>>, } impl KvService { pub fn new() -> Self { println!("new KvService"); KvService{ engine: Arc::new(RwLock::new(BTreeMap::new())), } } } impl Kvdb for KvService { fn get(&mut self, ctx: RpcContext, req: GetRequest, sink: UnarySink<GetResponse>) { let mut response = GetResponse::new(); println!("Received GetRequest {{ {:?} }}", req); let mutengine = &self.engine; let req_key = &req.key; let tmp = mutengine.read().unwrap(); let res = tmp.get(req_key); println!("====>getcheck {:?}", self.engine); match res { Some(s) => { response.set_status(ResponseStatus::kSuccess); response.set_value(s.clone()); }, None => response.set_status(ResponseStatus::kNotFound), } let f = sink.success(response.clone()) .map(move |_| println!("Responded with {{ {:?} }}", response))
:new(); for (k, v) in mutengine.read().unwrap().range(key_start.clone()..key_end.clone()){ resmap.insert(k.clone(), v.clone()); } if resmap.len() != 0 { response.set_status(ResponseStatus::kSuccess); response.set_key_value(resmap); }else{ response.set_status(ResponseStatus::kNotFound); } let f = sink.success(response.clone()) .map(move |_| println!("Responded with {{ {:?} }}", response)) .map_err(move |err| eprintln!("Failed to reply: {:?}", err)); ctx.spawn(f) } } fn main(){ let env = Arc::new(Environment::new(1)); let service = kvserver_grpc::create_kvdb(KvService::new()); let mut server = ServerBuilder::new(env) .register_service(service) .bind("127.0.0.1", 20001) .build() .unwrap(); server.start(); let (tx, rx) = oneshot::channel(); thread::spawn(move || { println!("Press ENTER to exit..."); let _ = io::stdin().read(&mut [0]).unwrap(); tx.send(()) }); let _ = rx.wait(); let _ = server.shutdown().wait(); }
.map_err(move |err| eprintln!("Failed to reply: {:?}", err)); ctx.spawn(f) } fn put(&mut self, ctx: RpcContext, req: PutRequest, sink: UnarySink<PutResponse>){ let mut response = PutResponse::new(); println!("Received PutRequest {{ {:?} }}", req); let mutengine = &mut self.engine; let req_key = &req.key; let req_val = &req.value; let _res = mutengine.write().unwrap().insert(req_key.clone(), req_val.clone()); println!("====>putcheck {:?}", self.engine); response.set_status(ResponseStatus::kSuccess); let f = sink.success(response.clone()) .map(move |_| println!("Responded with {{ {:?} }}", response)) .map_err(move |err| eprintln!("Failed to reply: {:?}", err)); ctx.spawn(f) } fn delete(&mut self, ctx: RpcContext, req: DeleteRequest, sink: UnarySink<DeleteResponse>){ let mut response = DeleteResponse::new(); println!("Received DeleteRequest {{ {:?} }}", req); let mutengine = &mut self.engine; let req_key = &req.key; let res = mutengine.write().unwrap().remove(req_key); match res { Some(_) => response.set_status(ResponseStatus::kSuccess), None => response.set_status(ResponseStatus::kNotFound), } let f = sink.success(response.clone()) .map(move |_| println!("Responded with {{ {:?} }}", response)) .map_err(move |err| eprintln!("Failed to reply: {:?}", err)); ctx.spawn(f) } fn scan(&mut self, ctx: RpcContext, req: ScanRequest, sink: UnarySink<ScanResponse>){ let mut response = ScanResponse::new(); println!("Received ScanRequest {{ {:?} }}", req); let mutengine = &self.engine; let key_start = &req.key_start; let key_end = &req.key_end; let mut resmap = HashMap:
random
[ { "content": "pub fn create_kvdb<S: Kvdb + Send + Clone + 'static>(s: S) -> ::grpcio::Service {\n\n let mut builder = ::grpcio::ServiceBuilder::new();\n\n let mut instance = s.clone();\n\n builder = builder.add_unary_handler(&METHOD_KVDB_GET, move |ctx, req, resp| {\n\n instance.get(ctx, req, resp)\n\n });\n\n let mut instance = s.clone();\n\n builder = builder.add_unary_handler(&METHOD_KVDB_PUT, move |ctx, req, resp| {\n\n instance.put(ctx, req, resp)\n\n });\n\n let mut instance = s.clone();\n\n builder = builder.add_unary_handler(&METHOD_KVDB_DELETE, move |ctx, req, resp| {\n\n instance.delete(ctx, req, resp)\n\n });\n\n let mut instance = s.clone();\n\n builder = builder.add_unary_handler(&METHOD_KVDB_SCAN, move |ctx, req, resp| {\n\n instance.scan(ctx, req, resp)\n\n });\n\n builder.build()\n\n}\n", "file_path": "kvserver/src/kvprotos/kvserver_grpc.rs", "rank": 0, "score": 77933.17532362812 }, { "content": "pub trait Kvdb {\n\n fn get(&mut self, ctx: ::grpcio::RpcContext, req: super::kvserver::GetRequest, sink: ::grpcio::UnarySink<super::kvserver::GetResponse>);\n\n fn put(&mut self, ctx: ::grpcio::RpcContext, req: super::kvserver::PutRequest, sink: ::grpcio::UnarySink<super::kvserver::PutResponse>);\n\n fn delete(&mut self, ctx: ::grpcio::RpcContext, req: super::kvserver::DeleteRequest, sink: ::grpcio::UnarySink<super::kvserver::DeleteResponse>);\n\n fn scan(&mut self, ctx: ::grpcio::RpcContext, req: super::kvserver::ScanRequest, sink: ::grpcio::UnarySink<super::kvserver::ScanResponse>);\n\n}\n\n\n", "file_path": "kvserver/src/kvprotos/kvserver_grpc.rs", "rank": 1, "score": 66622.41450143763 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n unsafe {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n }\n\n}\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 2, "score": 62916.270677361106 }, { "content": "fn parse_descriptor_proto() -> ::protobuf::descriptor::FileDescriptorProto {\n\n ::protobuf::parse_from_bytes(file_descriptor_proto_data).unwrap()\n\n}\n\n\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 3, "score": 50650.395701083515 }, { "content": "pub mod kvserver;\n\npub mod kvserver_grpc;", "file_path": "kvserver/src/kvprotos/mod.rs", "rank": 4, "score": 38281.28919071771 }, { "content": "fn main() {\n\n let proto_root = \"src/kvprotos\";\n\n println!(\"cargo:rerun-if-changed={}\", proto_root);\n\n protoc_grpcio::compile_grpc_protos(&[\"kvserver.proto\"], &[proto_root], &proto_root, None)\n\n .expect(\"Failed to compile gRPC definitions!\");\n\n}", "file_path": "kvserver/build.rs", "rank": 5, "score": 34166.3829990005 }, { "content": "struct Client {\n\n client: KvdbClient,\n\n}\n\n\n\nimpl Client {\n\n\tpub fn new(host: String, port: u16) -> Self {\n\n let addr = format!(\"{}:{}\", host, port);\n\n let env = Arc::new(EnvBuilder::new().build());\n\n let ch = ChannelBuilder::new(env).connect(addr.as_ref());\n\n let kv_client = KvdbClient::new(ch);\n\n\n\n Client {\n\n client: kv_client,\n\n }\n\n }\n\n\n\n pub fn get(&self, key: String) -> Option<String> {\n\n let mut request = GetRequest::new();\n\n request.set_key(key);\n\n let ret = self.client.get(&request).expect(\"RPC failed\");\n", "file_path": "kvserver/src/client.rs", "rank": 6, "score": 33114.376216078614 }, { "content": "fn printusage(){\n\n\tprintln!(\"Usage:\\n\\t\\tput <key> <value>\\n\\t\\t\n\n\t\t get <key>\\n\\t\\t\n\n\t\t delete <key>\\n\\t\\t\n\n\t\t scan <key> <key>\\n\\t\\t\n\n\t\t exit\");\n\n}\n\n\n", "file_path": "kvserver/src/client.rs", "rank": 8, "score": 32685.887829050364 }, { "content": "fn main() {\n\n\tlet test_host = String::from(\"127.0.0.1\");\n\n let test_port = 20001;\n\n\n\n let client = Client::new(test_host.clone(), test_port);\n\n\n\n cmd(&client);\n\n // client.put(\"aa\".to_string(),\"aaaaa\".to_string());\n\n // client.put(\"bb\".to_string(),\"bbbbb\".to_string());\n\n // client.put(\"cc\".to_string(),\"ccccc\".to_string());\n\n // let ret = client.get(\"aa\".to_string());\n\n // match ret {\n\n // Some(v) => println!(\"get:aa's value:{}\", v),\n\n // None => println!(\"get None\")\n\n // }\n\n // client.delete(\"aa\".to_string());\n\n // client.put(\"dd\".to_string(),\"ccccc\".to_string());\n\n // client.put(\"dd\".to_string(),\"ddddd\".to_string());\n\n // let ret = client.scan(\"aa\".to_string(),\"ee\".to_string());\n\n // match ret {\n", "file_path": "kvserver/src/client.rs", "rank": 9, "score": 32685.887829050364 }, { "content": "// put <key> <value>\n\n// get <key>\n\n// delete <key>\n\n// scan <key> <key>\n\n// exit\n\nfn cmd(client : &Client){\n\n\n\n\tloop {\n\n\t\tlet mut cmdline = String::new();\n\n\t\tio::stdin().read_line(&mut cmdline).expect(\"Failed to read line\");\n\n\n\n\t\tlet mut cmditer = cmdline.trim().split_whitespace();\n\n\n\n\t\tlet mut str_vec = vec![];\n\n\t\twhile let Some(cmdele) = cmditer.next() {\n\n\t\t\tstr_vec.push(cmdele);\n\n\t\t}\n\n\n\n\t\tlet len = str_vec.len();\n\n\t\tif len == 0 {\n\n\t\t\tprintusage();\n\n\t\t\tcontinue;\n\n\t\t}\n\n\n\n\t\tif str_vec[0] == \"put\" {\n", "file_path": "kvserver/src/client.rs", "rank": 11, "score": 26066.127138627297 }, { "content": " // If field is not initialized, it is initialized with default value first.\n\n pub fn mut_value(&mut self) -> &mut ::std::string::String {\n\n &mut self.value\n\n }\n\n\n\n // Take field\n\n pub fn take_value(&mut self) -> ::std::string::String {\n\n ::std::mem::replace(&mut self.value, ::std::string::String::new())\n\n }\n\n}\n\n\n\nimpl ::protobuf::Message for PutRequest {\n\n fn is_initialized(&self) -> bool {\n\n true\n\n }\n\n\n\n fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream) -> ::protobuf::ProtobufResult<()> {\n\n while !is.eof()? {\n\n let (field_number, wire_type) = is.read_tag_unpack()?;\n\n match field_number {\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 12, "score": 15270.910349242136 }, { "content": "\n\n // Take field\n\n pub fn take_key_end(&mut self) -> ::std::string::String {\n\n ::std::mem::replace(&mut self.key_end, ::std::string::String::new())\n\n }\n\n}\n\n\n\nimpl ::protobuf::Message for ScanRequest {\n\n fn is_initialized(&self) -> bool {\n\n true\n\n }\n\n\n\n fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream) -> ::protobuf::ProtobufResult<()> {\n\n while !is.eof()? {\n\n let (field_number, wire_type) = is.read_tag_unpack()?;\n\n match field_number {\n\n 1 => {\n\n ::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.key_start)?;\n\n },\n\n 2 => {\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 13, "score": 15270.293975377313 }, { "content": " }\n\n pub fn clear_status(&mut self) {\n\n self.status = ResponseStatus::kNoType;\n\n }\n\n\n\n // Param is passed by value, moved\n\n pub fn set_status(&mut self, v: ResponseStatus) {\n\n self.status = v;\n\n }\n\n}\n\n\n\nimpl ::protobuf::Message for DeleteResponse {\n\n fn is_initialized(&self) -> bool {\n\n true\n\n }\n\n\n\n fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream) -> ::protobuf::ProtobufResult<()> {\n\n while !is.eof()? {\n\n let (field_number, wire_type) = is.read_tag_unpack()?;\n\n match field_number {\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 14, "score": 15269.761519307936 }, { "content": "\n\n // Mutable pointer to the field.\n\n // If field is not initialized, it is initialized with default value first.\n\n pub fn mut_key(&mut self) -> &mut ::std::string::String {\n\n &mut self.key\n\n }\n\n\n\n // Take field\n\n pub fn take_key(&mut self) -> ::std::string::String {\n\n ::std::mem::replace(&mut self.key, ::std::string::String::new())\n\n }\n\n}\n\n\n\nimpl ::protobuf::Message for DeleteRequest {\n\n fn is_initialized(&self) -> bool {\n\n true\n\n }\n\n\n\n fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream) -> ::protobuf::ProtobufResult<()> {\n\n while !is.eof()? {\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 15, "score": 15269.440987693019 }, { "content": " // Take field\n\n pub fn take_key_value(&mut self) -> ::std::collections::HashMap<::std::string::String, ::std::string::String> {\n\n ::std::mem::replace(&mut self.key_value, ::std::collections::HashMap::new())\n\n }\n\n}\n\n\n\nimpl ::protobuf::Message for ScanResponse {\n\n fn is_initialized(&self) -> bool {\n\n true\n\n }\n\n\n\n fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream) -> ::protobuf::ProtobufResult<()> {\n\n while !is.eof()? {\n\n let (field_number, wire_type) = is.read_tag_unpack()?;\n\n match field_number {\n\n 1 => {\n\n ::protobuf::rt::read_proto3_enum_with_unknown_fields_into(wire_type, is, &mut self.status, 1, &mut self.unknown_fields)?\n\n },\n\n 2 => {\n\n ::protobuf::rt::read_map_into::<::protobuf::types::ProtobufTypeString, ::protobuf::types::ProtobufTypeString>(wire_type, is, &mut self.key_value)?;\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 16, "score": 15269.260942339319 }, { "content": " self.key = v;\n\n }\n\n\n\n // Mutable pointer to the field.\n\n // If field is not initialized, it is initialized with default value first.\n\n pub fn mut_key(&mut self) -> &mut ::std::string::String {\n\n &mut self.key\n\n }\n\n\n\n // Take field\n\n pub fn take_key(&mut self) -> ::std::string::String {\n\n ::std::mem::replace(&mut self.key, ::std::string::String::new())\n\n }\n\n}\n\n\n\nimpl ::protobuf::Message for GetRequest {\n\n fn is_initialized(&self) -> bool {\n\n true\n\n }\n\n\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 17, "score": 15268.8156548443 }, { "content": "\n\n fn as_any(&self) -> &::std::any::Any {\n\n self as &::std::any::Any\n\n }\n\n fn as_any_mut(&mut self) -> &mut ::std::any::Any {\n\n self as &mut ::std::any::Any\n\n }\n\n fn into_any(self: Box<Self>) -> ::std::boxed::Box<::std::any::Any> {\n\n self\n\n }\n\n\n\n fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {\n\n Self::descriptor_static()\n\n }\n\n\n\n fn new() -> GetResponse {\n\n GetResponse::new()\n\n }\n\n\n\n fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 18, "score": 15268.701611536724 }, { "content": " &mut self.unknown_fields\n\n }\n\n\n\n fn as_any(&self) -> &::std::any::Any {\n\n self as &::std::any::Any\n\n }\n\n fn as_any_mut(&mut self) -> &mut ::std::any::Any {\n\n self as &mut ::std::any::Any\n\n }\n\n fn into_any(self: Box<Self>) -> ::std::boxed::Box<::std::any::Any> {\n\n self\n\n }\n\n\n\n fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {\n\n Self::descriptor_static()\n\n }\n\n\n\n fn new() -> DeleteResponse {\n\n DeleteResponse::new()\n\n }\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 19, "score": 15268.476468093577 }, { "content": " &mut self.unknown_fields\n\n }\n\n\n\n fn as_any(&self) -> &::std::any::Any {\n\n self as &::std::any::Any\n\n }\n\n fn as_any_mut(&mut self) -> &mut ::std::any::Any {\n\n self as &mut ::std::any::Any\n\n }\n\n fn into_any(self: Box<Self>) -> ::std::boxed::Box<::std::any::Any> {\n\n self\n\n }\n\n\n\n fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {\n\n Self::descriptor_static()\n\n }\n\n\n\n fn new() -> ScanResponse {\n\n ScanResponse::new()\n\n }\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 20, "score": 15268.476468093577 }, { "content": "\n\n fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {\n\n &mut self.unknown_fields\n\n }\n\n\n\n fn as_any(&self) -> &::std::any::Any {\n\n self as &::std::any::Any\n\n }\n\n fn as_any_mut(&mut self) -> &mut ::std::any::Any {\n\n self as &mut ::std::any::Any\n\n }\n\n fn into_any(self: Box<Self>) -> ::std::boxed::Box<::std::any::Any> {\n\n self\n\n }\n\n\n\n fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {\n\n Self::descriptor_static()\n\n }\n\n\n\n fn new() -> DeleteRequest {\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 21, "score": 15268.263346944224 }, { "content": " self as &mut ::std::any::Any\n\n }\n\n fn into_any(self: Box<Self>) -> ::std::boxed::Box<::std::any::Any> {\n\n self\n\n }\n\n\n\n fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {\n\n Self::descriptor_static()\n\n }\n\n\n\n fn new() -> PutResponse {\n\n PutResponse::new()\n\n }\n\n\n\n fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {\n\n static mut descriptor: ::protobuf::lazy::Lazy<::protobuf::reflect::MessageDescriptor> = ::protobuf::lazy::Lazy {\n\n lock: ::protobuf::lazy::ONCE_INIT,\n\n ptr: 0 as *const ::protobuf::reflect::MessageDescriptor,\n\n };\n\n unsafe {\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 22, "score": 15268.07909345859 }, { "content": "\n\nuse protobuf::Message as Message_imported_for_functions;\n\nuse protobuf::ProtobufEnum as ProtobufEnum_imported_for_functions;\n\n\n\n/// Generated files are compatible only with the same version\n\n/// of protobuf runtime.\n\nconst _PROTOBUF_VERSION_CHECK: () = ::protobuf::VERSION_2_7_0;\n\n\n\n#[derive(PartialEq,Clone,Default)]\n\npub struct GetRequest {\n\n // message fields\n\n pub key: ::std::string::String,\n\n // special fields\n\n pub unknown_fields: ::protobuf::UnknownFields,\n\n pub cached_size: ::protobuf::CachedSize,\n\n}\n\n\n\nimpl<'a> ::std::default::Default for &'a GetRequest {\n\n fn default() -> &'a GetRequest {\n\n <GetRequest as ::protobuf::Message>::default_instance()\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 23, "score": 15268.00472537163 }, { "content": " pub unknown_fields: ::protobuf::UnknownFields,\n\n pub cached_size: ::protobuf::CachedSize,\n\n}\n\n\n\nimpl<'a> ::std::default::Default for &'a DeleteResponse {\n\n fn default() -> &'a DeleteResponse {\n\n <DeleteResponse as ::protobuf::Message>::default_instance()\n\n }\n\n}\n\n\n\nimpl DeleteResponse {\n\n pub fn new() -> DeleteResponse {\n\n ::std::default::Default::default()\n\n }\n\n\n\n // .kvserver.ResponseStatus status = 1;\n\n\n\n\n\n pub fn get_status(&self) -> ResponseStatus {\n\n self.status\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 24, "score": 15267.998042162164 }, { "content": " pub key: ::std::string::String,\n\n pub value: ::std::string::String,\n\n // special fields\n\n pub unknown_fields: ::protobuf::UnknownFields,\n\n pub cached_size: ::protobuf::CachedSize,\n\n}\n\n\n\nimpl<'a> ::std::default::Default for &'a PutRequest {\n\n fn default() -> &'a PutRequest {\n\n <PutRequest as ::protobuf::Message>::default_instance()\n\n }\n\n}\n\n\n\nimpl PutRequest {\n\n pub fn new() -> PutRequest {\n\n ::std::default::Default::default()\n\n }\n\n\n\n // string key = 1;\n\n\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 25, "score": 15267.947409495908 }, { "content": " pub status: ResponseStatus,\n\n pub value: ::std::string::String,\n\n // special fields\n\n pub unknown_fields: ::protobuf::UnknownFields,\n\n pub cached_size: ::protobuf::CachedSize,\n\n}\n\n\n\nimpl<'a> ::std::default::Default for &'a GetResponse {\n\n fn default() -> &'a GetResponse {\n\n <GetResponse as ::protobuf::Message>::default_instance()\n\n }\n\n}\n\n\n\nimpl GetResponse {\n\n pub fn new() -> GetResponse {\n\n ::std::default::Default::default()\n\n }\n\n\n\n // .kvserver.ResponseStatus status = 1;\n\n\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 26, "score": 15267.897706205444 }, { "content": " pub cached_size: ::protobuf::CachedSize,\n\n}\n\n\n\nimpl<'a> ::std::default::Default for &'a ScanRequest {\n\n fn default() -> &'a ScanRequest {\n\n <ScanRequest as ::protobuf::Message>::default_instance()\n\n }\n\n}\n\n\n\nimpl ScanRequest {\n\n pub fn new() -> ScanRequest {\n\n ::std::default::Default::default()\n\n }\n\n\n\n // string key_start = 1;\n\n\n\n\n\n pub fn get_key_start(&self) -> &str {\n\n &self.key_start\n\n }\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 27, "score": 15267.809543199828 }, { "content": " }\n\n}\n\n\n\nimpl GetRequest {\n\n pub fn new() -> GetRequest {\n\n ::std::default::Default::default()\n\n }\n\n\n\n // string key = 1;\n\n\n\n\n\n pub fn get_key(&self) -> &str {\n\n &self.key\n\n }\n\n pub fn clear_key(&mut self) {\n\n self.key.clear();\n\n }\n\n\n\n // Param is passed by value, moved\n\n pub fn set_key(&mut self, v: ::std::string::String) {\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 28, "score": 15267.702048772311 }, { "content": "\n\nimpl DeleteRequest {\n\n pub fn new() -> DeleteRequest {\n\n ::std::default::Default::default()\n\n }\n\n\n\n // string key = 1;\n\n\n\n\n\n pub fn get_key(&self) -> &str {\n\n &self.key\n\n }\n\n pub fn clear_key(&mut self) {\n\n self.key.clear();\n\n }\n\n\n\n // Param is passed by value, moved\n\n pub fn set_key(&mut self, v: ::std::string::String) {\n\n self.key = v;\n\n }\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 29, "score": 15267.634966457716 }, { "content": "pub enum ResponseStatus {\n\n kNoType = 0,\n\n kSuccess = 1,\n\n kNotFound = 2,\n\n kFailed = 3,\n\n}\n\n\n\nimpl ::protobuf::ProtobufEnum for ResponseStatus {\n\n fn value(&self) -> i32 {\n\n *self as i32\n\n }\n\n\n\n fn from_i32(value: i32) -> ::std::option::Option<ResponseStatus> {\n\n match value {\n\n 0 => ::std::option::Option::Some(ResponseStatus::kNoType),\n\n 1 => ::std::option::Option::Some(ResponseStatus::kSuccess),\n\n 2 => ::std::option::Option::Some(ResponseStatus::kNotFound),\n\n 3 => ::std::option::Option::Some(ResponseStatus::kFailed),\n\n _ => ::std::option::Option::None\n\n }\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 30, "score": 15267.49028718621 }, { "content": "\n\nimpl ::std::fmt::Debug for DeleteResponse {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n ::protobuf::text_format::fmt(self, f)\n\n }\n\n}\n\n\n\nimpl ::protobuf::reflect::ProtobufValue for DeleteResponse {\n\n fn as_ref(&self) -> ::protobuf::reflect::ProtobufValueRef {\n\n ::protobuf::reflect::ProtobufValueRef::Message(self)\n\n }\n\n}\n\n\n\n#[derive(PartialEq,Clone,Default)]\n\npub struct ScanRequest {\n\n // message fields\n\n pub key_start: ::std::string::String,\n\n pub key_end: ::std::string::String,\n\n // special fields\n\n pub unknown_fields: ::protobuf::UnknownFields,\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 31, "score": 15267.4625066226 }, { "content": " }\n\n}\n\n\n\nimpl PutResponse {\n\n pub fn new() -> PutResponse {\n\n ::std::default::Default::default()\n\n }\n\n\n\n // .kvserver.ResponseStatus status = 1;\n\n\n\n\n\n pub fn get_status(&self) -> ResponseStatus {\n\n self.status\n\n }\n\n pub fn clear_status(&mut self) {\n\n self.status = ResponseStatus::kNoType;\n\n }\n\n\n\n // Param is passed by value, moved\n\n pub fn set_status(&mut self, v: ResponseStatus) {\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 32, "score": 15267.439630877514 }, { "content": "impl ScanResponse {\n\n pub fn new() -> ScanResponse {\n\n ::std::default::Default::default()\n\n }\n\n\n\n // .kvserver.ResponseStatus status = 1;\n\n\n\n\n\n pub fn get_status(&self) -> ResponseStatus {\n\n self.status\n\n }\n\n pub fn clear_status(&mut self) {\n\n self.status = ResponseStatus::kNoType;\n\n }\n\n\n\n // Param is passed by value, moved\n\n pub fn set_status(&mut self, v: ResponseStatus) {\n\n self.status = v;\n\n }\n\n\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 33, "score": 15267.376411585785 }, { "content": " }\n\n}\n\n\n\nimpl ::std::fmt::Debug for DeleteRequest {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n ::protobuf::text_format::fmt(self, f)\n\n }\n\n}\n\n\n\nimpl ::protobuf::reflect::ProtobufValue for DeleteRequest {\n\n fn as_ref(&self) -> ::protobuf::reflect::ProtobufValueRef {\n\n ::protobuf::reflect::ProtobufValueRef::Message(self)\n\n }\n\n}\n\n\n\n#[derive(PartialEq,Clone,Default)]\n\npub struct DeleteResponse {\n\n // message fields\n\n pub status: ResponseStatus,\n\n // special fields\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 34, "score": 15267.359804540538 }, { "content": " }\n\n\n\n fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {\n\n Self::descriptor_static()\n\n }\n\n\n\n fn new() -> PutRequest {\n\n PutRequest::new()\n\n }\n\n\n\n fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {\n\n static mut descriptor: ::protobuf::lazy::Lazy<::protobuf::reflect::MessageDescriptor> = ::protobuf::lazy::Lazy {\n\n lock: ::protobuf::lazy::ONCE_INIT,\n\n ptr: 0 as *const ::protobuf::reflect::MessageDescriptor,\n\n };\n\n unsafe {\n\n descriptor.get(|| {\n\n let mut fields = ::std::vec::Vec::new();\n\n fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(\n\n \"key\",\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 35, "score": 15267.267740852321 }, { "content": " };\n\n unsafe {\n\n instance.get(PutResponse::new)\n\n }\n\n }\n\n}\n\n\n\nimpl ::protobuf::Clear for PutResponse {\n\n fn clear(&mut self) {\n\n self.status = ResponseStatus::kNoType;\n\n self.unknown_fields.clear();\n\n }\n\n}\n\n\n\nimpl ::std::fmt::Debug for PutResponse {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n ::protobuf::text_format::fmt(self, f)\n\n }\n\n}\n\n\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 36, "score": 15267.239560905022 }, { "content": " ptr: 0 as *const PutRequest,\n\n };\n\n unsafe {\n\n instance.get(PutRequest::new)\n\n }\n\n }\n\n}\n\n\n\nimpl ::protobuf::Clear for PutRequest {\n\n fn clear(&mut self) {\n\n self.key.clear();\n\n self.value.clear();\n\n self.unknown_fields.clear();\n\n }\n\n}\n\n\n\nimpl ::std::fmt::Debug for PutRequest {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n ::protobuf::text_format::fmt(self, f)\n\n }\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 37, "score": 15267.04950314364 }, { "content": " self.status = v;\n\n }\n\n}\n\n\n\nimpl ::protobuf::Message for PutResponse {\n\n fn is_initialized(&self) -> bool {\n\n true\n\n }\n\n\n\n fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream) -> ::protobuf::ProtobufResult<()> {\n\n while !is.eof()? {\n\n let (field_number, wire_type) = is.read_tag_unpack()?;\n\n match field_number {\n\n 1 => {\n\n ::protobuf::rt::read_proto3_enum_with_unknown_fields_into(wire_type, is, &mut self.status, 1, &mut self.unknown_fields)?\n\n },\n\n _ => {\n\n ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;\n\n },\n\n };\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 38, "score": 15267.03569514203 }, { "content": " fields,\n\n file_descriptor_proto()\n\n )\n\n })\n\n }\n\n }\n\n\n\n fn default_instance() -> &'static GetRequest {\n\n static mut instance: ::protobuf::lazy::Lazy<GetRequest> = ::protobuf::lazy::Lazy {\n\n lock: ::protobuf::lazy::ONCE_INIT,\n\n ptr: 0 as *const GetRequest,\n\n };\n\n unsafe {\n\n instance.get(GetRequest::new)\n\n }\n\n }\n\n}\n\n\n\nimpl ::protobuf::Clear for GetRequest {\n\n fn clear(&mut self) {\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 39, "score": 15266.881473829517 }, { "content": "impl ::protobuf::Message for GetResponse {\n\n fn is_initialized(&self) -> bool {\n\n true\n\n }\n\n\n\n fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream) -> ::protobuf::ProtobufResult<()> {\n\n while !is.eof()? {\n\n let (field_number, wire_type) = is.read_tag_unpack()?;\n\n match field_number {\n\n 1 => {\n\n ::protobuf::rt::read_proto3_enum_with_unknown_fields_into(wire_type, is, &mut self.status, 1, &mut self.unknown_fields)?\n\n },\n\n 2 => {\n\n ::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.value)?;\n\n },\n\n _ => {\n\n ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;\n\n },\n\n };\n\n }\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 40, "score": 15266.848567974006 }, { "content": " )\n\n })\n\n }\n\n }\n\n\n\n fn default_instance() -> &'static DeleteRequest {\n\n static mut instance: ::protobuf::lazy::Lazy<DeleteRequest> = ::protobuf::lazy::Lazy {\n\n lock: ::protobuf::lazy::ONCE_INIT,\n\n ptr: 0 as *const DeleteRequest,\n\n };\n\n unsafe {\n\n instance.get(DeleteRequest::new)\n\n }\n\n }\n\n}\n\n\n\nimpl ::protobuf::Clear for DeleteRequest {\n\n fn clear(&mut self) {\n\n self.key.clear();\n\n self.unknown_fields.clear();\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 41, "score": 15266.820932422756 }, { "content": " file_descriptor_proto()\n\n )\n\n })\n\n }\n\n }\n\n\n\n fn default_instance() -> &'static GetResponse {\n\n static mut instance: ::protobuf::lazy::Lazy<GetResponse> = ::protobuf::lazy::Lazy {\n\n lock: ::protobuf::lazy::ONCE_INIT,\n\n ptr: 0 as *const GetResponse,\n\n };\n\n unsafe {\n\n instance.get(GetResponse::new)\n\n }\n\n }\n\n}\n\n\n\nimpl ::protobuf::Clear for GetResponse {\n\n fn clear(&mut self) {\n\n self.status = ResponseStatus::kNoType;\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 42, "score": 15266.702455924382 }, { "content": " }\n\n }\n\n\n\n fn default_instance() -> &'static DeleteResponse {\n\n static mut instance: ::protobuf::lazy::Lazy<DeleteResponse> = ::protobuf::lazy::Lazy {\n\n lock: ::protobuf::lazy::ONCE_INIT,\n\n ptr: 0 as *const DeleteResponse,\n\n };\n\n unsafe {\n\n instance.get(DeleteResponse::new)\n\n }\n\n }\n\n}\n\n\n\nimpl ::protobuf::Clear for DeleteResponse {\n\n fn clear(&mut self) {\n\n self.status = ResponseStatus::kNoType;\n\n self.unknown_fields.clear();\n\n }\n\n}\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 43, "score": 15266.702455924382 }, { "content": " self.key.clear();\n\n self.unknown_fields.clear();\n\n }\n\n}\n\n\n\nimpl ::std::fmt::Debug for GetRequest {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n ::protobuf::text_format::fmt(self, f)\n\n }\n\n}\n\n\n\nimpl ::protobuf::reflect::ProtobufValue for GetRequest {\n\n fn as_ref(&self) -> ::protobuf::reflect::ProtobufValueRef {\n\n ::protobuf::reflect::ProtobufValueRef::Message(self)\n\n }\n\n}\n\n\n\n#[derive(PartialEq,Clone,Default)]\n\npub struct GetResponse {\n\n // message fields\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 44, "score": 15266.689605808298 }, { "content": " self.value.clear();\n\n self.unknown_fields.clear();\n\n }\n\n}\n\n\n\nimpl ::std::fmt::Debug for GetResponse {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n ::protobuf::text_format::fmt(self, f)\n\n }\n\n}\n\n\n\nimpl ::protobuf::reflect::ProtobufValue for GetResponse {\n\n fn as_ref(&self) -> ::protobuf::reflect::ProtobufValueRef {\n\n ::protobuf::reflect::ProtobufValueRef::Message(self)\n\n }\n\n}\n\n\n\n#[derive(PartialEq,Clone,Default)]\n\npub struct PutRequest {\n\n // message fields\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 45, "score": 15266.689605808298 }, { "content": " \"ScanResponse\",\n\n fields,\n\n file_descriptor_proto()\n\n )\n\n })\n\n }\n\n }\n\n\n\n fn default_instance() -> &'static ScanResponse {\n\n static mut instance: ::protobuf::lazy::Lazy<ScanResponse> = ::protobuf::lazy::Lazy {\n\n lock: ::protobuf::lazy::ONCE_INIT,\n\n ptr: 0 as *const ScanResponse,\n\n };\n\n unsafe {\n\n instance.get(ScanResponse::new)\n\n }\n\n }\n\n}\n\n\n\nimpl ::protobuf::Clear for ScanResponse {\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 46, "score": 15266.296803996054 }, { "content": " fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {\n\n &self.unknown_fields\n\n }\n\n\n\n fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {\n\n &mut self.unknown_fields\n\n }\n\n\n\n fn as_any(&self) -> &::std::any::Any {\n\n self as &::std::any::Any\n\n }\n\n fn as_any_mut(&mut self) -> &mut ::std::any::Any {\n\n self as &mut ::std::any::Any\n\n }\n\n fn into_any(self: Box<Self>) -> ::std::boxed::Box<::std::any::Any> {\n\n self\n\n }\n\n\n\n fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {\n\n Self::descriptor_static()\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 47, "score": 15266.285099845603 }, { "content": "}\n\n\n\nimpl ::protobuf::reflect::ProtobufValue for PutRequest {\n\n fn as_ref(&self) -> ::protobuf::reflect::ProtobufValueRef {\n\n ::protobuf::reflect::ProtobufValueRef::Message(self)\n\n }\n\n}\n\n\n\n#[derive(PartialEq,Clone,Default)]\n\npub struct PutResponse {\n\n // message fields\n\n pub status: ResponseStatus,\n\n // special fields\n\n pub unknown_fields: ::protobuf::UnknownFields,\n\n pub cached_size: ::protobuf::CachedSize,\n\n}\n\n\n\nimpl<'a> ::std::default::Default for &'a PutResponse {\n\n fn default() -> &'a PutResponse {\n\n <PutResponse as ::protobuf::Message>::default_instance()\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 48, "score": 15266.262715144223 }, { "content": "impl ::protobuf::reflect::ProtobufValue for PutResponse {\n\n fn as_ref(&self) -> ::protobuf::reflect::ProtobufValueRef {\n\n ::protobuf::reflect::ProtobufValueRef::Message(self)\n\n }\n\n}\n\n\n\n#[derive(PartialEq,Clone,Default)]\n\npub struct DeleteRequest {\n\n // message fields\n\n pub key: ::std::string::String,\n\n // special fields\n\n pub unknown_fields: ::protobuf::UnknownFields,\n\n pub cached_size: ::protobuf::CachedSize,\n\n}\n\n\n\nimpl<'a> ::std::default::Default for &'a DeleteRequest {\n\n fn default() -> &'a DeleteRequest {\n\n <DeleteRequest as ::protobuf::Message>::default_instance()\n\n }\n\n}\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 49, "score": 15266.234001113 }, { "content": " &self.unknown_fields\n\n }\n\n\n\n fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {\n\n &mut self.unknown_fields\n\n }\n\n\n\n fn as_any(&self) -> &::std::any::Any {\n\n self as &::std::any::Any\n\n }\n\n fn as_any_mut(&mut self) -> &mut ::std::any::Any {\n\n self as &mut ::std::any::Any\n\n }\n\n fn into_any(self: Box<Self>) -> ::std::boxed::Box<::std::any::Any> {\n\n self\n\n }\n\n\n\n fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {\n\n Self::descriptor_static()\n\n }\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 50, "score": 15266.212194008956 }, { "content": " })\n\n }\n\n }\n\n}\n\n\n\nimpl ::std::marker::Copy for ResponseStatus {\n\n}\n\n\n\nimpl ::std::default::Default for ResponseStatus {\n\n fn default() -> Self {\n\n ResponseStatus::kNoType\n\n }\n\n}\n\n\n\nimpl ::protobuf::reflect::ProtobufValue for ResponseStatus {\n\n fn as_ref(&self) -> ::protobuf::reflect::ProtobufValueRef {\n\n ::protobuf::reflect::ProtobufValueRef::Enum(self.descriptor())\n\n }\n\n}\n\n\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 51, "score": 15266.192711075419 }, { "content": "\n\n fn new() -> GetRequest {\n\n GetRequest::new()\n\n }\n\n\n\n fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {\n\n static mut descriptor: ::protobuf::lazy::Lazy<::protobuf::reflect::MessageDescriptor> = ::protobuf::lazy::Lazy {\n\n lock: ::protobuf::lazy::ONCE_INIT,\n\n ptr: 0 as *const ::protobuf::reflect::MessageDescriptor,\n\n };\n\n unsafe {\n\n descriptor.get(|| {\n\n let mut fields = ::std::vec::Vec::new();\n\n fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(\n\n \"key\",\n\n |m: &GetRequest| { &m.key },\n\n |m: &mut GetRequest| { &mut m.key },\n\n ));\n\n ::protobuf::reflect::MessageDescriptor::new::<GetRequest>(\n\n \"GetRequest\",\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 52, "score": 15266.049017291549 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl ::protobuf::Clear for ScanRequest {\n\n fn clear(&mut self) {\n\n self.key_start.clear();\n\n self.key_end.clear();\n\n self.unknown_fields.clear();\n\n }\n\n}\n\n\n\nimpl ::std::fmt::Debug for ScanRequest {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n ::protobuf::text_format::fmt(self, f)\n\n }\n\n}\n\n\n\nimpl ::protobuf::reflect::ProtobufValue for ScanRequest {\n\n fn as_ref(&self) -> ::protobuf::reflect::ProtobufValueRef {\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 53, "score": 15265.881397684818 }, { "content": " DeleteRequest::new()\n\n }\n\n\n\n fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {\n\n static mut descriptor: ::protobuf::lazy::Lazy<::protobuf::reflect::MessageDescriptor> = ::protobuf::lazy::Lazy {\n\n lock: ::protobuf::lazy::ONCE_INIT,\n\n ptr: 0 as *const ::protobuf::reflect::MessageDescriptor,\n\n };\n\n unsafe {\n\n descriptor.get(|| {\n\n let mut fields = ::std::vec::Vec::new();\n\n fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(\n\n \"key\",\n\n |m: &DeleteRequest| { &m.key },\n\n |m: &mut DeleteRequest| { &mut m.key },\n\n ));\n\n ::protobuf::reflect::MessageDescriptor::new::<DeleteRequest>(\n\n \"DeleteRequest\",\n\n fields,\n\n file_descriptor_proto()\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 54, "score": 15265.76883919742 }, { "content": " }\n\n\n\n fn new() -> ScanRequest {\n\n ScanRequest::new()\n\n }\n\n\n\n fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {\n\n static mut descriptor: ::protobuf::lazy::Lazy<::protobuf::reflect::MessageDescriptor> = ::protobuf::lazy::Lazy {\n\n lock: ::protobuf::lazy::ONCE_INIT,\n\n ptr: 0 as *const ::protobuf::reflect::MessageDescriptor,\n\n };\n\n unsafe {\n\n descriptor.get(|| {\n\n let mut fields = ::std::vec::Vec::new();\n\n fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(\n\n \"key_start\",\n\n |m: &ScanRequest| { &m.key_start },\n\n |m: &mut ScanRequest| { &mut m.key_start },\n\n ));\n\n fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 55, "score": 15265.733393675155 }, { "content": " pub fn take_key(&mut self) -> ::std::string::String {\n\n ::std::mem::replace(&mut self.key, ::std::string::String::new())\n\n }\n\n\n\n // string value = 2;\n\n\n\n\n\n pub fn get_value(&self) -> &str {\n\n &self.value\n\n }\n\n pub fn clear_value(&mut self) {\n\n self.value.clear();\n\n }\n\n\n\n // Param is passed by value, moved\n\n pub fn set_value(&mut self, v: ::std::string::String) {\n\n self.value = v;\n\n }\n\n\n\n // Mutable pointer to the field.\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 56, "score": 15265.579939079984 }, { "content": " }\n\n\n\n fn values() -> &'static [Self] {\n\n static values: &'static [ResponseStatus] = &[\n\n ResponseStatus::kNoType,\n\n ResponseStatus::kSuccess,\n\n ResponseStatus::kNotFound,\n\n ResponseStatus::kFailed,\n\n ];\n\n values\n\n }\n\n\n\n fn enum_descriptor_static() -> &'static ::protobuf::reflect::EnumDescriptor {\n\n static mut descriptor: ::protobuf::lazy::Lazy<::protobuf::reflect::EnumDescriptor> = ::protobuf::lazy::Lazy {\n\n lock: ::protobuf::lazy::ONCE_INIT,\n\n ptr: 0 as *const ::protobuf::reflect::EnumDescriptor,\n\n };\n\n unsafe {\n\n descriptor.get(|| {\n\n ::protobuf::reflect::EnumDescriptor::new(\"ResponseStatus\", file_descriptor_proto())\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 57, "score": 15265.535787916531 }, { "content": " static mut descriptor: ::protobuf::lazy::Lazy<::protobuf::reflect::MessageDescriptor> = ::protobuf::lazy::Lazy {\n\n lock: ::protobuf::lazy::ONCE_INIT,\n\n ptr: 0 as *const ::protobuf::reflect::MessageDescriptor,\n\n };\n\n unsafe {\n\n descriptor.get(|| {\n\n let mut fields = ::std::vec::Vec::new();\n\n fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeEnum<ResponseStatus>>(\n\n \"status\",\n\n |m: &GetResponse| { &m.status },\n\n |m: &mut GetResponse| { &mut m.status },\n\n ));\n\n fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(\n\n \"value\",\n\n |m: &GetResponse| { &m.value },\n\n |m: &mut GetResponse| { &mut m.value },\n\n ));\n\n ::protobuf::reflect::MessageDescriptor::new::<GetResponse>(\n\n \"GetResponse\",\n\n fields,\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 58, "score": 15265.500598193838 }, { "content": " descriptor.get(|| {\n\n let mut fields = ::std::vec::Vec::new();\n\n fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeEnum<ResponseStatus>>(\n\n \"status\",\n\n |m: &PutResponse| { &m.status },\n\n |m: &mut PutResponse| { &mut m.status },\n\n ));\n\n ::protobuf::reflect::MessageDescriptor::new::<PutResponse>(\n\n \"PutResponse\",\n\n fields,\n\n file_descriptor_proto()\n\n )\n\n })\n\n }\n\n }\n\n\n\n fn default_instance() -> &'static PutResponse {\n\n static mut instance: ::protobuf::lazy::Lazy<PutResponse> = ::protobuf::lazy::Lazy {\n\n lock: ::protobuf::lazy::ONCE_INIT,\n\n ptr: 0 as *const PutResponse,\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 59, "score": 15265.399494564293 }, { "content": " ::protobuf::reflect::ProtobufValueRef::Message(self)\n\n }\n\n}\n\n\n\n#[derive(PartialEq,Clone,Default)]\n\npub struct ScanResponse {\n\n // message fields\n\n pub status: ResponseStatus,\n\n pub key_value: ::std::collections::HashMap<::std::string::String, ::std::string::String>,\n\n // special fields\n\n pub unknown_fields: ::protobuf::UnknownFields,\n\n pub cached_size: ::protobuf::CachedSize,\n\n}\n\n\n\nimpl<'a> ::std::default::Default for &'a ScanResponse {\n\n fn default() -> &'a ScanResponse {\n\n <ScanResponse as ::protobuf::Message>::default_instance()\n\n }\n\n}\n\n\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 60, "score": 15265.39891968627 }, { "content": "\n\n fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {\n\n static mut descriptor: ::protobuf::lazy::Lazy<::protobuf::reflect::MessageDescriptor> = ::protobuf::lazy::Lazy {\n\n lock: ::protobuf::lazy::ONCE_INIT,\n\n ptr: 0 as *const ::protobuf::reflect::MessageDescriptor,\n\n };\n\n unsafe {\n\n descriptor.get(|| {\n\n let mut fields = ::std::vec::Vec::new();\n\n fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeEnum<ResponseStatus>>(\n\n \"status\",\n\n |m: &ScanResponse| { &m.status },\n\n |m: &mut ScanResponse| { &mut m.status },\n\n ));\n\n fields.push(::protobuf::reflect::accessor::make_map_accessor::<_, ::protobuf::types::ProtobufTypeString, ::protobuf::types::ProtobufTypeString>(\n\n \"key_value\",\n\n |m: &ScanResponse| { &m.key_value },\n\n |m: &mut ScanResponse| { &mut m.key_value },\n\n ));\n\n ::protobuf::reflect::MessageDescriptor::new::<ScanResponse>(\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 61, "score": 15265.394745400063 }, { "content": "\n\n fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {\n\n static mut descriptor: ::protobuf::lazy::Lazy<::protobuf::reflect::MessageDescriptor> = ::protobuf::lazy::Lazy {\n\n lock: ::protobuf::lazy::ONCE_INIT,\n\n ptr: 0 as *const ::protobuf::reflect::MessageDescriptor,\n\n };\n\n unsafe {\n\n descriptor.get(|| {\n\n let mut fields = ::std::vec::Vec::new();\n\n fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeEnum<ResponseStatus>>(\n\n \"status\",\n\n |m: &DeleteResponse| { &m.status },\n\n |m: &mut DeleteResponse| { &mut m.status },\n\n ));\n\n ::protobuf::reflect::MessageDescriptor::new::<DeleteResponse>(\n\n \"DeleteResponse\",\n\n fields,\n\n file_descriptor_proto()\n\n )\n\n })\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 62, "score": 15265.393005140793 }, { "content": " self.value.clear();\n\n }\n\n\n\n // Param is passed by value, moved\n\n pub fn set_value(&mut self, v: ::std::string::String) {\n\n self.value = v;\n\n }\n\n\n\n // Mutable pointer to the field.\n\n // If field is not initialized, it is initialized with default value first.\n\n pub fn mut_value(&mut self) -> &mut ::std::string::String {\n\n &mut self.value\n\n }\n\n\n\n // Take field\n\n pub fn take_value(&mut self) -> ::std::string::String {\n\n ::std::mem::replace(&mut self.value, ::std::string::String::new())\n\n }\n\n}\n\n\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 63, "score": 15265.387465995842 }, { "content": " pub fn clear_key_start(&mut self) {\n\n self.key_start.clear();\n\n }\n\n\n\n // Param is passed by value, moved\n\n pub fn set_key_start(&mut self, v: ::std::string::String) {\n\n self.key_start = v;\n\n }\n\n\n\n // Mutable pointer to the field.\n\n // If field is not initialized, it is initialized with default value first.\n\n pub fn mut_key_start(&mut self) -> &mut ::std::string::String {\n\n &mut self.key_start\n\n }\n\n\n\n // Take field\n\n pub fn take_key_start(&mut self) -> ::std::string::String {\n\n ::std::mem::replace(&mut self.key_start, ::std::string::String::new())\n\n }\n\n\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 64, "score": 15265.314283148022 }, { "content": " fn get_cached_size(&self) -> u32 {\n\n self.cached_size.get()\n\n }\n\n\n\n fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {\n\n &self.unknown_fields\n\n }\n\n\n\n fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {\n\n &mut self.unknown_fields\n\n }\n\n\n\n fn as_any(&self) -> &::std::any::Any {\n\n self as &::std::any::Any\n\n }\n\n fn as_any_mut(&mut self) -> &mut ::std::any::Any {\n\n self as &mut ::std::any::Any\n\n }\n\n fn into_any(self: Box<Self>) -> ::std::boxed::Box<::std::any::Any> {\n\n self\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 65, "score": 15265.294090251811 }, { "content": " fn clear(&mut self) {\n\n self.status = ResponseStatus::kNoType;\n\n self.key_value.clear();\n\n self.unknown_fields.clear();\n\n }\n\n}\n\n\n\nimpl ::std::fmt::Debug for ScanResponse {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n ::protobuf::text_format::fmt(self, f)\n\n }\n\n}\n\n\n\nimpl ::protobuf::reflect::ProtobufValue for ScanResponse {\n\n fn as_ref(&self) -> ::protobuf::reflect::ProtobufValueRef {\n\n ::protobuf::reflect::ProtobufValueRef::Message(self)\n\n }\n\n}\n\n\n\n#[derive(Clone,PartialEq,Eq,Debug,Hash)]\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 66, "score": 15265.275741143265 }, { "content": " \"key_end\",\n\n |m: &ScanRequest| { &m.key_end },\n\n |m: &mut ScanRequest| { &mut m.key_end },\n\n ));\n\n ::protobuf::reflect::MessageDescriptor::new::<ScanRequest>(\n\n \"ScanRequest\",\n\n fields,\n\n file_descriptor_proto()\n\n )\n\n })\n\n }\n\n }\n\n\n\n fn default_instance() -> &'static ScanRequest {\n\n static mut instance: ::protobuf::lazy::Lazy<ScanRequest> = ::protobuf::lazy::Lazy {\n\n lock: ::protobuf::lazy::ONCE_INIT,\n\n ptr: 0 as *const ScanRequest,\n\n };\n\n unsafe {\n\n instance.get(ScanRequest::new)\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 67, "score": 15265.252312136812 }, { "content": " |m: &PutRequest| { &m.key },\n\n |m: &mut PutRequest| { &mut m.key },\n\n ));\n\n fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(\n\n \"value\",\n\n |m: &PutRequest| { &m.value },\n\n |m: &mut PutRequest| { &mut m.value },\n\n ));\n\n ::protobuf::reflect::MessageDescriptor::new::<PutRequest>(\n\n \"PutRequest\",\n\n fields,\n\n file_descriptor_proto()\n\n )\n\n })\n\n }\n\n }\n\n\n\n fn default_instance() -> &'static PutRequest {\n\n static mut instance: ::protobuf::lazy::Lazy<PutRequest> = ::protobuf::lazy::Lazy {\n\n lock: ::protobuf::lazy::ONCE_INIT,\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 68, "score": 15264.972200981045 }, { "content": " fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream) -> ::protobuf::ProtobufResult<()> {\n\n while !is.eof()? {\n\n let (field_number, wire_type) = is.read_tag_unpack()?;\n\n match field_number {\n\n 1 => {\n\n ::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.key)?;\n\n },\n\n _ => {\n\n ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;\n\n },\n\n };\n\n }\n\n ::std::result::Result::Ok(())\n\n }\n\n\n\n // Compute sizes of nested messages\n\n #[allow(unused_variables)]\n\n fn compute_size(&self) -> u32 {\n\n let mut my_size = 0;\n\n if !self.key.is_empty() {\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 69, "score": 15264.949835195579 }, { "content": " let (field_number, wire_type) = is.read_tag_unpack()?;\n\n match field_number {\n\n 1 => {\n\n ::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.key)?;\n\n },\n\n _ => {\n\n ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;\n\n },\n\n };\n\n }\n\n ::std::result::Result::Ok(())\n\n }\n\n\n\n // Compute sizes of nested messages\n\n #[allow(unused_variables)]\n\n fn compute_size(&self) -> u32 {\n\n let mut my_size = 0;\n\n if !self.key.is_empty() {\n\n my_size += ::protobuf::rt::string_size(1, &self.key);\n\n }\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 70, "score": 15264.414400788311 }, { "content": "\n\n pub fn get_key(&self) -> &str {\n\n &self.key\n\n }\n\n pub fn clear_key(&mut self) {\n\n self.key.clear();\n\n }\n\n\n\n // Param is passed by value, moved\n\n pub fn set_key(&mut self, v: ::std::string::String) {\n\n self.key = v;\n\n }\n\n\n\n // Mutable pointer to the field.\n\n // If field is not initialized, it is initialized with default value first.\n\n pub fn mut_key(&mut self) -> &mut ::std::string::String {\n\n &mut self.key\n\n }\n\n\n\n // Take field\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 71, "score": 15263.826441779169 }, { "content": "\n\n pub fn get_status(&self) -> ResponseStatus {\n\n self.status\n\n }\n\n pub fn clear_status(&mut self) {\n\n self.status = ResponseStatus::kNoType;\n\n }\n\n\n\n // Param is passed by value, moved\n\n pub fn set_status(&mut self, v: ResponseStatus) {\n\n self.status = v;\n\n }\n\n\n\n // string value = 2;\n\n\n\n\n\n pub fn get_value(&self) -> &str {\n\n &self.value\n\n }\n\n pub fn clear_value(&mut self) {\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 72, "score": 15263.75679509721 }, { "content": " // string key_end = 2;\n\n\n\n\n\n pub fn get_key_end(&self) -> &str {\n\n &self.key_end\n\n }\n\n pub fn clear_key_end(&mut self) {\n\n self.key_end.clear();\n\n }\n\n\n\n // Param is passed by value, moved\n\n pub fn set_key_end(&mut self, v: ::std::string::String) {\n\n self.key_end = v;\n\n }\n\n\n\n // Mutable pointer to the field.\n\n // If field is not initialized, it is initialized with default value first.\n\n pub fn mut_key_end(&mut self) -> &mut ::std::string::String {\n\n &mut self.key_end\n\n }\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 73, "score": 15263.658505973544 }, { "content": " // repeated .kvserver.ScanResponse.KeyValueEntry key_value = 2;\n\n\n\n\n\n pub fn get_key_value(&self) -> &::std::collections::HashMap<::std::string::String, ::std::string::String> {\n\n &self.key_value\n\n }\n\n pub fn clear_key_value(&mut self) {\n\n self.key_value.clear();\n\n }\n\n\n\n // Param is passed by value, moved\n\n pub fn set_key_value(&mut self, v: ::std::collections::HashMap<::std::string::String, ::std::string::String>) {\n\n self.key_value = v;\n\n }\n\n\n\n // Mutable pointer to the field.\n\n pub fn mut_key_value(&mut self) -> &mut ::std::collections::HashMap<::std::string::String, ::std::string::String> {\n\n &mut self.key_value\n\n }\n\n\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 74, "score": 15263.268480211067 }, { "content": " }\n\n\n\n fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream) -> ::protobuf::ProtobufResult<()> {\n\n if self.status != ResponseStatus::kNoType {\n\n os.write_enum(1, self.status.value())?;\n\n }\n\n ::protobuf::rt::write_map_with_cached_sizes::<::protobuf::types::ProtobufTypeString, ::protobuf::types::ProtobufTypeString>(2, &self.key_value, os)?;\n\n os.write_unknown_fields(self.get_unknown_fields())?;\n\n ::std::result::Result::Ok(())\n\n }\n\n\n\n fn get_cached_size(&self) -> u32 {\n\n self.cached_size.get()\n\n }\n\n\n\n fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {\n\n &self.unknown_fields\n\n }\n\n\n\n fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 75, "score": 15263.154911933163 }, { "content": " my_size\n\n }\n\n\n\n fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream) -> ::protobuf::ProtobufResult<()> {\n\n if self.status != ResponseStatus::kNoType {\n\n os.write_enum(1, self.status.value())?;\n\n }\n\n os.write_unknown_fields(self.get_unknown_fields())?;\n\n ::std::result::Result::Ok(())\n\n }\n\n\n\n fn get_cached_size(&self) -> u32 {\n\n self.cached_size.get()\n\n }\n\n\n\n fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {\n\n &self.unknown_fields\n\n }\n\n\n\n fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 76, "score": 15263.040656058385 }, { "content": " os.write_unknown_fields(self.get_unknown_fields())?;\n\n ::std::result::Result::Ok(())\n\n }\n\n\n\n fn get_cached_size(&self) -> u32 {\n\n self.cached_size.get()\n\n }\n\n\n\n fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {\n\n &self.unknown_fields\n\n }\n\n\n\n fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {\n\n &mut self.unknown_fields\n\n }\n\n\n\n fn as_any(&self) -> &::std::any::Any {\n\n self as &::std::any::Any\n\n }\n\n fn as_any_mut(&mut self) -> &mut ::std::any::Any {\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 77, "score": 15262.928219712963 }, { "content": " 1 => {\n\n ::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.key)?;\n\n },\n\n 2 => {\n\n ::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.value)?;\n\n },\n\n _ => {\n\n ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;\n\n },\n\n };\n\n }\n\n ::std::result::Result::Ok(())\n\n }\n\n\n\n // Compute sizes of nested messages\n\n #[allow(unused_variables)]\n\n fn compute_size(&self) -> u32 {\n\n let mut my_size = 0;\n\n if !self.key.is_empty() {\n\n my_size += ::protobuf::rt::string_size(1, &self.key);\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 78, "score": 15262.882613291471 }, { "content": " 1 => {\n\n ::protobuf::rt::read_proto3_enum_with_unknown_fields_into(wire_type, is, &mut self.status, 1, &mut self.unknown_fields)?\n\n },\n\n _ => {\n\n ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;\n\n },\n\n };\n\n }\n\n ::std::result::Result::Ok(())\n\n }\n\n\n\n // Compute sizes of nested messages\n\n #[allow(unused_variables)]\n\n fn compute_size(&self) -> u32 {\n\n let mut my_size = 0;\n\n if self.status != ResponseStatus::kNoType {\n\n my_size += ::protobuf::rt::enum_size(1, self.status);\n\n }\n\n my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());\n\n self.cached_size.set(my_size);\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 79, "score": 15262.742608822746 }, { "content": " ::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.key_end)?;\n\n },\n\n _ => {\n\n ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;\n\n },\n\n };\n\n }\n\n ::std::result::Result::Ok(())\n\n }\n\n\n\n // Compute sizes of nested messages\n\n #[allow(unused_variables)]\n\n fn compute_size(&self) -> u32 {\n\n let mut my_size = 0;\n\n if !self.key_start.is_empty() {\n\n my_size += ::protobuf::rt::string_size(1, &self.key_start);\n\n }\n\n if !self.key_end.is_empty() {\n\n my_size += ::protobuf::rt::string_size(2, &self.key_end);\n\n }\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 80, "score": 15262.624442655737 }, { "content": " },\n\n _ => {\n\n ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;\n\n },\n\n };\n\n }\n\n ::std::result::Result::Ok(())\n\n }\n\n\n\n // Compute sizes of nested messages\n\n #[allow(unused_variables)]\n\n fn compute_size(&self) -> u32 {\n\n let mut my_size = 0;\n\n if self.status != ResponseStatus::kNoType {\n\n my_size += ::protobuf::rt::enum_size(1, self.status);\n\n }\n\n my_size += ::protobuf::rt::compute_map_size::<::protobuf::types::ProtobufTypeString, ::protobuf::types::ProtobufTypeString>(2, &self.key_value);\n\n my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());\n\n self.cached_size.set(my_size);\n\n my_size\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 81, "score": 15262.55974501527 }, { "content": " os.write_enum(1, self.status.value())?;\n\n }\n\n if !self.value.is_empty() {\n\n os.write_string(2, &self.value)?;\n\n }\n\n os.write_unknown_fields(self.get_unknown_fields())?;\n\n ::std::result::Result::Ok(())\n\n }\n\n\n\n fn get_cached_size(&self) -> u32 {\n\n self.cached_size.get()\n\n }\n\n\n\n fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {\n\n &self.unknown_fields\n\n }\n\n\n\n fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {\n\n &mut self.unknown_fields\n\n }\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 82, "score": 15262.538808946943 }, { "content": " ::std::result::Result::Ok(())\n\n }\n\n\n\n // Compute sizes of nested messages\n\n #[allow(unused_variables)]\n\n fn compute_size(&self) -> u32 {\n\n let mut my_size = 0;\n\n if self.status != ResponseStatus::kNoType {\n\n my_size += ::protobuf::rt::enum_size(1, self.status);\n\n }\n\n if !self.value.is_empty() {\n\n my_size += ::protobuf::rt::string_size(2, &self.value);\n\n }\n\n my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());\n\n self.cached_size.set(my_size);\n\n my_size\n\n }\n\n\n\n fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream) -> ::protobuf::ProtobufResult<()> {\n\n if self.status != ResponseStatus::kNoType {\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 83, "score": 15262.493914033143 }, { "content": " }\n\n ::std::result::Result::Ok(())\n\n }\n\n\n\n // Compute sizes of nested messages\n\n #[allow(unused_variables)]\n\n fn compute_size(&self) -> u32 {\n\n let mut my_size = 0;\n\n if self.status != ResponseStatus::kNoType {\n\n my_size += ::protobuf::rt::enum_size(1, self.status);\n\n }\n\n my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());\n\n self.cached_size.set(my_size);\n\n my_size\n\n }\n\n\n\n fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream) -> ::protobuf::ProtobufResult<()> {\n\n if self.status != ResponseStatus::kNoType {\n\n os.write_enum(1, self.status.value())?;\n\n }\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 84, "score": 15262.442574677425 }, { "content": " utResponse\\\"\\0\\x12=\\n\\x06Delete\\x12\\x17.kvserver.DeleteRequest\\x1a\\x18.k\\\n\n vserver.DeleteResponse\\\"\\0\\x127\\n\\x04Scan\\x12\\x15.kvserver.ScanRequest\\\n\n \\x1a\\x16.kvserver.ScanResponse\\\"\\0b\\x06proto3\\\n\n\";\n\n\n\nstatic mut file_descriptor_proto_lazy: ::protobuf::lazy::Lazy<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::lazy::Lazy {\n\n lock: ::protobuf::lazy::ONCE_INIT,\n\n ptr: 0 as *const ::protobuf::descriptor::FileDescriptorProto,\n\n};\n\n\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 85, "score": 15261.920615347888 }, { "content": " my_size += ::protobuf::rt::string_size(1, &self.key);\n\n }\n\n my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());\n\n self.cached_size.set(my_size);\n\n my_size\n\n }\n\n\n\n fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream) -> ::protobuf::ProtobufResult<()> {\n\n if !self.key.is_empty() {\n\n os.write_string(1, &self.key)?;\n\n }\n\n os.write_unknown_fields(self.get_unknown_fields())?;\n\n ::std::result::Result::Ok(())\n\n }\n\n\n\n fn get_cached_size(&self) -> u32 {\n\n self.cached_size.get()\n\n }\n\n\n\n fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 86, "score": 15261.911726737262 }, { "content": " }\n\n if !self.value.is_empty() {\n\n my_size += ::protobuf::rt::string_size(2, &self.value);\n\n }\n\n my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());\n\n self.cached_size.set(my_size);\n\n my_size\n\n }\n\n\n\n fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream) -> ::protobuf::ProtobufResult<()> {\n\n if !self.key.is_empty() {\n\n os.write_string(1, &self.key)?;\n\n }\n\n if !self.value.is_empty() {\n\n os.write_string(2, &self.value)?;\n\n }\n\n os.write_unknown_fields(self.get_unknown_fields())?;\n\n ::std::result::Result::Ok(())\n\n }\n\n\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 87, "score": 15261.900807078495 }, { "content": " my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());\n\n self.cached_size.set(my_size);\n\n my_size\n\n }\n\n\n\n fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream) -> ::protobuf::ProtobufResult<()> {\n\n if !self.key.is_empty() {\n\n os.write_string(1, &self.key)?;\n\n }\n\n os.write_unknown_fields(self.get_unknown_fields())?;\n\n ::std::result::Result::Ok(())\n\n }\n\n\n\n fn get_cached_size(&self) -> u32 {\n\n self.cached_size.get()\n\n }\n\n\n\n fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {\n\n &self.unknown_fields\n\n }\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 88, "score": 15261.881224777504 }, { "content": " my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());\n\n self.cached_size.set(my_size);\n\n my_size\n\n }\n\n\n\n fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream) -> ::protobuf::ProtobufResult<()> {\n\n if !self.key_start.is_empty() {\n\n os.write_string(1, &self.key_start)?;\n\n }\n\n if !self.key_end.is_empty() {\n\n os.write_string(2, &self.key_end)?;\n\n }\n\n os.write_unknown_fields(self.get_unknown_fields())?;\n\n ::std::result::Result::Ok(())\n\n }\n\n\n\n fn get_cached_size(&self) -> u32 {\n\n self.cached_size.get()\n\n }\n\n\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 89, "score": 15261.6986270812 }, { "content": "// This file is generated by rust-protobuf 2.7.0. Do not edit\n\n// @generated\n\n\n\n// https://github.com/Manishearth/rust-clippy/issues/702\n\n#![allow(unknown_lints)]\n\n#![allow(clippy::all)]\n\n\n\n#![cfg_attr(rustfmt, rustfmt_skip)]\n\n\n\n#![allow(box_pointers)]\n\n#![allow(dead_code)]\n\n#![allow(missing_docs)]\n\n#![allow(non_camel_case_types)]\n\n#![allow(non_snake_case)]\n\n#![allow(non_upper_case_globals)]\n\n#![allow(trivial_casts)]\n\n#![allow(unsafe_code)]\n\n#![allow(unused_imports)]\n\n#![allow(unused_results)]\n\n//! Generated file from `kvserver.proto`\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 90, "score": 15259.048372491096 }, { "content": "static file_descriptor_proto_data: &'static [u8] = b\"\\\n\n \\n\\x0ekvserver.proto\\x12\\x08kvserver\\\"\\x1e\\n\\nGetRequest\\x12\\x10\\n\\x03ke\\\n\n y\\x18\\x01\\x20\\x01(\\tR\\x03key\\\"U\\n\\x0bGetResponse\\x120\\n\\x06status\\x18\\\n\n \\x01\\x20\\x01(\\x0e2\\x18.kvserver.ResponseStatusR\\x06status\\x12\\x14\\n\\x05v\\\n\n alue\\x18\\x02\\x20\\x01(\\tR\\x05value\\\"4\\n\\nPutRequest\\x12\\x10\\n\\x03key\\x18\\\n\n \\x01\\x20\\x01(\\tR\\x03key\\x12\\x14\\n\\x05value\\x18\\x02\\x20\\x01(\\tR\\x05value\\\n\n \\\"?\\n\\x0bPutResponse\\x120\\n\\x06status\\x18\\x01\\x20\\x01(\\x0e2\\x18.kvserver\\\n\n .ResponseStatusR\\x06status\\\"!\\n\\rDeleteRequest\\x12\\x10\\n\\x03key\\x18\\x01\\\n\n \\x20\\x01(\\tR\\x03key\\\"B\\n\\x0eDeleteResponse\\x120\\n\\x06status\\x18\\x01\\x20\\\n\n \\x01(\\x0e2\\x18.kvserver.ResponseStatusR\\x06status\\\"C\\n\\x0bScanRequest\\\n\n \\x12\\x1b\\n\\tkey_start\\x18\\x01\\x20\\x01(\\tR\\x08keyStart\\x12\\x17\\n\\x07key_e\\\n\n nd\\x18\\x02\\x20\\x01(\\tR\\x06keyEnd\\\"\\xc0\\x01\\n\\x0cScanResponse\\x120\\n\\x06s\\\n\n tatus\\x18\\x01\\x20\\x01(\\x0e2\\x18.kvserver.ResponseStatusR\\x06status\\x12A\\\n\n \\n\\tkey_value\\x18\\x02\\x20\\x03(\\x0b2$.kvserver.ScanResponse.KeyValueEntry\\\n\n R\\x08keyValue\\x1a;\\n\\rKeyValueEntry\\x12\\x10\\n\\x03key\\x18\\x01\\x20\\x01(\\tR\\\n\n \\x03key\\x12\\x14\\n\\x05value\\x18\\x02\\x20\\x01(\\tR\\x05value:\\x028\\x01*G\\n\\\n\n \\x0eResponseStatus\\x12\\x0b\\n\\x07kNoType\\x10\\0\\x12\\x0c\\n\\x08kSuccess\\x10\\\n\n \\x01\\x12\\r\\n\\tkNotFound\\x10\\x02\\x12\\x0b\\n\\x07kFailed\\x10\\x032\\xea\\x01\\n\\\n\n \\x04Kvdb\\x124\\n\\x03Get\\x12\\x14.kvserver.GetRequest\\x1a\\x15.kvserver.GetR\\\n\n esponse\\\"\\0\\x124\\n\\x03Put\\x12\\x14.kvserver.PutRequest\\x1a\\x15.kvserver.P\\\n", "file_path": "kvserver/src/kvprotos/kvserver.rs", "rank": 91, "score": 15258.342942115136 }, { "content": "\n\nconst METHOD_KVDB_SCAN: ::grpcio::Method<super::kvserver::ScanRequest, super::kvserver::ScanResponse> = ::grpcio::Method {\n\n ty: ::grpcio::MethodType::Unary,\n\n name: \"/kvserver.Kvdb/Scan\",\n\n req_mar: ::grpcio::Marshaller { ser: ::grpcio::pb_ser, de: ::grpcio::pb_de },\n\n resp_mar: ::grpcio::Marshaller { ser: ::grpcio::pb_ser, de: ::grpcio::pb_de },\n\n};\n\n\n\n#[derive(Clone)]\n\npub struct KvdbClient {\n\n client: ::grpcio::Client,\n\n}\n\n\n\nimpl KvdbClient {\n\n pub fn new(channel: ::grpcio::Channel) -> Self {\n\n KvdbClient {\n\n client: ::grpcio::Client::new(channel),\n\n }\n\n }\n\n\n", "file_path": "kvserver/src/kvprotos/kvserver_grpc.rs", "rank": 92, "score": 14394.389138758987 }, { "content": " pub fn delete_async_opt(&self, req: &super::kvserver::DeleteRequest, opt: ::grpcio::CallOption) -> ::grpcio::Result<::grpcio::ClientUnaryReceiver<super::kvserver::DeleteResponse>> {\n\n self.client.unary_call_async(&METHOD_KVDB_DELETE, req, opt)\n\n }\n\n\n\n pub fn delete_async(&self, req: &super::kvserver::DeleteRequest) -> ::grpcio::Result<::grpcio::ClientUnaryReceiver<super::kvserver::DeleteResponse>> {\n\n self.delete_async_opt(req, ::grpcio::CallOption::default())\n\n }\n\n\n\n pub fn scan_opt(&self, req: &super::kvserver::ScanRequest, opt: ::grpcio::CallOption) -> ::grpcio::Result<super::kvserver::ScanResponse> {\n\n self.client.unary_call(&METHOD_KVDB_SCAN, req, opt)\n\n }\n\n\n\n pub fn scan(&self, req: &super::kvserver::ScanRequest) -> ::grpcio::Result<super::kvserver::ScanResponse> {\n\n self.scan_opt(req, ::grpcio::CallOption::default())\n\n }\n\n\n\n pub fn scan_async_opt(&self, req: &super::kvserver::ScanRequest, opt: ::grpcio::CallOption) -> ::grpcio::Result<::grpcio::ClientUnaryReceiver<super::kvserver::ScanResponse>> {\n\n self.client.unary_call_async(&METHOD_KVDB_SCAN, req, opt)\n\n }\n\n\n\n pub fn scan_async(&self, req: &super::kvserver::ScanRequest) -> ::grpcio::Result<::grpcio::ClientUnaryReceiver<super::kvserver::ScanResponse>> {\n\n self.scan_async_opt(req, ::grpcio::CallOption::default())\n\n }\n\n pub fn spawn<F>(&self, f: F) where F: ::futures::Future<Item = (), Error = ()> + Send + 'static {\n\n self.client.spawn(f)\n\n }\n\n}\n\n\n", "file_path": "kvserver/src/kvprotos/kvserver_grpc.rs", "rank": 93, "score": 14393.229908677542 }, { "content": " pub fn get_opt(&self, req: &super::kvserver::GetRequest, opt: ::grpcio::CallOption) -> ::grpcio::Result<super::kvserver::GetResponse> {\n\n self.client.unary_call(&METHOD_KVDB_GET, req, opt)\n\n }\n\n\n\n pub fn get(&self, req: &super::kvserver::GetRequest) -> ::grpcio::Result<super::kvserver::GetResponse> {\n\n self.get_opt(req, ::grpcio::CallOption::default())\n\n }\n\n\n\n pub fn get_async_opt(&self, req: &super::kvserver::GetRequest, opt: ::grpcio::CallOption) -> ::grpcio::Result<::grpcio::ClientUnaryReceiver<super::kvserver::GetResponse>> {\n\n self.client.unary_call_async(&METHOD_KVDB_GET, req, opt)\n\n }\n\n\n\n pub fn get_async(&self, req: &super::kvserver::GetRequest) -> ::grpcio::Result<::grpcio::ClientUnaryReceiver<super::kvserver::GetResponse>> {\n\n self.get_async_opt(req, ::grpcio::CallOption::default())\n\n }\n\n\n\n pub fn put_opt(&self, req: &super::kvserver::PutRequest, opt: ::grpcio::CallOption) -> ::grpcio::Result<super::kvserver::PutResponse> {\n\n self.client.unary_call(&METHOD_KVDB_PUT, req, opt)\n\n }\n\n\n", "file_path": "kvserver/src/kvprotos/kvserver_grpc.rs", "rank": 94, "score": 14389.653492479985 }, { "content": " pub fn put(&self, req: &super::kvserver::PutRequest) -> ::grpcio::Result<super::kvserver::PutResponse> {\n\n self.put_opt(req, ::grpcio::CallOption::default())\n\n }\n\n\n\n pub fn put_async_opt(&self, req: &super::kvserver::PutRequest, opt: ::grpcio::CallOption) -> ::grpcio::Result<::grpcio::ClientUnaryReceiver<super::kvserver::PutResponse>> {\n\n self.client.unary_call_async(&METHOD_KVDB_PUT, req, opt)\n\n }\n\n\n\n pub fn put_async(&self, req: &super::kvserver::PutRequest) -> ::grpcio::Result<::grpcio::ClientUnaryReceiver<super::kvserver::PutResponse>> {\n\n self.put_async_opt(req, ::grpcio::CallOption::default())\n\n }\n\n\n\n pub fn delete_opt(&self, req: &super::kvserver::DeleteRequest, opt: ::grpcio::CallOption) -> ::grpcio::Result<super::kvserver::DeleteResponse> {\n\n self.client.unary_call(&METHOD_KVDB_DELETE, req, opt)\n\n }\n\n\n\n pub fn delete(&self, req: &super::kvserver::DeleteRequest) -> ::grpcio::Result<super::kvserver::DeleteResponse> {\n\n self.delete_opt(req, ::grpcio::CallOption::default())\n\n }\n\n\n", "file_path": "kvserver/src/kvprotos/kvserver_grpc.rs", "rank": 95, "score": 14389.33575058787 }, { "content": "const METHOD_KVDB_GET: ::grpcio::Method<super::kvserver::GetRequest, super::kvserver::GetResponse> = ::grpcio::Method {\n\n ty: ::grpcio::MethodType::Unary,\n\n name: \"/kvserver.Kvdb/Get\",\n\n req_mar: ::grpcio::Marshaller { ser: ::grpcio::pb_ser, de: ::grpcio::pb_de },\n\n resp_mar: ::grpcio::Marshaller { ser: ::grpcio::pb_ser, de: ::grpcio::pb_de },\n\n};\n\n\n\nconst METHOD_KVDB_PUT: ::grpcio::Method<super::kvserver::PutRequest, super::kvserver::PutResponse> = ::grpcio::Method {\n\n ty: ::grpcio::MethodType::Unary,\n\n name: \"/kvserver.Kvdb/Put\",\n\n req_mar: ::grpcio::Marshaller { ser: ::grpcio::pb_ser, de: ::grpcio::pb_de },\n\n resp_mar: ::grpcio::Marshaller { ser: ::grpcio::pb_ser, de: ::grpcio::pb_de },\n\n};\n\n\n\nconst METHOD_KVDB_DELETE: ::grpcio::Method<super::kvserver::DeleteRequest, super::kvserver::DeleteResponse> = ::grpcio::Method {\n\n ty: ::grpcio::MethodType::Unary,\n\n name: \"/kvserver.Kvdb/Delete\",\n\n req_mar: ::grpcio::Marshaller { ser: ::grpcio::pb_ser, de: ::grpcio::pb_de },\n\n resp_mar: ::grpcio::Marshaller { ser: ::grpcio::pb_ser, de: ::grpcio::pb_de },\n\n};\n", "file_path": "kvserver/src/kvprotos/kvserver_grpc.rs", "rank": 96, "score": 14386.648521181189 }, { "content": "// This file is generated. Do not edit\n\n// @generated\n\n\n\n// https://github.com/Manishearth/rust-clippy/issues/702\n\n#![allow(unknown_lints)]\n\n#![allow(clippy::all)]\n\n\n\n#![cfg_attr(rustfmt, rustfmt_skip)]\n\n\n\n#![allow(box_pointers)]\n\n#![allow(dead_code)]\n\n#![allow(missing_docs)]\n\n#![allow(non_camel_case_types)]\n\n#![allow(non_snake_case)]\n\n#![allow(non_upper_case_globals)]\n\n#![allow(trivial_casts)]\n\n#![allow(unsafe_code)]\n\n#![allow(unused_imports)]\n\n#![allow(unused_results)]\n\n\n", "file_path": "kvserver/src/kvprotos/kvserver_grpc.rs", "rank": 97, "score": 14377.07128898987 }, { "content": "extern crate protobuf;\n\nextern crate grpcio;\n\nextern crate futures;\n\n\n\npub mod kvprotos;\n\n// pub mod engine;\n", "file_path": "kvserver/src/lib.rs", "rank": 98, "score": 28.738373323658344 } ]
Rust
src/main.rs
rodneylab/cmessless
7ba78963aea19b3618c60215a55745207195960f
mod parser; mod utility; use atty::{is, Stream}; use clap::Parser; use std::{ fs, io::{self, BufRead}, path::{Path, PathBuf}, }; use watchexec::{ config::{Config, ConfigBuilder}, error::Result, pathop::PathOp, run::{watch, ExecHandler, Handler}, }; use parser::{author_name_from_cargo_pkg_authors, parse_mdx_file}; #[derive(Parser)] #[clap(author,version,about,long_about=None)] struct Cli { path: Vec<PathBuf>, #[clap(short, long)] check: bool, #[clap(short, long)] modified: bool, #[clap(short = 'R', long)] relative: bool, #[clap(short, long)] verbose: bool, #[clap(short = 'V', long)] version: bool, #[clap(short, long)] watch: bool, #[clap(parse(from_os_str))] #[clap(short, long)] output: std::path::PathBuf, } fn get_title() -> String { let mut the_title = String::from(env!("CARGO_PKG_NAME")); the_title.push_str(" (v"); the_title.push_str(env!("CARGO_PKG_VERSION")); the_title.push_str("), "); the_title.push_str(env!("CARGO_PKG_DESCRIPTION")); the_title } fn print_short_banner() { println!("{}", get_title()); } fn print_long_banner() { print_short_banner(); println!( "Written by: {}", author_name_from_cargo_pkg_authors().trim() ); println!("Repo: {}", env!("CARGO_PKG_REPOSITORY")); println!("Usage: {} <somefile>.mdx", env!("CARGO_PKG_NAME")); println!(" {} --watch <somefile>.mdx", env!("CARGO_PKG_NAME")); } struct CmslessHandler(ExecHandler); impl Handler for CmslessHandler { fn args(&self) -> Config { self.0.args() } fn on_manual(&self) -> Result<bool> { println!("[ INFO ] Running manually..."); self.0.on_manual() } fn on_update(&self, ops: &[PathOp]) -> Result<bool> { println!("[ INFO ] Running manually {:?}...", ops); self.0.on_update(ops) } } fn parse_then_watch(mdx_paths: &[PathBuf], output_path: &Path, verbose: bool) -> Result<()> { let output_path_str = output_path.to_string_lossy(); let mut command: Vec<String> = vec!["cmessless".into()]; command.extend(mdx_paths.iter().map(|value| value.to_string_lossy().into())); command.push("--check --output".into()); command.push(output_path.to_string_lossy().into()); command.push("| cmessless --relative".into()); if verbose { command.push("--verbose".into()); } command.push(" --output".into()); command.push(output_path_str.into()); let config = ConfigBuilder::default() .clear_screen(true) .run_initially(true) .paths(mdx_paths) .cmd(command) .build() .expect("[ ERROR ] Issue while configuring watchexec"); let handler = CmslessHandler( ExecHandler::new(config).expect("[ ERROR ] Issue while creating watchexec handler"), ); watch(&handler) } fn relative_output_path_from_input(input_path: &Path, relative_output_path: &Path) -> PathBuf { match input_path.to_string_lossy().find("/./") { Some(_) => {} None => panic!( "[ ERROR ] Using relative mode: check input paths include a \"/./\" marker to separate \ root and relative parts." ), } let mut components = input_path.components(); loop { if components.as_path().to_string_lossy().find("/./") == None { break; } components.next(); } let mut result = PathBuf::new(); result.push(relative_output_path); let tail = components.as_path(); let mut output_file_name = String::from(tail.file_stem().unwrap().to_string_lossy()); output_file_name.push_str(".astro"); relative_output_path .join(tail.parent().unwrap()) .join(PathBuf::from(output_file_name)) } fn check_modified_files(mdx_paths: &[PathBuf], relative_output_path: &Path) { let mut modified_files: Vec<String> = Vec::new(); for input_path in mdx_paths { let output_path = relative_output_path_from_input(input_path.as_path(), relative_output_path); let input_modified = match fs::metadata(input_path).unwrap().modified() { Ok(value) => Some(value), Err(_) => None, }; let output_modified = match fs::metadata(output_path) { Ok(metadata_value) => match metadata_value.modified() { Ok(value) => Some(value), Err(_) => None, }, Err(_) => None, }; if output_modified == None || input_modified == None || input_modified.unwrap() > output_modified.unwrap() { modified_files.push(String::from(input_path.to_string_lossy())); } } println!("{}", modified_files.join(" ")); } fn parse_multiple_files(mdx_paths: &[PathBuf], relative_output_path: &Path, verbose: bool) { for input_path in mdx_paths { let output_path = relative_output_path_from_input(input_path.as_path(), relative_output_path); parse_mdx_file(input_path.as_path(), output_path.as_path(), verbose); } println!("\n[ INFO ] {} files parsed.", mdx_paths.len()); } fn get_piped_input() -> Vec<PathBuf> { let mut buffer = String::new(); let stdin = io::stdin(); let mut handle = stdin.lock(); handle.read_line(&mut buffer).unwrap_or(0); let result = buffer[..buffer.len() - 1] .split(' ') .map(PathBuf::from) .collect(); result } fn main() -> Result<()> { let cli = &Cli::parse(); if cli.check { check_modified_files(&cli.path, &cli.output); return Ok(()); } let inputs = if is(Stream::Stdin) { cli.path.to_vec() } else { get_piped_input() }; if inputs.is_empty() { return Ok(()); } if cli.verbose { print_long_banner(); } else { print_short_banner(); } if cli.version { println!("{}", get_title()); return Ok(()); } if cli.watch { return parse_then_watch(&inputs, cli.output.as_path(), cli.verbose); } else if cli.path.len() > 1 { if !cli.relative { println!( "\n[ ERROR ] for multiple inputs, use the --relative flag to set a relative output path." ); } parse_multiple_files(&inputs, &cli.output, cli.verbose); } else if cli.relative { let output_path = relative_output_path_from_input(inputs[0].as_path(), &cli.output); parse_mdx_file(inputs[0].as_path(), &output_path, cli.verbose); } else { parse_mdx_file(inputs[0].as_path(), cli.output.as_path(), cli.verbose); } Ok(()) } #[cfg(test)] mod tests { use crate::relative_output_path_from_input; use std::path::PathBuf; #[test] pub fn test_relative_output_path_from_input() { let input_path = PathBuf::from("local/files/input/./day-one/morning.txt"); let relative_output_path = PathBuf::from("local/files/output"); assert_eq!( relative_output_path_from_input(input_path.as_path(), relative_output_path.as_path()), PathBuf::from("local/files/output/day-one/morning.astro") ); } #[test] #[should_panic( expected = "[ ERROR ] Using relative mode: check input paths include a \"/./\" \ marker to separate root and relative parts." )] pub fn test_relative_output_path_from_input_panic() { let input_path = PathBuf::from("local/files/input/day-one/morning.mdx"); let relative_output_path = PathBuf::from("local/files/output"); assert_eq!( relative_output_path_from_input(input_path.as_path(), relative_output_path.as_path()), PathBuf::from("local/files/output/day-one/morning.astro") ); } }
mod parser; mod utility; use atty::{is, Stream}; use clap::Parser; use std::{ fs, io::{self, BufRead}, path::{Path, PathBuf}, }; use watchexec::{ config::{Config, ConfigBuilder}, error::Result, pathop::PathOp, run::{watch, ExecHandler, Handler}, }; use parser::{author_name_from_cargo_pkg_authors, parse_mdx_file}; #[derive(Parser)] #[clap(author,version,about,long_about=None)] struct Cli { path: Vec<PathBuf>, #[clap(short, long)] check: bool, #[clap(short, long)] modified: bool, #[clap(short = 'R', long)] relative: bool, #[clap(short, long)] verbose: bool, #[clap(short = 'V', long)] version: bool, #[clap(short, long)] watch: bool, #[clap(parse(from_os_str))] #[clap(short, long)] output: std::path::PathBuf, } fn get_title() -> String { let mut the_title = String::from(env!("CARGO_PKG_NAME")); the_title.push_str(" (v"); the_title.push_str(env!("CARGO_PKG_VERSION")); the_title.push_str("), "); the_title.push_str(env!("CARGO_PKG_DESCRIPTION")); the_title } fn print_short_banner() { println!("{}", get_title()); } fn print_long_banner() { print_short_banner(); println!( "Written by: {}", author_name_from_cargo_pkg_authors().trim() ); println!("Repo: {}", env!("CARGO_PKG_REPOSITORY")); println!("Usage: {} <somefile>.mdx", env!("CARGO_PKG_NAME")); println!(" {} --watch <somefile>.mdx", env!("CARGO_PKG_NAME")); } struct CmslessHandler(ExecHandler); impl Handler for CmslessHandler { fn args(&self) -> Config { self.0.args() } fn on_manual(&self) -> Result<bool> { println!("[ INFO ] Running manually..."); self.0.on_manual() } fn on_update(&self, ops: &[PathOp]) -> Result<bool> { println!("[ INFO ] Running manually {:?}...", ops); self.0.on_update(ops) } } fn parse_then_watch(mdx_paths: &[PathBuf], output_path: &Path, verbose: bool) -> Result<()> { let output_path_str = output_path.to_string_lossy(); let mut command: Vec<String> = vec!["cmessless".into()]; command.extend(mdx_paths.iter().map(|value| value.to_string_lossy().into())); command.push("--check --output".into()); command.push(output_path.to_string_lossy().into()); command.push("| cmessless --relative".into()); if verbose { command.push("--verbose".into()); } command.push(" --output".into()); command.push(output_path_str.into()); let config = ConfigBuilder::default() .clear_screen(true) .run_initially(true) .paths(mdx_paths) .cmd(command) .build() .expect("[ ERROR ] Issue while configuring watchexec"); let handler = CmslessHandler( ExecHandler::new(config).expect("[ ERROR ] Issue while creating watchexec handler"), ); watch(&handler) } fn relative_output_path_from_input(input_path: &Path, relative_output_path: &Path) -> PathBuf { match input_path.to_string_lossy().find("/./") { Some(_) => {} None => panic!( "[ ERROR ] Using relative mode: check input paths include a \"/./\" marker to separate \ root and relative parts." ), } let mut components = input_path.components(); loop { if components.as_path().to_string_lossy().find("/./") == None { break; } components.next(); } let mut result = PathBuf::new(); result.push(relative_output_path); let tail = components.as_path(); let mut output_file_name = String::from(tail.file_stem().unwrap().to_string_lossy()); output_file_name.push_str(".astro"); relative_output_path .join(tail.parent().unwrap()) .join(PathBuf::from(output_file_name)) } fn check_modified_files(mdx_paths: &[PathBuf], relative_output_path: &Path) { let mut modified_files: Vec<String> = Vec::new(); for input_path in mdx_paths { let output_path = relative_output_path_from_input(input_path.as_path(), relative_output_path); let input_modified = match fs::metadata(input_path).unwrap().modified() { Ok(value) => Some(value), Err(_) => None, }; let output_modified =
; if output_modified == None || input_modified == None || input_modified.unwrap() > output_modified.unwrap() { modified_files.push(String::from(input_path.to_string_lossy())); } } println!("{}", modified_files.join(" ")); } fn parse_multiple_files(mdx_paths: &[PathBuf], relative_output_path: &Path, verbose: bool) { for input_path in mdx_paths { let output_path = relative_output_path_from_input(input_path.as_path(), relative_output_path); parse_mdx_file(input_path.as_path(), output_path.as_path(), verbose); } println!("\n[ INFO ] {} files parsed.", mdx_paths.len()); } fn get_piped_input() -> Vec<PathBuf> { let mut buffer = String::new(); let stdin = io::stdin(); let mut handle = stdin.lock(); handle.read_line(&mut buffer).unwrap_or(0); let result = buffer[..buffer.len() - 1] .split(' ') .map(PathBuf::from) .collect(); result } fn main() -> Result<()> { let cli = &Cli::parse(); if cli.check { check_modified_files(&cli.path, &cli.output); return Ok(()); } let inputs = if is(Stream::Stdin) { cli.path.to_vec() } else { get_piped_input() }; if inputs.is_empty() { return Ok(()); } if cli.verbose { print_long_banner(); } else { print_short_banner(); } if cli.version { println!("{}", get_title()); return Ok(()); } if cli.watch { return parse_then_watch(&inputs, cli.output.as_path(), cli.verbose); } else if cli.path.len() > 1 { if !cli.relative { println!( "\n[ ERROR ] for multiple inputs, use the --relative flag to set a relative output path." ); } parse_multiple_files(&inputs, &cli.output, cli.verbose); } else if cli.relative { let output_path = relative_output_path_from_input(inputs[0].as_path(), &cli.output); parse_mdx_file(inputs[0].as_path(), &output_path, cli.verbose); } else { parse_mdx_file(inputs[0].as_path(), cli.output.as_path(), cli.verbose); } Ok(()) } #[cfg(test)] mod tests { use crate::relative_output_path_from_input; use std::path::PathBuf; #[test] pub fn test_relative_output_path_from_input() { let input_path = PathBuf::from("local/files/input/./day-one/morning.txt"); let relative_output_path = PathBuf::from("local/files/output"); assert_eq!( relative_output_path_from_input(input_path.as_path(), relative_output_path.as_path()), PathBuf::from("local/files/output/day-one/morning.astro") ); } #[test] #[should_panic( expected = "[ ERROR ] Using relative mode: check input paths include a \"/./\" \ marker to separate root and relative parts." )] pub fn test_relative_output_path_from_input_panic() { let input_path = PathBuf::from("local/files/input/day-one/morning.mdx"); let relative_output_path = PathBuf::from("local/files/output"); assert_eq!( relative_output_path_from_input(input_path.as_path(), relative_output_path.as_path()), PathBuf::from("local/files/output/day-one/morning.astro") ); } }
match fs::metadata(output_path) { Ok(metadata_value) => match metadata_value.modified() { Ok(value) => Some(value), Err(_) => None, }, Err(_) => None, }
if_condition
[ { "content": "pub fn parse_mdx_file(input_path: &Path, output_path: &Path, verbose: bool) {\n\n println!(\"[ INFO ] Parsing {:?}...\", input_path);\n\n let start = Instant::now();\n\n\n\n let file = File::open(input_path).expect(\"[ ERROR ] Couldn't open that file!\");\n\n let frontmatter_end_line_number = parse_frontmatter(&file);\n\n let file = File::open(input_path).expect(\"[ ERROR ] Couldn't open that file!\");\n\n\n\n let slug = slug_from_input_file_path(input_path);\n\n let mut tokens: Vec<String> = Vec::new();\n\n let reader = BufReader::new(&file);\n\n\n\n let mut current_indentation: usize = 0;\n\n let mut open_lists = Stack::new();\n\n\n\n // used to keep a track of open blocks\n\n // let mut open_jsx_component_type: Stack<JSXComponentType> = Stack::new();\n\n let mut open_jsx_component_register = JSXComponentRegister::new();\n\n let mut open_html_block_element_stack: Stack<HTMLBlockElementType> = Stack::new();\n\n let mut open_markdown_block_stack: Stack<MarkdownBlock> = Stack::new();\n", "file_path": "src/parser/mod.rs", "rank": 1, "score": 238464.0586359909 }, { "content": "fn form_code_span_html_string(input: &str) -> String {\n\n match segment_code_span_line(input) {\n\n Ok((_, (initial_segment, code_segment, final_segment))) => {\n\n format!(\n\n \"{initial_segment}<code>{code_segment}</code>{}\",\n\n form_code_span_html_string(final_segment)\n\n )\n\n }\n\n Err(_) => String::from(input),\n\n }\n\n}\n\n\n\n/* if the last word of the title is shorter than 6 characters, replaces the last space with a\n\n * non-breaking space\n\n */\n", "file_path": "src/parser/mod.rs", "rank": 5, "score": 170122.6027899622 }, { "content": "pub fn slug_from_input_file_path(path: &Path) -> &str {\n\n match path\n\n .file_stem()\n\n .expect(\"[ ERROR ] Couldn't open that file!\")\n\n .to_str()\n\n {\n\n Some(value) => match value {\n\n \"index\" => path\n\n .parent()\n\n .expect(\"[ ERROR ] Couldn't open that file!\")\n\n .file_name()\n\n .expect(\"[ ERROR ] Couldn't open that file!\")\n\n .to_str()\n\n .expect(\"[ ERROR ] Couldn't open that file!\"),\n\n other => other,\n\n },\n\n None => panic!(\"[ ERROR ] Couldn't open that file!\"),\n\n }\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 6, "score": 159573.0170263049 }, { "content": "pub fn form_image_component(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let component_identifier = \"Image\";\n\n let (_, attributes) = parse_jsx_component(line, component_identifier)?;\n\n Ok((\"\", (format!(\"<Image{attributes}/>\"), LineType::Image, 0)))\n\n}\n\n\n", "file_path": "src/parser/jsx/mod.rs", "rank": 7, "score": 130870.6088364363 }, { "content": "pub fn form_questions_component(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let component_identifier = \"Questions\";\n\n let (_, attributes) = parse_jsx_component(line, component_identifier)?;\n\n Ok((\n\n \"\",\n\n (format!(\"<Questions{attributes}/>\"), LineType::Questions, 0),\n\n ))\n\n}\n\n\n", "file_path": "src/parser/jsx/mod.rs", "rank": 8, "score": 130870.6088364363 }, { "content": "pub fn form_tweet_component(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let component_identifier = \"Tweet\";\n\n let (_, attributes) = parse_jsx_component(line, component_identifier)?;\n\n Ok((\"\", (format!(\"<Tweet{attributes}/>\"), LineType::Tweet, 0)))\n\n}\n\n\n", "file_path": "src/parser/jsx/mod.rs", "rank": 9, "score": 130870.6088364363 }, { "content": "fn form_emphasis_line(line: &str) -> IResult<&str, String> {\n\n let (_, (initial_segment, bold_segment, final_segment)) = segment_emphasis_line(line)?;\n\n Ok((\n\n final_segment,\n\n format!(\"{initial_segment}<em>{bold_segment}</em>\"),\n\n ))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 10, "score": 130661.54417148612 }, { "content": "fn form_poll_component_last_line(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let component_identifier = \"Poll\";\n\n let (final_segment, initial_segment) =\n\n parse_jsx_component_last_line(line, component_identifier)?;\n\n Ok((\n\n \"\",\n\n (\n\n format!(\"{initial_segment}{final_segment}\"),\n\n LineType::Poll,\n\n 0,\n\n ),\n\n ))\n\n}\n\n\n", "file_path": "src/parser/jsx/mod.rs", "rank": 11, "score": 129761.988919014 }, { "content": "// assumed tag is already open\n\npub fn form_how_to_component_last_line(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let (remaining_line, (markup, tag_type, indentation)) =\n\n form_jsx_component_last_line(line, \"HowTo\")?;\n\n match tag_type {\n\n HTMLTagType::Closing => Ok((remaining_line, (markup, LineType::HowTo, indentation))),\n\n HTMLTagType::Opening | HTMLTagType::OpeningStart | HTMLTagType::SelfClosing => {\n\n Ok((remaining_line, (markup, LineType::HowToOpen, indentation)))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/parser/jsx/mod.rs", "rank": 12, "score": 128022.71394850512 }, { "content": "// handles the continuation of an opening tag\n\npub fn form_how_to_component_opening_line(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let (remaining_line, (markup, tag_type, indentation)) = form_jsx_component_opening_line(line)?;\n\n match tag_type {\n\n HTMLTagType::Opening | HTMLTagType::SelfClosing => {\n\n Ok((remaining_line, (markup, LineType::HowToOpen, indentation)))\n\n }\n\n _ => Ok((\n\n \"\",\n\n (String::from(line), LineType::HowToOpening, indentation),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/parser/jsx/mod.rs", "rank": 13, "score": 128022.7139485051 }, { "content": "pub fn form_how_to_component_first_line(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let (remaining_line, (markup, tag_type, indentation)) =\n\n form_jsx_component_first_line(line, \"HowTo\")?;\n\n match tag_type {\n\n HTMLTagType::Opening => Ok((remaining_line, (markup, LineType::HowToOpen, indentation))),\n\n HTMLTagType::OpeningStart => Ok((\"\", (markup, LineType::HowToOpening, indentation))),\n\n HTMLTagType::SelfClosing => Ok((remaining_line, (markup, LineType::HowTo, indentation))),\n\n HTMLTagType::Closing => Err(Err::Error(Error::new(line, ErrorKind::Tag))),\n\n }\n\n}\n\n\n", "file_path": "src/parser/jsx/mod.rs", "rank": 14, "score": 128022.7139485051 }, { "content": "fn form_strong_emphasis_line(line: &str) -> IResult<&str, String> {\n\n let (_, (initial_segment, bold_segment, final_segment)) = segment_strong_emphasis_line(line)?;\n\n match form_code_span_line(bold_segment) {\n\n Ok((_, code_segment)) => Ok((\n\n final_segment,\n\n format!(\"{initial_segment}<strong>{code_segment}</strong>\"),\n\n )),\n\n Err(_) => Ok((\n\n final_segment,\n\n format!(\"{initial_segment}<strong>{bold_segment}</strong>\"),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 15, "score": 127556.54321451252 }, { "content": "fn parse_inline_wrap_text(line: &str) -> IResult<&str, String> {\n\n fn is_wrap_tag(c: char) -> bool {\n\n c == '`' || c == '*' || c == '<'\n\n }\n\n\n\n let first_tag = line.find(is_wrap_tag);\n\n if let Some(first_tag) = first_tag {\n\n let line_from_tag = &line[first_tag..];\n\n let parsed_result = match &line_from_tag[0..1] {\n\n \"`\" => form_code_span_line(line_from_tag),\n\n \"<\" => form_html_anchor_element_line(line_from_tag),\n\n \"*\" => alt((form_strong_emphasis_line, form_emphasis_line))(line_from_tag),\n\n _ => return Ok((\"\", line.to_string())),\n\n };\n\n let (initial_segment, final_segment) = match parsed_result {\n\n Ok((value_1, value_2)) => (value_2, value_1),\n\n Err(_) => return Ok((\"\", line.to_string())),\n\n };\n\n let (_, final_final_segment) = parse_inline_wrap_text(final_segment)?;\n\n let line_before_tag = &line[..first_tag];\n\n Ok((\n\n \"\",\n\n format!(\"{line_before_tag}{initial_segment}{final_final_segment}\"),\n\n ))\n\n } else {\n\n Ok((\"\", line.to_string()))\n\n }\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 16, "score": 127556.54321451254 }, { "content": "fn form_code_span_line(line: &str) -> IResult<&str, String> {\n\n let (_, (initial_segment, code_segment, final_segment)) = segment_code_span_line(line)?;\n\n Ok((\n\n final_segment,\n\n format!(\n\n \"{initial_segment}<InlineCodeFragment code={{`{}`}} />\",\n\n escape_code(code_segment)\n\n ),\n\n ))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 17, "score": 127556.54321451252 }, { "content": "// assumed tag is opened in earlier line and this has been recognised\n\nfn form_jsx_component_opening_line(line: &str) -> IResult<&str, (String, HTMLTagType, usize)> {\n\n let (remaining_line, (_attributes, tag_type)) =\n\n alt((parse_self_closing_html_tag_end, parse_opening_html_tag_end))(line)?;\n\n match tag_type {\n\n HTMLTagType::Opening | HTMLTagType::SelfClosing => {\n\n Ok((remaining_line, (line.to_string(), tag_type, 0)))\n\n }\n\n HTMLTagType::OpeningStart | HTMLTagType::Closing => {\n\n Err(Err::Error(Error::new(line, ErrorKind::Tag)))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/parser/jsx/mod.rs", "rank": 19, "score": 127066.68630339351 }, { "content": "fn form_code_fragment_component_last_line(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let component_identifier = \"CodeFragment\";\n\n let (final_segment, initial_segment) =\n\n parse_jsx_component_last_line(line, component_identifier)?;\n\n Ok((\n\n \"\",\n\n (\n\n format!(\"{initial_segment}{final_segment}\"),\n\n LineType::CodeFragment,\n\n 0,\n\n ),\n\n ))\n\n}\n\n\n", "file_path": "src/parser/jsx/mod.rs", "rank": 20, "score": 127066.68630339351 }, { "content": "pub fn form_video_component_first_line(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let component_identifier = \"Video\";\n\n let (_, (__parsed_value_, jsx_tag_type)) =\n\n parse_jsx_component_first_line(line, component_identifier)?;\n\n match jsx_tag_type {\n\n JSXTagType::Closed => Ok((\"\", (line.to_string(), LineType::VideoOpen, 0))),\n\n JSXTagType::Opened => Ok((\"\", (line.to_string(), LineType::VideoOpening, 0))),\n\n JSXTagType::SelfClosed => Ok((\"\", (line.to_string(), LineType::Video, 0))),\n\n }\n\n}\n\n\n", "file_path": "src/parser/jsx/mod.rs", "rank": 21, "score": 125327.41133288461 }, { "content": "pub fn form_poll_component_opening_line(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let (_, line_type) = alt((\n\n map(terminated(take_until(\"/>\"), tag(\"/>\")), |_| LineType::Poll),\n\n map(terminated(take_until(\">\"), tag(\">\")), |_| {\n\n LineType::PollOpen\n\n }),\n\n map(rest, |_| LineType::PollOpening),\n\n ))(line)?;\n\n Ok((\"\", (line.to_string(), line_type, 0)))\n\n}\n\n\n", "file_path": "src/parser/jsx/mod.rs", "rank": 22, "score": 125327.41133288461 }, { "content": "pub fn form_video_component_last_line(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let component_identifier = \"Video\";\n\n let (final_segment, initial_segment) =\n\n parse_jsx_component_last_line(line, component_identifier)?;\n\n Ok((\n\n \"\",\n\n (\n\n format!(\"{initial_segment}{final_segment}\"),\n\n LineType::Video,\n\n 0,\n\n ),\n\n ))\n\n}\n\n\n", "file_path": "src/parser/jsx/mod.rs", "rank": 23, "score": 125327.41133288461 }, { "content": "pub fn form_video_component_opening_line(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let (remaining_line, (markup, tag_type, indentation)) = form_jsx_component_opening_line(line)?;\n\n match tag_type {\n\n HTMLTagType::Opening => Ok((remaining_line, (markup, LineType::VideoOpen, indentation))),\n\n HTMLTagType::SelfClosing => Ok((remaining_line, (markup, LineType::Video, indentation))),\n\n _ => Ok((\n\n \"\",\n\n (String::from(line), LineType::VideoOpening, indentation),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/parser/jsx/mod.rs", "rank": 24, "score": 125327.41133288463 }, { "content": "pub fn form_poll_component_first_line(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let component_identifier = \"Poll\";\n\n let (_, (_parsed_value, jsx_tag_type)) =\n\n parse_jsx_component_first_line(line, component_identifier)?;\n\n match jsx_tag_type {\n\n JSXTagType::Closed => Ok((\"\", (line.to_string(), LineType::PollOpen, 0))),\n\n JSXTagType::Opened => Ok((\"\", (line.to_string(), LineType::PollOpening, 0))),\n\n JSXTagType::SelfClosed => Ok((\"\", (line.to_string(), LineType::Poll, 0))),\n\n }\n\n}\n\n\n", "file_path": "src/parser/jsx/mod.rs", "rank": 25, "score": 125327.41133288461 }, { "content": "fn form_html_anchor_element_line(line: &str) -> IResult<&str, String> {\n\n let (_, (initial_segment, anchor_attributes_segment, final_segment)) = alt((\n\n segment_anchor_element_with_attributes_line,\n\n segment_anchor_element_no_attributes_line,\n\n ))(line)?;\n\n let (_, attributes_vector) = parse_html_tag_attributes(anchor_attributes_segment)?;\n\n let (remaining_line, link_content) = take_until(\"</a>\")(final_segment)?;\n\n\n\n let attributes_hash_map: HashMap<&str, &str> = attributes_vector.into_iter().collect();\n\n let href = attributes_hash_map\n\n .get(\"href\")\n\n .unwrap_or_else(|| panic!(\"[ ERROR ] Anchor tag missing href: {line}\"));\n\n let external_site = parse_href_scheme(href).is_ok();\n\n let mut additional_attributes = String::new();\n\n\n\n if external_site {\n\n if !attributes_hash_map.contains_key(\"target\") {\n\n additional_attributes.push_str(\" target=\\\"_blank\\\"\");\n\n }\n\n if !attributes_hash_map.contains_key(\"rel\") {\n", "file_path": "src/parser/mod.rs", "rank": 26, "score": 124643.64265583262 }, { "content": "fn form_astro_frontmatter(components: &HashSet<JSXComponentType>, slug: &str) -> Vec<String> {\n\n let mut result: Vec<String> = Vec::new();\n\n let mut define_slug = false;\n\n let mut image_data_imports: Vec<String> = Vec::new();\n\n\n\n result.push(String::from(\"---\"));\n\n if components.contains(&JSXComponentType::CodeFragment) {\n\n result.push(String::from(\n\n \"import CodeFragment from '$components/CodeFragment.tsx';\",\n\n ));\n\n }\n\n result.push(String::from(\n\n \"import Heading from '$components/Heading.svelte';\",\n\n ));\n\n if components.contains(&JSXComponentType::HowTo) {\n\n define_slug = true;\n\n result.push(String::from(\n\n \"import HowTo from '$components/HowTo/index.svelte';\n\nimport HowToSection from '$components/HowTo/HowToSection.svelte';\n\nimport HowToStep from '$components/HowTo/HowToStep.svelte';\n", "file_path": "src/parser/mod.rs", "rank": 27, "score": 122578.31254397359 }, { "content": "fn escape_code(line: &str) -> String {\n\n line.replace('<', \"\\\\u003C\")\n\n .replace('>', \"\\\\u003E\")\n\n .replace('`', \"\\\\u0060\")\n\n .replace('{', \"\\\\u007B\")\n\n .replace('}', \"\\\\u007D\")\n\n .replace(\"import.\", \"import..\")\n\n .replace(\"process.env\", \"process..env\")\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 28, "score": 122387.31784582206 }, { "content": "fn slugify_title(title: &str) -> String {\n\n match remove_html_tags(title) {\n\n Ok((final_value, initial_value)) => format!(\n\n \"{}{}\",\n\n slugify_title(initial_value),\n\n slugify_title(final_value)\n\n ),\n\n Err(_) => {\n\n let deunicoded_title = deunicode(title);\n\n let mut result = String::with_capacity(deunicoded_title.len());\n\n let mut last_was_replaced = true;\n\n let remove_characters = \"?'`:[]()\";\n\n let replace_characters = \" -/.,\"; // include '-' here to avoid \"--\" in result\n\n for chars in deunicoded_title.chars() {\n\n if replace_characters.contains(chars) {\n\n if !last_was_replaced {\n\n last_was_replaced = true;\n\n result.push('-');\n\n }\n\n } else if !remove_characters.contains(chars) {\n\n last_was_replaced = false;\n\n result.push_str(&chars.to_lowercase().to_string());\n\n }\n\n }\n\n result\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 29, "score": 122387.31784582206 }, { "content": "fn format_heading_widows(heading: &str) -> String {\n\n match heading.rsplit_once(' ') {\n\n Some((before_space, after_space)) => {\n\n if after_space.len() < 5 {\n\n format!(\n\n \"{}\\\\u00a0{}\",\n\n format_heading(before_space),\n\n format_heading(after_space)\n\n )\n\n } else {\n\n format_heading(heading).to_string()\n\n }\n\n }\n\n None => format_heading(heading).to_string(),\n\n }\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 30, "score": 119446.00619971853 }, { "content": "fn form_heading_line(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let (value, level) = parse_heading_text(line)?;\n\n let parsed_text = form_code_span_html_string(value);\n\n let id = slugify_title(value);\n\n Ok((\n\n \"\",\n\n (\n\n format!(\n\n \"<h{level} id=\\\"{id}\\\"><Heading id=\\\"{id}\\\" text=\\\"{}\\\"/></h{level}>\",\n\n format_heading_widows(parsed_text.trim_end())\n\n ),\n\n LineType::Heading,\n\n level,\n\n ),\n\n ))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 31, "score": 117668.74009778834 }, { "content": "// special row between head and body with alignment markers\n\nfn form_table_header_row(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n parse_table_header_row(line)?;\n\n Ok((\n\n \"\",\n\n (\n\n String::from(\" </thead>\\n <tbody>\"),\n\n LineType::HTMLTableBodyOpen,\n\n 0,\n\n ),\n\n ))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 33, "score": 115093.40698795478 }, { "content": "fn form_ordered_list_line(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let (list_text, (indentation, _start)) = parse_ordered_list_text(line)?;\n\n let (_, parsed_list_text) = parse_inline_wrap_text(list_text)?;\n\n Ok((\n\n \"\",\n\n (\n\n format!(\" <li>{parsed_list_text}\"),\n\n LineType::OrderedListItemOpen,\n\n indentation,\n\n ),\n\n ))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 34, "score": 115090.17817563242 }, { "content": "fn form_inline_wrap_text(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let (_, parsed_line) = parse_inline_wrap_text(line)?;\n\n if !parsed_line.is_empty() {\n\n Ok((\n\n \"\",\n\n (format!(\"<p>{parsed_line}</p>\"), LineType::Paragraph, 0),\n\n ))\n\n } else {\n\n Ok((\"\", (String::new(), LineType::Paragraph, 0)))\n\n }\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 35, "score": 115090.17817563243 }, { "content": "fn form_table_body_row(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let (_, cells) = parse_table_line(line)?;\n\n let mut markup = String::from(\" <tr>\");\n\n for cell in cells {\n\n markup.push_str(\"\\n <td>\");\n\n markup.push_str(cell.trim_end());\n\n markup.push_str(\"</td>\");\n\n }\n\n markup.push_str(\"\\n </tr>\");\n\n Ok((\"\", (markup, LineType::HTMLTableBodyOpen, 0)))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 36, "score": 115090.17817563242 }, { "content": "// regular row in table head\n\nfn form_table_head_row(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let (_, cells) = parse_table_line(line)?;\n\n let mut markup = String::from(\" <tr>\");\n\n for cell in cells {\n\n markup.push_str(\"\\n <th scope=\\\"col\\\">\");\n\n markup.push_str(cell);\n\n markup.push_str(\"</th>\");\n\n }\n\n markup.push_str(\"\\n </tr>\");\n\n Ok((\"\", (markup, LineType::HTMLTableHeadOpen, 0)))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 37, "score": 115090.17817563242 }, { "content": "fn form_unordered_list_line(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let (list_text, indentation) = parse_unordered_list_text(line)?;\n\n let (_, parsed_list_text) = parse_inline_wrap_text(list_text)?;\n\n Ok((\n\n \"\",\n\n (\n\n format!(\"<li>\\n {parsed_list_text}\\n</li>\"),\n\n LineType::UnorderedListItem,\n\n indentation,\n\n ),\n\n ))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 38, "score": 115090.17817563242 }, { "content": "fn form_table_body_last_line(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n match form_table_body_row(line) {\n\n Ok(value) => Ok(value),\n\n Err(_) => Ok((\n\n \"\",\n\n (\n\n String::from(\" </tbody>\\n</table>\"),\n\n LineType::HTMLTableBody,\n\n 0,\n\n ),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 39, "score": 112657.59935040792 }, { "content": "// optimistically try to end the head section or alternatively add additional head line\n\nfn form_table_head_last_line(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n alt((form_table_header_row, form_table_head_row))(line)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 40, "score": 112657.59935040792 }, { "content": "fn form_table_head_first_line(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let (_, (row_body, line_type, indentation)) = form_table_head_row(line)?;\n\n let markup = String::from(\"<table>\\n <thead>\");\n\n Ok((\n\n \"\",\n\n (format!(\"{markup}\\n{row_body}\"), line_type, indentation),\n\n ))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 41, "score": 112657.59935040792 }, { "content": "fn form_ordered_list_first_line(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let (list_text, (indentation, start)) = parse_ordered_list_text(line)?;\n\n let (_, parsed_list_text) = parse_inline_wrap_text(list_text)?;\n\n let markup = match start {\n\n \"1\" => format!(\"<ol>\\n <li>{parsed_list_text}\"),\n\n _ => format!(\"<ol start=\\\"{start}\\\">\\n <li>{parsed_list_text}\"),\n\n };\n\n Ok((\"\", (markup, LineType::OrderedListItemOpen, indentation)))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 42, "score": 112657.59935040792 }, { "content": "fn form_fenced_code_block_last_line(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let (_final_segment, _initial_segment) = parse_fenced_code_block_last_line(line)?;\n\n Ok((\"\", (String::from(\" `} />\"), LineType::FencedCodeBlock, 0)))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 43, "score": 110358.9478226604 }, { "content": "fn form_html_block_element_last_line(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let (_remaining_line, (tag_name, _tag_attributes, _tag_type)) = parse_closing_html_tag(line)?;\n\n match tag_name {\n\n \"dl\" => Ok((\"\", (String::from(line), LineType::HTMLDescriptionList, 0))),\n\n \"figure\" => Ok((\"\", (String::from(line), LineType::HTMLFigureBlock, 0))),\n\n _ => Err(Err::Error(Error::new(line, ErrorKind::Tag))),\n\n }\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 44, "score": 110358.9478226604 }, { "content": "fn form_html_block_element_first_line(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let (_remaining_line, (tag_name, _tag_attributes, _tag_type)) = parse_opening_html_tag(line)?;\n\n match tag_name {\n\n \"dl\" => Ok((\n\n \"\",\n\n (String::from(line), LineType::HTMLDescriptionListOpen, 0),\n\n )),\n\n \"figure\" => Ok((\"\", (String::from(line), LineType::HTMLFigureBlockOpen, 0))),\n\n _ => panic!(\"[ ERROR ] Unrecognised HTML block element: {tag_name}\"),\n\n }\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 45, "score": 110358.9478226604 }, { "content": "fn form_fenced_code_block_first_line(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n let (\n\n _,\n\n (\n\n language_option,\n\n first_line_number_option,\n\n highlight_line_numbers_option,\n\n title_option,\n\n caption_option,\n\n collapse_option,\n\n ),\n\n ) = parse_fenced_code_block_first_line(line)?;\n\n\n\n let mut markup = String::from(\"<CodeFragment\\n client:visible\\n set:html\");\n\n if let Some(value) = language_option {\n\n markup.push_str(\"\\n language=\\\"\");\n\n markup.push_str(value);\n\n markup.push('\\\"');\n\n };\n\n if let Some(value) = first_line_number_option {\n", "file_path": "src/parser/mod.rs", "rank": 46, "score": 110358.9478226604 }, { "content": "fn form_html_block_level_comment_last_line(line: &str) -> IResult<&str, (String, LineType, usize)> {\n\n match parse_html_block_level_comment_last_line(line) {\n\n Ok((after_comment, end_of_comment)) => {\n\n let (_, after_comment) = parse_inline_wrap_text(after_comment)?;\n\n let markup = format!(\"{end_of_comment}-->{}\", after_comment.trim_end());\n\n Ok((\"\", (markup, LineType::HTMLBlockLevelComment, 0)))\n\n }\n\n Err(_) => Ok((\n\n \"\",\n\n (\n\n line.trim_end().to_string(),\n\n LineType::HTMLBlockLevelCommentOpen,\n\n 0,\n\n ),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 47, "score": 108183.45970988156 }, { "content": "fn parse_jsx_component<'a>(\n\n line: &'a str,\n\n component_identifier: &'a str,\n\n) -> IResult<&'a str, &'a str> {\n\n let delimiter = &mut String::from(\"<\");\n\n delimiter.push_str(component_identifier);\n\n let result = delimited(tag(delimiter.as_str()), take_until(\"/>\"), tag(\"/>\"))(line);\n\n result\n\n}\n\n\n", "file_path": "src/parser/jsx/mod.rs", "rank": 48, "score": 107101.96577004183 }, { "content": "fn parse_frontmatter_line(line: &str) -> (Option<String>, LineType) {\n\n match parse_frontmatter_delimiter(line) {\n\n Ok((_frontmatter_line, _)) => (None, LineType::FrontmatterDelimiter),\n\n Err(_) => (Some(String::from(line)), LineType::Frontmatter),\n\n }\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 49, "score": 106071.18387070826 }, { "content": "fn parse_mdx_line(line: &str) -> Option<(String, LineType, usize)> {\n\n match alt((\n\n form_code_fragment_component_first_line,\n\n form_fenced_code_block_first_line,\n\n form_how_to_component_first_line,\n\n form_html_block_level_comment_first_line,\n\n form_html_block_element_first_line,\n\n form_table_head_first_line,\n\n form_image_component,\n\n form_poll_component_first_line,\n\n form_questions_component,\n\n form_tweet_component,\n\n form_gatsby_not_maintained_component,\n\n form_video_component_first_line,\n\n form_heading_line,\n\n form_ordered_list_first_line,\n\n form_unordered_list_line,\n\n form_inline_wrap_text,\n\n ))(line)\n\n {\n", "file_path": "src/parser/mod.rs", "rank": 50, "score": 101561.00926468345 }, { "content": "pub fn form_gatsby_not_maintained_component(\n\n line: &str,\n\n) -> IResult<&str, (String, LineType, usize)> {\n\n let component_identifier = \"GatsbyNotMaintained\";\n\n let (_, attributes) = parse_jsx_component(line, component_identifier)?;\n\n Ok((\n\n \"\",\n\n (\n\n format!(\"<GatsbyNotMaintained{attributes}/>\"),\n\n LineType::GatsbyNotMaintained,\n\n 0,\n\n ),\n\n ))\n\n}\n\n\n", "file_path": "src/parser/jsx/mod.rs", "rank": 52, "score": 100963.79757283445 }, { "content": "fn parse_jsx_component_first_line<'a>(\n\n line: &'a str,\n\n component_identifier: &'a str,\n\n) -> IResult<&'a str, (&'a str, &'a JSXTagType)> {\n\n let left_delimiter = &mut String::from(\"<\");\n\n left_delimiter.push_str(component_identifier);\n\n let result = alt((\n\n value(\n\n (line, &JSXTagType::SelfClosed),\n\n delimited(tag(left_delimiter.as_str()), take_until(\"/>\"), tag(\"/>\")),\n\n ),\n\n value(\n\n (line, &JSXTagType::Closed),\n\n delimited(tag(left_delimiter.as_str()), take_until(\">\"), tag(\">\")),\n\n ),\n\n value(\n\n (line, &JSXTagType::Opened),\n\n preceded(tag(left_delimiter.as_str()), rest),\n\n ),\n\n ))(line)?;\n\n Ok(result)\n\n}\n\n\n", "file_path": "src/parser/jsx/mod.rs", "rank": 53, "score": 100963.79757283445 }, { "content": "fn parse_jsx_component_last_line<'a>(\n\n line: &'a str,\n\n component_identifier: &'a str,\n\n) -> IResult<&'a str, &'a str> {\n\n let delimiter = &mut String::from(\"</\");\n\n delimiter.push_str(component_identifier);\n\n let result = tag(delimiter.as_str())(line);\n\n result\n\n}\n\n\n", "file_path": "src/parser/jsx/mod.rs", "rank": 54, "score": 100963.79757283445 }, { "content": "fn form_jsx_component_last_line<'a>(\n\n line: &'a str,\n\n component_identifier: &'a str,\n\n) -> IResult<&'a str, (String, HTMLTagType, usize)> {\n\n let (_remaining_line, (component_name, _attributes, tag_type)) = parse_closing_html_tag(line)?;\n\n all_consuming(tag(component_identifier))(component_name)?; // check names match\n\n match tag_type {\n\n HTMLTagType::Closing => Ok((_remaining_line, (line.to_string(), tag_type, 0))),\n\n HTMLTagType::Opening | HTMLTagType::OpeningStart | HTMLTagType::SelfClosing => {\n\n Err(Err::Error(Error::new(line, ErrorKind::Tag)))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/parser/jsx/mod.rs", "rank": 55, "score": 100963.79757283445 }, { "content": "fn form_jsx_component_first_line<'a>(\n\n line: &'a str,\n\n component_identifier: &'a str,\n\n) -> IResult<&'a str, (String, HTMLTagType, usize)> {\n\n let (remaining_line, (component_name, _attributes, tag_type)) = alt((\n\n parse_self_closing_html_tag,\n\n parse_opening_html_tag,\n\n parse_opening_html_tag_start,\n\n ))(line)?;\n\n all_consuming(tag(component_identifier))(component_name)?; // check names match\n\n match tag_type {\n\n HTMLTagType::Opening | HTMLTagType::OpeningStart | HTMLTagType::SelfClosing => {\n\n Ok((remaining_line, (line.to_string(), tag_type, 0)))\n\n }\n\n HTMLTagType::Closing => Err(Err::Error(Error::new(line, ErrorKind::Tag))),\n\n }\n\n}\n\n\n", "file_path": "src/parser/jsx/mod.rs", "rank": 56, "score": 100963.79757283445 }, { "content": "pub fn form_code_fragment_component_first_line(\n\n line: &str,\n\n) -> IResult<&str, (String, LineType, usize)> {\n\n let component_identifier = \"CodeFragment\";\n\n let (_, (_parsed_value, jsx_tag_type)) =\n\n parse_jsx_component_first_line(line, component_identifier)?;\n\n match jsx_tag_type {\n\n JSXTagType::Closed => Ok((\"\", (line.to_string(), LineType::CodeFragmentOpen, 0))),\n\n JSXTagType::Opened => Ok((\"\", (line.to_string(), LineType::CodeFragmentOpening, 0))),\n\n JSXTagType::SelfClosed => Ok((\"\", (line.to_string(), LineType::CodeFragment, 0))),\n\n }\n\n}\n\n\n", "file_path": "src/parser/jsx/mod.rs", "rank": 58, "score": 95635.23102987051 }, { "content": "fn parse_table_cell(line: &str) -> IResult<&str, &str> {\n\n terminated(take_until(\"|\"), pair(tag(\"|\"), multispace0))(line)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 60, "score": 88726.58144627283 }, { "content": "#[allow(dead_code)]\n\nfn discard_leading_whitespace(line: &str) -> IResult<&str, &str> {\n\n preceded(multispace0, rest)(line)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 61, "score": 88726.58144627283 }, { "content": "fn parse_heading_text(line: &str) -> IResult<&str, usize> {\n\n let (heading, level) = terminated(many1_count(tag(\"#\")), multispace1)(line)?;\n\n Ok((heading, level))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 62, "score": 88726.58144627283 }, { "content": "fn parse_frontmatter_delimiter(line: &str) -> IResult<&str, &str> {\n\n let (line, _) = tag(\"---\")(line)?;\n\n Ok((line, \"\"))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 63, "score": 88726.58144627283 }, { "content": "fn parse_href_scheme(href: &str) -> IResult<&str, &str> {\n\n alt((tag_no_case(\"HTTP://\"), tag_no_case(\"HTTPS://\")))(href)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 64, "score": 88726.58144627283 }, { "content": "fn remove_html_tags(line: &str) -> IResult<&str, &str> {\n\n let (remaining_line, initial_segment) = take_until(\"<\")(line)?;\n\n let (final_segment, _) = parse_self_closing_html_tag(remaining_line)?;\n\n Ok((final_segment, initial_segment))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 65, "score": 88726.58144627283 }, { "content": "fn parse_unordered_list_text(line: &str) -> IResult<&str, usize> {\n\n let (heading, indentation) = terminated(many0_count(tag(\" \")), tag(\"- \"))(line)?;\n\n Ok((heading, indentation))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 66, "score": 86350.8957804085 }, { "content": "fn parse_table_line(line: &str) -> IResult<&str, Vec<&str>> {\n\n let (headings, _) = preceded(tag(\"|\"), multispace1)(line)?;\n\n many1(parse_table_cell)(headings)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 67, "score": 85381.97477498962 }, { "content": "fn parse_html_tag_attributes_str(line: &str) -> IResult<&str, &str> {\n\n is_not(\">/\")(line)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 68, "score": 84122.18918764495 }, { "content": "fn parse_table_column_alignment(line: &str) -> IResult<&str, TableAlign> {\n\n let (remaining_line, cell) = terminated(take_until(\"|\"), pair(tag(\"|\"), multispace0))(line)?;\n\n let (_, alignment) = alt((\n\n value(\n\n TableAlign::Centre,\n\n delimited(tag(\":\"), tag(\"---\"), tag(\":\")),\n\n ),\n\n value(TableAlign::Left, preceded(tag(\":\"), tag(\"---\"))),\n\n value(TableAlign::Right, terminated(tag(\"---\"), tag(\":\"))),\n\n ))(cell)?;\n\n Ok((remaining_line, alignment))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 69, "score": 84122.18918764495 }, { "content": "fn parse_html_tag_content(line: &str) -> IResult<&str, (&str, &str)> {\n\n let (remainder, tag_content) = is_not(\">/\")(line)?;\n\n let (attributes, (tag_name, _space)) = pair(alphanumeric1, multispace0)(tag_content)?;\n\n Ok((remainder, (tag_name, attributes)))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 70, "score": 83153.26818222606 }, { "content": "fn parse_ordered_list_text(line: &str) -> IResult<&str, (usize, &str)> {\n\n let (content_text, (indentation, start, _full_stop_tag)) =\n\n tuple((many0_count(tag(\" \")), digit1, tag(\". \")))(line)?;\n\n Ok((content_text.trim(), (indentation, start)))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 71, "score": 83153.26818222606 }, { "content": "fn parse_html_tag_attribute(line: &str) -> IResult<&str, (&str, &str)> {\n\n tuple((\n\n preceded(multispace0, take_until(\"=\")),\n\n delimited(tag(\"=\\\"\"), take_until(\"\\\"\"), tag(\"\\\"\")),\n\n ))(line)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 72, "score": 83153.26818222606 }, { "content": "fn parse_open_markdown_block(\n\n line: &str,\n\n open_markdown_block: Option<&MarkdownBlock>,\n\n) -> Option<(String, LineType, usize)> {\n\n match open_markdown_block {\n\n Some(MarkdownBlock::OrderedList) => match form_ordered_list_line(line) {\n\n Ok((_, (line, line_type, level))) => {\n\n if !line.is_empty() {\n\n let markup = format!(\"</li>{line}\");\n\n Some((markup, line_type, level))\n\n } else {\n\n Some((String::from(\"</ol>\"), LineType::OrderedList, level))\n\n }\n\n }\n\n Err(_) => None,\n\n },\n\n None => None,\n\n }\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 73, "score": 82630.0577473269 }, { "content": "fn parse_open_html_block(\n\n line: &str,\n\n open_html_block_elements: Option<&HTMLBlockElementType>,\n\n) -> Option<(String, LineType, usize)> {\n\n match open_html_block_elements {\n\n Some(HTMLBlockElementType::Figure) => match form_html_block_element_last_line(line) {\n\n Ok((_, (line, line_type, level))) => {\n\n if !line.is_empty() {\n\n Some((line, line_type, level))\n\n } else {\n\n None\n\n }\n\n }\n\n Err(_) => Some((line.to_string(), LineType::HTMLFigureBlockOpen, 0)),\n\n },\n\n Some(HTMLBlockElementType::DescriptionList) => {\n\n match form_html_block_element_last_line(line) {\n\n Ok((_, (line, line_type, level))) => {\n\n if !line.is_empty() {\n\n Some((line, line_type, level))\n", "file_path": "src/parser/mod.rs", "rank": 74, "score": 82630.0577473269 }, { "content": "fn segment_emphasis_line(line: &str) -> IResult<&str, (&str, &str, &str)> {\n\n let delimiter = \"*\";\n\n let (_, (initial_segment, remainder)) = parse_up_to_inline_wrap_segment(line, delimiter)?;\n\n let (_, (bold_segment, final_segment)) = parse_inline_wrap_segment(remainder, delimiter)?;\n\n Ok((\"\", (initial_segment, bold_segment, final_segment)))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 75, "score": 82307.28620857453 }, { "content": "fn parse_fenced_code_block_last_line(line: &str) -> IResult<&str, &str> {\n\n tag(\"```\")(line)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 76, "score": 82027.23098041752 }, { "content": "// parses row separating header and body containing alignment markers\n\nfn parse_table_header_row(line: &str) -> IResult<&str, Vec<TableAlign>> {\n\n let (headings, _) = preceded(tag(\"|\"), multispace1)(line)?;\n\n many1(parse_table_column_alignment)(headings)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 77, "score": 81064.34342139313 }, { "content": "// consumes delimiter\n\nfn parse_inline_wrap_segment<'a>(\n\n line: &'a str,\n\n delimiter: &'a str,\n\n) -> IResult<&'a str, (&'a str, &'a str)> {\n\n separated_pair(take_until(delimiter), tag(delimiter), rest)(line)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 78, "score": 80328.86837322614 }, { "content": "fn parse_up_to_opening_html_tag<'a>(\n\n line: &'a str,\n\n element_tag: &'a str,\n\n) -> IResult<&'a str, &'a str> {\n\n let delimiter = &mut String::from(\"<\");\n\n delimiter.push_str(element_tag);\n\n let result = take_until(delimiter.as_str())(line);\n\n result\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 79, "score": 80328.86837322613 }, { "content": "// consumes delimiter\n\nfn parse_up_to_inline_wrap_segment<'a>(\n\n line: &'a str,\n\n delimiter: &'a str,\n\n) -> IResult<&'a str, (&'a str, &'a str)> {\n\n separated_pair(take_until(delimiter), tag(delimiter), rest)(line)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 80, "score": 80328.86837322613 }, { "content": "fn parse_mdx_lines<B>(\n\n line: &str,\n\n lines_iterator: std::io::Lines<B>,\n\n open_markdown_block: Option<&MarkdownBlock>,\n\n open_html_block_elements: Option<&HTMLBlockElementType>,\n\n open_jsx_components: Option<&JSXComponentType>,\n\n) -> (std::io::Lines<B>, Option<(String, LineType, usize)>)\n\nwhere\n\n B: BufRead,\n\n{\n\n match parse_open_markdown_block(line, open_markdown_block) {\n\n Some(value) => (lines_iterator, Some(value)),\n\n None => match parse_open_html_block(line, open_html_block_elements) {\n\n Some(value) => (lines_iterator, Some(value)),\n\n None => match parse_open_jsx_block(line, open_jsx_components) {\n\n Some(value) => (lines_iterator, Some(value)),\n\n None => (lines_iterator, parse_mdx_line(line)),\n\n },\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 81, "score": 80328.86837322614 }, { "content": "fn segment_strong_emphasis_line(line: &str) -> IResult<&str, (&str, &str, &str)> {\n\n let delimiter = \"**\";\n\n let (_, (initial_segment, remainder)) = parse_up_to_inline_wrap_segment(line, delimiter)?;\n\n let (_, (bold_segment, final_segment)) = parse_inline_wrap_segment(remainder, delimiter)?;\n\n Ok((\"\", (initial_segment, bold_segment, final_segment)))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 82, "score": 80212.32800134708 }, { "content": "fn parse_html_tag_attributes(attributes: &str) -> IResult<&str, Vec<(&str, &str)>> {\n\n many0(parse_html_tag_attribute)(attributes)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 83, "score": 80212.32800134708 }, { "content": "fn segment_code_span_line(line: &str) -> IResult<&str, (&str, &str, &str)> {\n\n let delimiter = \"`\";\n\n let (_, (initial_segment, remainder)) = parse_up_to_inline_wrap_segment(line, delimiter)?;\n\n let (_, (bold_segment, final_segment)) = parse_inline_wrap_segment(remainder, delimiter)?;\n\n Ok((\"\", (initial_segment, bold_segment, final_segment)))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 84, "score": 80212.3280013471 }, { "content": "#[test]\n\npub fn test_parse_jsx_component() {\n\n let mdx_line = \"<Questions {questions} />\";\n\n assert_eq!(\n\n parse_jsx_component(mdx_line, \"Questions\"),\n\n Ok((\"\", \" {questions} \"))\n\n );\n\n}\n\n\n", "file_path": "src/parser/jsx/tests.rs", "rank": 85, "score": 80115.72140532098 }, { "content": "fn parse_html_block_level_comment_first_line(line: &str) -> IResult<&str, &str> {\n\n tag(\"<!--\")(line)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 86, "score": 80054.33220135233 }, { "content": "fn parse_html_block_level_comment_last_line(line: &str) -> IResult<&str, &str> {\n\n terminated(take_until(\"-->\"), tag(\"-->\"))(line)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 87, "score": 80054.33220135233 }, { "content": "fn segment_anchor_element_no_attributes_line(line: &str) -> IResult<&str, (&str, &str, &str)> {\n\n let delimiter = \"a\";\n\n let (remainder, initial_segment) = parse_up_to_opening_html_tag(line, delimiter)?;\n\n let (final_segment, anchor_attributes_segment) =\n\n parse_opening_html_tag_no_attributes(remainder, delimiter)?;\n\n Ok((\n\n \"\",\n\n (initial_segment, anchor_attributes_segment, final_segment),\n\n ))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 88, "score": 78239.4292222819 }, { "content": "fn segment_anchor_element_with_attributes_line(line: &str) -> IResult<&str, (&str, &str, &str)> {\n\n let delimiter = \"a\";\n\n let (remainder, initial_segment) = parse_up_to_opening_html_tag(line, delimiter)?;\n\n let (final_segment, anchor_attributes_segment) =\n\n parse_opening_html_tag_with_attributes(remainder, delimiter)?;\n\n Ok((\n\n \"\",\n\n (initial_segment, anchor_attributes_segment, final_segment),\n\n ))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 89, "score": 78239.4292222819 }, { "content": "fn parse_author_name_from_cargo_pkg_authors(cargo_pkg_authors: &str) -> IResult<&str, &str> {\n\n take_until(\" <\")(cargo_pkg_authors)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 90, "score": 78193.12741389751 }, { "content": "fn parse_opening_html_tag_no_attributes<'a>(\n\n line: &'a str,\n\n element_tag: &'a str,\n\n) -> IResult<&'a str, &'a str> {\n\n let closed_delimiter = &mut String::from(\"<\");\n\n closed_delimiter.push_str(element_tag);\n\n closed_delimiter.push('>');\n\n let (tag_close, _attributes) = tag(closed_delimiter.as_str())(line)?;\n\n Ok((tag_close, \"\"))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 91, "score": 78127.20983191137 }, { "content": "fn parse_opening_html_tag_with_attributes<'a>(\n\n line: &'a str,\n\n element_tag: &'a str,\n\n) -> IResult<&'a str, &'a str> {\n\n let delimiter = &mut String::from(\"<\");\n\n delimiter.push_str(element_tag);\n\n let (tag_close, attributes) = delimited(\n\n tag(delimiter.as_str()),\n\n delimited(multispace1, take_until(\">\"), multispace0),\n\n tag(\">\"),\n\n )(line)?;\n\n Ok((tag_close, attributes))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 92, "score": 78127.20983191137 }, { "content": "fn parse_opening_html_tag_end(line: &str) -> IResult<&str, (&str, HTMLTagType)> {\n\n let (remaining_line, tag_attributes) = alt((\n\n delimited(multispace0, parse_html_tag_attributes_str, tag(\">\")),\n\n terminated(multispace0, tag(\">\")),\n\n ))(line)?;\n\n Ok((remaining_line, (tag_attributes, HTMLTagType::Opening)))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 93, "score": 77224.20640847863 }, { "content": "#[test]\n\n#[should_panic(expected = \"[ ERROR ] Anchor tag missing href\")]\n\npub fn test_form_html_anchor_element_line_panic() {\n\n // Panics if href attribute is not present\n\n let mdx_line = \"<a to=\\\"https://www.example.com\\\">site</a>.\";\n\n assert_eq!(\n\n form_html_anchor_element_line(mdx_line),\n\n Ok((\n\n \"site</a>.\",\n\n String::from(\n\n \"<a href=\\\"https://www.example.com\\\" target=\\\"_blank\\\" rel=\\\"nofollow noopener noreferrer\\\">\"\n\n )\n\n ))\n\n );\n\n}\n\n\n", "file_path": "src/parser/tests.rs", "rank": 94, "score": 76699.79246117851 }, { "content": "fn form_html_block_level_comment_first_line(\n\n line: &str,\n\n) -> IResult<&str, (String, LineType, usize)> {\n\n parse_html_block_level_comment_first_line(line)?;\n\n Ok((\n\n \"\",\n\n (\n\n line.trim_end().to_string(),\n\n LineType::HTMLBlockLevelCommentOpen,\n\n 0,\n\n ),\n\n ))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 95, "score": 76475.52083546596 }, { "content": "fn parse_opening_html_tag(line: &str) -> IResult<&str, (&str, &str, HTMLTagType)> {\n\n let (remaining_line, (tag_name, tag_attributes)) =\n\n delimited(tag(\"<\"), parse_html_tag_content, tag(\">\"))(line)?;\n\n Ok((\n\n remaining_line,\n\n (tag_name, tag_attributes, HTMLTagType::Opening),\n\n ))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 96, "score": 76378.22443482709 }, { "content": "fn parse_closing_html_tag(line: &str) -> IResult<&str, (&str, &str, HTMLTagType)> {\n\n let (remaining_line, (tag_name, tag_attributes)) =\n\n delimited(tag(\"</\"), parse_html_tag_content, tag(\">\"))(line)?;\n\n Ok((\n\n remaining_line,\n\n (tag_name, tag_attributes, HTMLTagType::Closing),\n\n ))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 97, "score": 76378.22443482709 }, { "content": "#[test]\n\npub fn test_parse_jsx_component_first_line() {\n\n let mdx_line = \"<CodeFragment\";\n\n assert_eq!(\n\n parse_jsx_component_first_line(mdx_line, \"CodeFragment\"),\n\n Ok((\"\", (\"<CodeFragment\", &JSXTagType::Opened)))\n\n );\n\n\n\n let mdx_line = \"<CodeFragment count={3} >\";\n\n assert_eq!(\n\n parse_jsx_component_first_line(mdx_line, \"CodeFragment\"),\n\n Ok((\"\", (\"<CodeFragment count={3} >\", &JSXTagType::Closed)))\n\n );\n\n\n\n let mdx_line = \"<CodeFragment count={3} />\";\n\n assert_eq!(\n\n parse_jsx_component_first_line(mdx_line, \"CodeFragment\"),\n\n Ok((\"\", (\"<CodeFragment count={3} />\", &JSXTagType::SelfClosed)))\n\n );\n\n}\n", "file_path": "src/parser/jsx/tests.rs", "rank": 98, "score": 76162.04688313053 }, { "content": "#[test]\n\npub fn test_form_jsx_component_first_line() {\n\n let mdx_line = \"<Component />\";\n\n assert_eq!(\n\n form_jsx_component_first_line(mdx_line, \"Component\"),\n\n Ok((\n\n \"\",\n\n (String::from(\"<Component />\"), HTMLTagType::SelfClosing, 0)\n\n ))\n\n );\n\n\n\n let mdx_line = \"<ComponentPure />\";\n\n assert_eq!(\n\n form_jsx_component_first_line(mdx_line, \"Component\"),\n\n Err(Err::Error(Error::new(\"Pure\", ErrorKind::Eof)))\n\n );\n\n\n\n let mdx_line = \"<Component\";\n\n assert_eq!(\n\n form_jsx_component_first_line(mdx_line, \"Component\"),\n\n Ok((\n\n \"\",\n\n (String::from(\"<Component\"), HTMLTagType::OpeningStart, 0)\n\n ))\n\n );\n\n}\n\n\n", "file_path": "src/parser/jsx/tests.rs", "rank": 99, "score": 76162.04688313053 } ]
Rust
sqlx-core/src/pool/mod.rs
broomstar/sqlx
6c8fd949dd052ddbad5c4dfe343682aed9286615
use std::{ fmt, ops::{Deref, DerefMut}, sync::Arc, time::{Duration, Instant}, }; use crate::Database; use self::inner::SharedPool; pub use self::options::Builder; use self::options::Options; mod executor; mod inner; mod options; pub struct Pool<DB>(Arc<SharedPool<DB>>) where DB: Database; struct Connection<DB: Database> { live: Option<Live<DB>>, pool: Arc<SharedPool<DB>>, } struct Live<DB: Database> { raw: DB::Connection, created: Instant, } struct Idle<DB: Database> { live: Live<DB>, since: Instant, } impl<DB> Pool<DB> where DB: Database, DB::Connection: crate::Connection<Database = DB>, { pub async fn new(url: &str) -> crate::Result<Self> { Self::builder().build(url).await } async fn with_options(url: &str, options: Options) -> crate::Result<Self> { let inner = SharedPool::new_arc(url, options).await?; Ok(Pool(inner)) } pub fn builder() -> Builder<DB> { Builder::new() } pub async fn acquire(&self) -> crate::Result<impl DerefMut<Target = DB::Connection>> { self.0.acquire().await.map(|conn| Connection { live: Some(conn), pool: Arc::clone(&self.0), }) } pub fn try_acquire(&self) -> Option<impl DerefMut<Target = DB::Connection>> { self.0.try_acquire().map(|conn| Connection { live: Some(conn), pool: Arc::clone(&self.0), }) } pub async fn close(&self) { self.0.close().await; } pub fn size(&self) -> u32 { self.0.size() } pub fn idle(&self) -> usize { self.0.num_idle() } pub fn max_size(&self) -> u32 { self.0.options().max_size } pub fn connect_timeout(&self) -> Duration { self.0.options().connect_timeout } pub fn min_size(&self) -> u32 { self.0.options().min_size } pub fn max_lifetime(&self) -> Option<Duration> { self.0.options().max_lifetime } pub fn idle_timeout(&self) -> Option<Duration> { self.0.options().idle_timeout } } impl<DB> Clone for Pool<DB> where DB: Database, { fn clone(&self) -> Self { Self(Arc::clone(&self.0)) } } impl<DB: Database> fmt::Debug for Pool<DB> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("Pool") .field("url", &self.0.url()) .field("size", &self.0.size()) .field("num_idle", &self.0.num_idle()) .field("is_closed", &self.0.is_closed()) .field("options", self.0.options()) .finish() } } const DEREF_ERR: &str = "(bug) connection already released to pool"; impl<DB: Database> Deref for Connection<DB> { type Target = DB::Connection; fn deref(&self) -> &Self::Target { &self.live.as_ref().expect(DEREF_ERR).raw } } impl<DB: Database> DerefMut for Connection<DB> { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.live.as_mut().expect(DEREF_ERR).raw } } impl<DB: Database> Drop for Connection<DB> { fn drop(&mut self) { if let Some(live) = self.live.take() { self.pool.release(live); } } }
use std::{ fmt, ops::{Deref, DerefMut}, sync::Arc, time::{Duration, Instant}, }; use crate::Database; use self::inner::SharedPool; pub use self::options::Builder; use self::options::Options; mod executor; mod inner; mod options; pub struct Pool<DB>(Arc<SharedPool<DB>>) where DB: Database; struct Connection<DB: Database> { live: Option<Live<DB>>, pool: Arc<SharedPool<DB>>, } struct Live<DB: Database> { raw: DB::Connection, created: Instant, } struct Idle<DB: Database> { live: Live<DB>, since: Instant, } impl<DB> Pool<DB> where DB: Database, DB::Connection: crate::Connection<Database = DB>, { pub async fn new(url: &str) -> crate::Result<Self> { Self::builder().build(url).await } async fn with_options(url: &str, options: Options) -> crate::Result<Self> { let inner = SharedPool::new_arc(url, options).await?; Ok(Pool(inner)) } pub fn builder() -> Builder<DB> { Builder::new() } pub async fn acquire(&self) -> crate::Result<impl DerefMut<Target = DB::Connection>> { self.0.acquire().await.map(|conn| Connection { live: Some(conn), pool: Arc::clone(&self.0), }) } pub fn try_acquire(&self) -> Option<impl DerefMut<Target = DB::Connection>> { self.0
f::Target { &mut self.live.as_mut().expect(DEREF_ERR).raw } } impl<DB: Database> Drop for Connection<DB> { fn drop(&mut self) { if let Some(live) = self.live.take() { self.pool.release(live); } } }
.try_acquire().map(|conn| Connection { live: Some(conn), pool: Arc::clone(&self.0), }) } pub async fn close(&self) { self.0.close().await; } pub fn size(&self) -> u32 { self.0.size() } pub fn idle(&self) -> usize { self.0.num_idle() } pub fn max_size(&self) -> u32 { self.0.options().max_size } pub fn connect_timeout(&self) -> Duration { self.0.options().connect_timeout } pub fn min_size(&self) -> u32 { self.0.options().min_size } pub fn max_lifetime(&self) -> Option<Duration> { self.0.options().max_lifetime } pub fn idle_timeout(&self) -> Option<Duration> { self.0.options().idle_timeout } } impl<DB> Clone for Pool<DB> where DB: Database, { fn clone(&self) -> Self { Self(Arc::clone(&self.0)) } } impl<DB: Database> fmt::Debug for Pool<DB> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("Pool") .field("url", &self.0.url()) .field("size", &self.0.size()) .field("num_idle", &self.0.num_idle()) .field("is_closed", &self.0.is_closed()) .field("options", self.0.options()) .finish() } } const DEREF_ERR: &str = "(bug) connection already released to pool"; impl<DB: Database> Deref for Connection<DB> { type Target = DB::Connection; fn deref(&self) -> &Self::Target { &self.live.as_ref().expect(DEREF_ERR).raw } } impl<DB: Database> DerefMut for Connection<DB> { fn deref_mut(&mut self) -> &mut Sel
random
[ { "content": "fn is_beyond_lifetime<DB: Database>(live: &Live<DB>, options: &Options) -> bool {\n\n // check if connection was within max lifetime (or not set)\n\n options\n\n .max_lifetime\n\n .map_or(false, |max| live.created.elapsed() > max)\n\n}\n\n\n", "file_path": "sqlx-core/src/pool/inner.rs", "rank": 0, "score": 274735.7137802503 }, { "content": "/// Construct a full SQL query that can be chained to bind parameters and executed.\n\n///\n\n/// # Examples\n\n///\n\n/// ```ignore\n\n/// let names: Vec<String> = sqlx::query(\"SELECT name FROM users WHERE active = ?\")\n\n/// .bind(false) // [active = ?]\n\n/// .fetch(&mut connection) // -> Stream<Item = impl Row>\n\n/// .map_ok(|row| row.name(\"name\")) // -> Stream<Item = String>\n\n/// .try_collect().await?; // -> Vec<String>\n\n/// ```\n\npub fn query<DB>(sql: &str) -> Query<DB>\n\nwhere\n\n DB: Database,\n\n{\n\n Query {\n\n database: PhantomData,\n\n arguments: Default::default(),\n\n query: sql,\n\n }\n\n}\n", "file_path": "sqlx-core/src/query.rs", "rank": 3, "score": 215975.3809720811 }, { "content": "fn is_beyond_idle<DB: Database>(idle: &Idle<DB>, options: &Options) -> bool {\n\n // if connection wasn't idle too long (or not set)\n\n options\n\n .idle_timeout\n\n .map_or(false, |timeout| idle.since.elapsed() > timeout)\n\n}\n\n\n\nasync fn check_live<DB: Database>(mut live: Live<DB>, options: &Options) -> Option<Live<DB>> {\n\n // If the connection we pulled has expired, close the connection and\n\n // immediately create a new connection\n\n if is_beyond_lifetime(&live, options) {\n\n // close the connection but don't really care about the result\n\n let _ = live.close().await;\n\n } else if options.test_on_acquire {\n\n // TODO: Check on acquire should be a configuration setting\n\n // Check that the connection is still live\n\n match live.raw.ping().await {\n\n // Connection still seems to respond\n\n Ok(_) => return Some(live),\n\n\n", "file_path": "sqlx-core/src/pool/inner.rs", "rank": 4, "score": 210366.46788756462 }, { "content": "/// if `max_lifetime` or `idle_timeout` is set, spawn a task that reaps senescent connections\n\nfn spawn_reaper<DB: Database>(pool: &Arc<SharedPool<DB>>)\n\nwhere\n\n DB::Connection: Connection<Database = DB>,\n\n{\n\n let period = match (pool.options.max_lifetime, pool.options.idle_timeout) {\n\n (Some(it), None) | (None, Some(it)) => it,\n\n\n\n (Some(a), Some(b)) => cmp::min(a, b),\n\n\n\n (None, None) => return,\n\n };\n\n\n\n let pool = Arc::clone(&pool);\n\n\n\n task::spawn(async move {\n\n while !pool.is_closed.load(Ordering::Acquire) {\n\n // reap at most the current size minus the minimum idle\n\n let max_reaped = pool\n\n .size\n\n .load(Ordering::Acquire)\n", "file_path": "sqlx-core/src/pool/inner.rs", "rank": 5, "score": 207941.91606811716 }, { "content": "#[inline]\n\npub fn query_as<DB, T>(query: &str) -> QueryAs<DB, T>\n\nwhere\n\n DB: Database,\n\n T: FromRow<DB::Row>,\n\n{\n\n QueryAs {\n\n query,\n\n args: Default::default(),\n\n map_row: |row| Ok(T::from_row(row)),\n\n }\n\n}\n\n\n", "file_path": "sqlx-core/src/query_as.rs", "rank": 6, "score": 202683.65094595758 }, { "content": "#[doc(hidden)]\n\npub fn query_as_mapped<DB, T>(\n\n query: &str,\n\n map_row: fn(DB::Row) -> crate::Result<T>,\n\n) -> QueryAs<DB, T>\n\nwhere\n\n DB: Database,\n\n{\n\n QueryAs {\n\n query,\n\n args: Default::default(),\n\n map_row,\n\n }\n\n}\n", "file_path": "sqlx-core/src/query_as.rs", "rank": 8, "score": 163526.32614592716 }, { "content": "pub fn quote_query_as<DB: DatabaseExt>(\n\n sql: &str,\n\n out_ty: &Path,\n\n columns: &[RustColumn],\n\n) -> TokenStream {\n\n let instantiations = columns.iter().enumerate().map(\n\n |(\n\n i,\n\n &RustColumn {\n\n ref ident,\n\n ref type_,\n\n ..\n\n },\n\n )| { quote!( #ident: #i.try_get::<#type_>(&row).try_unwrap_optional()? ) },\n\n );\n\n\n\n let db_path = DB::quotable_path();\n\n\n\n quote! {\n\n sqlx::query_as_mapped::<#db_path, _>(#sql, |row| {\n\n use sqlx::row::RowIndex as _;\n\n use sqlx::result_ext::ResultExt as _;\n\n Ok(#out_ty { #(#instantiations),* })\n\n })\n\n }\n\n}\n", "file_path": "sqlx-macros/src/query_macros/output.rs", "rank": 9, "score": 156192.4474719872 }, { "content": "pub fn quote_args<DB: DatabaseExt>(\n\n input: &QueryMacroInput,\n\n describe: &Describe<DB>,\n\n) -> crate::Result<TokenStream> {\n\n if input.args.is_empty() {\n\n return Ok(quote! {\n\n let args = ();\n\n });\n\n }\n\n\n\n let args_check = if DB::PARAM_CHECKING == ParamChecking::Strong {\n\n let param_types = describe\n\n .param_types\n\n .iter()\n\n .zip(&*input.args)\n\n .map(|(type_, expr)| {\n\n get_type_override(expr)\n\n .or_else(|| {\n\n Some(\n\n DB::param_type_for_id(type_)?\n", "file_path": "sqlx-macros/src/query_macros/args.rs", "rank": 10, "score": 156192.4474719872 }, { "content": "/// Represents a single database connection rather than a pool of database connections.\n\n///\n\n/// Prefer running queries from [Pool] unless there is a specific need for a single, continuous\n\n/// connection.\n\npub trait Connection: Executor + Send + 'static {\n\n /// Establish a new database connection.\n\n fn open<T>(url: T) -> BoxFuture<'static, crate::Result<Self>>\n\n where\n\n T: TryInto<Url, Error = crate::Error>,\n\n Self: Sized;\n\n\n\n /// Close this database connection.\n\n fn close(self) -> BoxFuture<'static, crate::Result<()>>;\n\n\n\n /// Verifies a connection to the database is still alive.\n\n fn ping(&mut self) -> BoxFuture<crate::Result<()>> {\n\n Box::pin(self.execute(\"SELECT 1\", Default::default()).map_ok(|_| ()))\n\n }\n\n}\n", "file_path": "sqlx-core/src/connection.rs", "rank": 11, "score": 152664.862679084 }, { "content": "pub fn columns_to_rust<DB: DatabaseExt>(describe: &Describe<DB>) -> crate::Result<Vec<RustColumn>> {\n\n describe\n\n .result_columns\n\n .iter()\n\n .enumerate()\n\n .map(|(i, column)| -> crate::Result<_> {\n\n let name = column\n\n .name\n\n .as_deref()\n\n .ok_or_else(|| format!(\"column at position {} must have a name\", i))?;\n\n\n\n let ident = syn::parse_str::<Ident>(name)\n\n .map_err(|_| format!(\"{:?} is not a valid Rust identifier\", name))?;\n\n\n\n let type_ = <DB as DatabaseExt>::return_type_for_id(&column.type_id)\n\n .ok_or_else(|| format!(\"unknown field type ID: {}\", &column.type_id))?\n\n .parse::<TokenStream>()\n\n .unwrap();\n\n\n\n Ok(RustColumn { ident, type_ })\n\n })\n\n .collect::<crate::Result<Vec<_>>>()\n\n}\n\n\n", "file_path": "sqlx-macros/src/query_macros/output.rs", "rank": 12, "score": 138135.28101383196 }, { "content": "/// Encapsulates query execution on the database.\n\n///\n\n/// Implemented primarily by [crate::Pool].\n\npub trait Executor {\n\n type Database: Database + ?Sized;\n\n\n\n /// Send a raw SQL command to the database.\n\n ///\n\n /// This is intended for queries that cannot or should not be prepared (ex. `BEGIN`).\n\n ///\n\n /// Does not support fetching results.\n\n fn send<'e, 'q: 'e>(&'e mut self, command: &'q str) -> BoxFuture<'e, crate::Result<()>>;\n\n\n\n /// Execute the query, returning the number of rows affected.\n\n fn execute<'e, 'q: 'e>(\n\n &'e mut self,\n\n query: &'q str,\n\n args: <Self::Database as Database>::Arguments,\n\n ) -> BoxFuture<'e, crate::Result<u64>>;\n\n\n\n /// Executes the query and returns a [Stream] of [Row].\n\n fn fetch<'e, 'q: 'e>(\n\n &'e mut self,\n", "file_path": "sqlx-core/src/executor.rs", "rank": 13, "score": 133124.1065023624 }, { "content": "// Hi(str, salt, i):\n\npub fn hi<'a>(s: &'a str, salt: &'a [u8], iter_count: u32) -> Result<[u8; 32]> {\n\n let mut mac = Hmac::<Sha256>::new_varkey(s.as_bytes())\n\n .map_err(|_| protocol_err!(\"HMAC can take key of any size\"))?;\n\n\n\n mac.input(&salt);\n\n mac.input(&1u32.to_be_bytes());\n\n\n\n let mut u = mac.result().code();\n\n let mut hi = u;\n\n\n\n for _ in 1..iter_count {\n\n let mut mac = Hmac::<Sha256>::new_varkey(s.as_bytes())\n\n .map_err(|_| protocol_err!(\" HMAC can take key of any size\"))?;\n\n mac.input(u.as_slice());\n\n u = mac.result().code();\n\n hi = hi.iter().zip(u.iter()).map(|(&a, &b)| a ^ b).collect();\n\n }\n\n\n\n Ok(hi.into())\n\n}\n", "file_path": "sqlx-core/src/postgres/protocol/sasl.rs", "rank": 14, "score": 122630.52200469616 }, { "content": "/// Encode a single value to be sent to the database.\n\npub trait Encode<DB>\n\nwhere\n\n DB: Database + ?Sized,\n\n{\n\n /// Writes the value of `self` into `buf` in the expected format for the database.\n\n fn encode(&self, buf: &mut Vec<u8>);\n\n\n\n fn encode_nullable(&self, buf: &mut Vec<u8>) -> IsNull {\n\n self.encode(buf);\n\n\n\n IsNull::No\n\n }\n\n\n\n fn size_hint(&self) -> usize {\n\n mem::size_of_val(self)\n\n }\n\n}\n\n\n\nimpl<T: ?Sized, DB> Encode<DB> for &'_ T\n\nwhere\n", "file_path": "sqlx-core/src/encode.rs", "rank": 15, "score": 117197.65887884023 }, { "content": "pub trait IntoArguments<DB>\n\nwhere\n\n DB: Database,\n\n{\n\n fn into_arguments(self) -> DB::Arguments;\n\n}\n\n\n\nimpl<DB> IntoArguments<DB> for DB::Arguments\n\nwhere\n\n DB: Database,\n\n{\n\n #[inline]\n\n fn into_arguments(self) -> DB::Arguments {\n\n self\n\n }\n\n}\n\n\n\n#[allow(unused)]\n\nmacro_rules! impl_into_arguments {\n\n ($B:ident: $( ($idx:tt) -> $T:ident );+;) => {\n", "file_path": "sqlx-core/src/arguments.rs", "rank": 16, "score": 117197.65887884023 }, { "content": "/// Decode a single value from the database.\n\npub trait Decode<DB>: Sized\n\nwhere\n\n DB: Database + ?Sized,\n\n{\n\n fn decode(raw: &[u8]) -> Result<Self, DecodeError>;\n\n\n\n /// Creates a new value of this type from a `NULL` SQL value.\n\n ///\n\n /// The default implementation returns [DecodeError::UnexpectedNull].\n\n fn decode_null() -> Result<Self, DecodeError> {\n\n Err(DecodeError::UnexpectedNull)\n\n }\n\n\n\n fn decode_nullable(raw: Option<&[u8]>) -> Result<Self, DecodeError> {\n\n if let Some(raw) = raw {\n\n Self::decode(raw)\n\n } else {\n\n Self::decode_null()\n\n }\n\n }\n", "file_path": "sqlx-core/src/decode.rs", "rank": 17, "score": 111887.20572874017 }, { "content": "#[proc_macro_hack]\n\npub fn query_as(input: TokenStream) -> TokenStream {\n\n #[allow(unused_variables)]\n\n let input = parse_macro_input!(input as QueryAsMacroInput);\n\n async_macro!(db => expand_query_as(input, db))\n\n}\n\n\n", "file_path": "sqlx-macros/src/lib.rs", "rank": 18, "score": 101538.86415303967 }, { "content": "#[proc_macro_hack]\n\npub fn query(input: TokenStream) -> TokenStream {\n\n #[allow(unused_variables)]\n\n let input = parse_macro_input!(input as QueryMacroInput);\n\n async_macro!(db => expand_query(input, db))\n\n}\n\n\n", "file_path": "sqlx-macros/src/lib.rs", "rank": 19, "score": 101538.86415303967 }, { "content": "#[proc_macro_hack]\n\npub fn query_file(input: TokenStream) -> TokenStream {\n\n #[allow(unused_variables)]\n\n let input = parse_macro_input!(input as QueryMacroInput);\n\n async_macro!(db => expand_query_file(input, db))\n\n}\n\n\n", "file_path": "sqlx-macros/src/lib.rs", "rank": 20, "score": 99990.0174917749 }, { "content": "#[proc_macro_hack]\n\npub fn query_file_as(input: TokenStream) -> TokenStream {\n\n #[allow(unused_variables)]\n\n let input = parse_macro_input!(input as QueryAsMacroInput);\n\n async_macro!(db => expand_query_file_as(input, db))\n\n}\n", "file_path": "sqlx-macros/src/lib.rs", "rank": 21, "score": 99990.0174917749 }, { "content": "// TODO: Does this need to be spawned in async-std ?\n\nfn hash_password(password: &str) -> anyhow::Result<String> {\n\n let salt = generate_random_salt();\n\n let hash = argon2::hash_encoded(password.as_bytes(), &salt, &argon2::Config::default())?;\n\n\n\n Ok(hash)\n\n}\n\n\n", "file_path": "examples/realworld-postgres/src/main.rs", "rank": 22, "score": 99632.79906770748 }, { "content": "fn parse(key: &str) -> crate::Result<PublicKey> {\n\n // This takes advantage of the knowledge that we know\n\n // we are receiving a PKCS#8 RSA Public Key at all\n\n // times from MySQL\n\n\n\n if !key.starts_with(\"-----BEGIN PUBLIC KEY-----\\n\") {\n\n return Err(protocol_err!(\n\n \"unexpected format for RSA Public Key from MySQL (expected PKCS#8); first line: {:?}\",\n\n key.splitn(1, '\\n').next()\n\n )\n\n .into());\n\n }\n\n\n\n let key_with_trailer = key.trim_start_matches(\"-----BEGIN PUBLIC KEY-----\\n\");\n\n let trailer_pos = key_with_trailer.find('-').unwrap_or(0);\n\n let inner_key = key_with_trailer[..trailer_pos].replace('\\n', \"\");\n\n\n\n let inner = base64::decode(&inner_key).map_err(|_err| {\n\n // TODO(@abonander): protocol_err doesn't like referring to [err]\n\n protocol_err!(\"unexpected error decoding what should be base64-encoded data\")\n", "file_path": "sqlx-core/src/mysql/rsa.rs", "rank": 23, "score": 99623.98392665456 }, { "content": "fn get_type_override(expr: &Expr) -> Option<TokenStream> {\n\n match expr {\n\n Expr::Cast(cast) => Some(cast.ty.to_token_stream()),\n\n Expr::Type(ascription) => Some(ascription.ty.to_token_stream()),\n\n _ => None,\n\n }\n\n}\n", "file_path": "sqlx-macros/src/query_macros/args.rs", "rank": 24, "score": 99393.59497084208 }, { "content": "// XOR(x, y)\n\n// If len(y) < len(x), wrap around inside y\n\npub fn xor_eq(x: &mut [u8], y: &[u8]) {\n\n let y_len = y.len();\n\n\n\n for i in 0..x.len() {\n\n x[i] ^= y[i % y_len];\n\n }\n\n}\n", "file_path": "sqlx-core/src/mysql/util.rs", "rank": 25, "score": 97101.90788082546 }, { "content": "// Nonce generator\n\n// Nonce is a sequence of random printable bytes\n\nfn nonce() -> String {\n\n let mut rng = rand::thread_rng();\n\n let count = rng.gen_range(64, 128);\n\n // printable = %x21-2B / %x2D-7E\n\n // ;; Printable ASCII except \",\".\n\n // ;; Note that any \"printable\" is also\n\n // ;; a valid \"value\".\n\n let nonce: String = std::iter::repeat(())\n\n .map(|()| {\n\n let mut c = rng.gen_range(0x21, 0x7F) as u8;\n\n\n\n while c == 0x2C {\n\n c = rng.gen_range(0x21, 0x7F) as u8;\n\n }\n\n\n\n c\n\n })\n\n .take(count)\n\n .map(|c| c as char)\n\n .collect();\n", "file_path": "sqlx-core/src/postgres/connection.rs", "rank": 26, "score": 95793.34718782722 }, { "content": "pub trait DatabaseExt: Database {\n\n const DATABASE_PATH: &'static str;\n\n\n\n const PARAM_CHECKING: ParamChecking;\n\n\n\n fn quotable_path() -> syn::Path {\n\n syn::parse_str(Self::DATABASE_PATH).unwrap()\n\n }\n\n\n\n fn param_type_for_id(id: &Self::TypeId) -> Option<&'static str>;\n\n\n\n fn return_type_for_id(id: &Self::TypeId) -> Option<&'static str>;\n\n}\n\n\n\nmacro_rules! impl_database_ext {\n\n ($database:path { $($(#[$meta:meta])? $ty:ty $(| $input:ty)?),*$(,)? }, ParamChecking::$param_checking:ident) => {\n\n impl $crate::database::DatabaseExt for $database {\n\n const DATABASE_PATH: &'static str = stringify!($database);\n\n const PARAM_CHECKING: $crate::database::ParamChecking = $crate::database::ParamChecking::$param_checking;\n\n\n", "file_path": "sqlx-macros/src/database/mod.rs", "rank": 27, "score": 91136.65061392708 }, { "content": "pub fn encrypt<D: Digest>(key: &[u8], message: &[u8]) -> crate::Result<Box<[u8]>> {\n\n let key = std::str::from_utf8(key).map_err(|_err| {\n\n // TODO(@abonander): protocol_err doesn't like referring to [err]\n\n protocol_err!(\"unexpected error decoding what should be UTF-8\")\n\n })?;\n\n\n\n let key = parse(key)?;\n\n\n\n Ok(oaep_encrypt::<_, D>(&mut thread_rng(), &key, message)?.into_boxed_slice())\n\n}\n\n\n", "file_path": "sqlx-core/src/mysql/rsa.rs", "rank": 28, "score": 81466.49963528561 }, { "content": "fn get_token_from_request(req: &Request<PgPool>) -> String {\n\n req.header(\"authorization\")\n\n .unwrap_or_default()\n\n .splitn(2, ' ')\n\n .nth(1)\n\n .unwrap_or_default()\n\n .to_owned()\n\n}\n\n\n\nasync fn authorize(token: &str) -> anyhow::Result<i64> {\n\n let data = jsonwebtoken::decode::<TokenClaims>(\n\n token,\n\n SECRET_KEY.as_ref(),\n\n &jsonwebtoken::Validation::default(),\n\n )?;\n\n\n\n Ok(data.claims.sub)\n\n}\n\n\n", "file_path": "examples/realworld-postgres/src/main.rs", "rank": 29, "score": 80985.65133212059 }, { "content": "use std::{marker::PhantomData, time::Duration};\n\n\n\nuse crate::Database;\n\n\n\nuse super::Pool;\n\n\n\n/// Builder for [Pool].\n\npub struct Builder<DB>\n\nwhere\n\n DB: Database,\n\n{\n\n phantom: PhantomData<DB>,\n\n options: Options,\n\n}\n\n\n\nimpl<DB> Builder<DB>\n\nwhere\n\n DB: Database,\n\n{\n\n /// Get a new builder with default options.\n", "file_path": "sqlx-core/src/pool/options.rs", "rank": 30, "score": 78619.78592377987 }, { "content": " pub fn test_on_acquire(mut self, test: bool) -> Self {\n\n self.options.test_on_acquire = test;\n\n self\n\n }\n\n\n\n /// Spin up the connection pool.\n\n ///\n\n /// If [min_size] was set to a non-zero value, that many connections will be immediately\n\n /// opened and placed into the pool.\n\n pub async fn build(self, url: &str) -> crate::Result<Pool<DB>> {\n\n Pool::with_options(url, self.options).await\n\n }\n\n}\n\n\n\nimpl<DB> Default for Builder<DB>\n\nwhere\n\n DB: Database,\n\n{\n\n fn default() -> Self {\n\n Self::new()\n", "file_path": "sqlx-core/src/pool/options.rs", "rank": 31, "score": 78619.48230069697 }, { "content": " },\n\n }\n\n }\n\n\n\n /// Set the maximum number of connections that this pool should maintain.\n\n pub fn max_size(mut self, max_size: u32) -> Self {\n\n self.options.max_size = max_size;\n\n self\n\n }\n\n\n\n /// Set the amount of time to attempt connecting to the database.\n\n ///\n\n /// If this timeout elapses, [Pool::acquire] will return an error.\n\n pub fn connect_timeout(mut self, connect_timeout: Duration) -> Self {\n\n self.options.connect_timeout = connect_timeout;\n\n self\n\n }\n\n\n\n /// Set the minimum number of connections to maintain at all times.\n\n ///\n", "file_path": "sqlx-core/src/pool/options.rs", "rank": 32, "score": 78606.49659058006 }, { "content": " ///\n\n /// See the source of this method for current defaults.\n\n pub fn new() -> Self {\n\n Self {\n\n phantom: PhantomData,\n\n options: Options {\n\n // pool a maximum of 10 connections to the same database\n\n max_size: 10,\n\n // don't open connections until necessary\n\n min_size: 0,\n\n // try to connect for 10 seconds before erroring\n\n connect_timeout: Duration::from_secs(60),\n\n // reap connections that have been alive > 30 minutes\n\n // prevents unbounded live-leaking of memory due to naive prepared statement caching\n\n // see src/cache.rs for context\n\n max_lifetime: Some(Duration::from_secs(1800)),\n\n // don't reap connections based on idle time\n\n idle_timeout: None,\n\n // If true, test the health of a connection on acquire\n\n test_on_acquire: true,\n", "file_path": "sqlx-core/src/pool/options.rs", "rank": 33, "score": 78605.46178335584 }, { "content": " /// When the pool is built, this many connections will be automatically spun up.\n\n ///\n\n /// If any connection is reaped by [max_lifetime] or [idle_timeout] and it brings\n\n /// the connection count below this amount, a new connection will be opened to replace it.\n\n pub fn min_size(mut self, min_size: u32) -> Self {\n\n self.options.min_size = min_size;\n\n self\n\n }\n\n\n\n /// Set the maximum lifetime of individual connections.\n\n ///\n\n /// Any connection with a lifetime greater than this will be closed.\n\n ///\n\n /// When set to `None`, all connections live until either reaped by [idle_timeout]\n\n /// or explicitly disconnected.\n\n ///\n\n /// Infinite connections are not recommended due to the unfortunate reality of memory/resource\n\n /// leaks on the database-side. It is better to retire connections periodically\n\n /// (even if only once daily) to allow the database the opportunity to clean up data structures\n\n /// (parse trees, query metadata caches, thread-local storage, etc.) that are associated with a\n", "file_path": "sqlx-core/src/pool/options.rs", "rank": 34, "score": 78603.45917201666 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) struct Options {\n\n pub max_size: u32,\n\n pub connect_timeout: Duration,\n\n pub min_size: u32,\n\n pub max_lifetime: Option<Duration>,\n\n pub idle_timeout: Option<Duration>,\n\n pub test_on_acquire: bool,\n\n}\n", "file_path": "sqlx-core/src/pool/options.rs", "rank": 35, "score": 78603.1865569174 }, { "content": " /// session.\n\n pub fn max_lifetime(mut self, max_lifetime: impl Into<Option<Duration>>) -> Self {\n\n self.options.max_lifetime = max_lifetime.into();\n\n self\n\n }\n\n\n\n /// Set a maximum idle duration for individual connections.\n\n ///\n\n /// Any connection with an idle duration longer than this will be closed.\n\n ///\n\n /// For usage-based database server billing, this can be a cost saver.\n\n pub fn idle_timeout(mut self, idle_timeout: impl Into<Option<Duration>>) -> Self {\n\n self.options.idle_timeout = idle_timeout.into();\n\n self\n\n }\n\n\n\n /// If true, the health of a connection will be verified by a call to `Connection::ping`\n\n /// before returning the connection.\n\n ///\n\n /// Defaults to `true`.\n", "file_path": "sqlx-core/src/pool/options.rs", "rank": 36, "score": 78602.56819562794 }, { "content": "use std::cmp;\n\nuse std::sync::atomic::{AtomicBool, AtomicU32, Ordering};\n\nuse std::sync::Arc;\n\nuse std::time::Instant;\n\n\n\nuse async_std::{future::timeout, task};\n\nuse crossbeam_queue::{ArrayQueue, SegQueue};\n\nuse futures_channel::oneshot::{channel, Sender};\n\n\n\nuse super::{Idle, Live, Options};\n\nuse crate::{error::Error, Connection, Database};\n\n\n\npub(super) struct SharedPool<DB>\n\nwhere\n\n DB: Database,\n\n{\n\n url: String,\n\n idle: ArrayQueue<Idle<DB>>,\n\n waiters: SegQueue<Sender<Live<DB>>>,\n\n size: AtomicU32,\n", "file_path": "sqlx-core/src/pool/inner.rs", "rank": 37, "score": 78545.27378443354 }, { "content": " async fn eventually_connect(&self, deadline: Instant) -> crate::Result<Live<DB>> {\n\n loop {\n\n // [connect] will raise an error when past deadline\n\n // [connect] returns None if its okay to retry\n\n if let Some(conn) = self.connect(deadline).await? {\n\n return Ok(conn);\n\n }\n\n }\n\n }\n\n\n\n async fn connect(&self, deadline: Instant) -> crate::Result<Option<Live<DB>>> {\n\n // FIXME: Code between `-` is duplicate with [acquire]\n\n // ---------------------------------\n\n\n\n // get the time between the deadline and now and use that as our timeout\n\n let until = deadline\n\n .checked_duration_since(Instant::now())\n\n .ok_or(Error::PoolTimedOut(None))?;\n\n\n\n // If pool was closed while waiting for a connection,\n", "file_path": "sqlx-core/src/pool/inner.rs", "rank": 38, "score": 78545.26445838307 }, { "content": " live,\n\n since: Instant::now(),\n\n });\n\n }\n\n\n\n pub(super) async fn acquire(&self) -> crate::Result<Live<DB>> {\n\n let start = Instant::now();\n\n let deadline = start + self.options.connect_timeout;\n\n\n\n // Unless the pool has been closed ...\n\n while !self.is_closed.load(Ordering::Acquire) {\n\n // Attempt to immediately acquire a connection. This will return Some\n\n // if there is an idle connection in our channel.\n\n if let Some(idle) = self.idle.pop().ok() {\n\n if let Some(live) = check_live(idle.live, &self.options).await {\n\n return Ok(live);\n\n }\n\n }\n\n\n\n let size = self.size.load(Ordering::Acquire);\n", "file_path": "sqlx-core/src/pool/inner.rs", "rank": 39, "score": 78544.40463051022 }, { "content": " is_closed: AtomicBool,\n\n options: Options,\n\n}\n\n\n\nimpl<DB> SharedPool<DB>\n\nwhere\n\n DB: Database,\n\n DB::Connection: Connection<Database = DB>,\n\n{\n\n pub(super) async fn new_arc(url: &str, options: Options) -> crate::Result<Arc<Self>> {\n\n let pool = Arc::new(Self {\n\n url: url.to_owned(),\n\n idle: ArrayQueue::new(options.max_size as usize),\n\n waiters: SegQueue::new(),\n\n size: AtomicU32::new(0),\n\n is_closed: AtomicBool::new(false),\n\n options,\n\n });\n\n\n\n // If a minimum size was configured for the pool,\n", "file_path": "sqlx-core/src/pool/inner.rs", "rank": 40, "score": 78538.53758239005 }, { "content": " // establish N connections\n\n // TODO: Should we do this in the background?\n\n for _ in 0..pool.options.min_size {\n\n let live = pool\n\n .eventually_connect(Instant::now() + pool.options.connect_timeout)\n\n .await?;\n\n\n\n // Ignore error here, we are capping this loop by min_size which we\n\n // already should make sure is less than max_size\n\n let _ = pool.idle.push(Idle {\n\n live,\n\n since: Instant::now(),\n\n });\n\n }\n\n\n\n spawn_reaper(&pool);\n\n\n\n Ok(pool)\n\n }\n\n\n", "file_path": "sqlx-core/src/pool/inner.rs", "rank": 41, "score": 78538.36689554427 }, { "content": " // release the connection\n\n if self.is_closed.load(Ordering::Acquire) {\n\n self.size.fetch_sub(1, Ordering::AcqRel); // ?\n\n\n\n return Err(Error::PoolClosed);\n\n }\n\n\n\n // ---------------------------------\n\n\n\n // result here is `Result<Result<DB, Error>, TimeoutError>`\n\n match timeout(until, DB::Connection::open(&self.url)).await {\n\n // successfully established connection\n\n Ok(Ok(raw)) => {\n\n Ok(Some(Live {\n\n raw,\n\n // remember when it was created so we can expire it\n\n // if there is a [max_lifetime] set\n\n created: Instant::now(),\n\n }))\n\n }\n", "file_path": "sqlx-core/src/pool/inner.rs", "rank": 42, "score": 78538.34088721385 }, { "content": "\n\n if size >= self.options.max_size {\n\n // Too many open connections\n\n // Wait until one is available\n\n let (tx, rx) = channel();\n\n\n\n self.waiters.push(tx);\n\n\n\n // get the time between the deadline and now and use that as our timeout\n\n let until = deadline\n\n .checked_duration_since(Instant::now())\n\n .ok_or(Error::PoolTimedOut(None))?;\n\n\n\n // don't sleep forever\n\n let live = match timeout(until, rx).await {\n\n // A connection was returned to the pool\n\n Ok(Ok(live)) => live,\n\n\n\n // Pool dropped without dropping waiter\n\n Ok(Err(_)) => unreachable!(),\n", "file_path": "sqlx-core/src/pool/inner.rs", "rank": 43, "score": 78536.78063984799 }, { "content": "}\n\n\n\nimpl<DB: Database> Idle<DB>\n\nwhere\n\n DB::Connection: Connection<Database = DB>,\n\n{\n\n async fn close(self) {\n\n self.live.close().await;\n\n }\n\n}\n\n\n\nimpl<DB: Database> Live<DB>\n\nwhere\n\n DB::Connection: Connection<Database = DB>,\n\n{\n\n async fn close(self) {\n\n let _ = self.raw.close().await;\n\n }\n\n}\n\n\n\n// NOTE: Function names here are bizzare. Helpful help would be appreciated.\n\n\n", "file_path": "sqlx-core/src/pool/inner.rs", "rank": 44, "score": 78535.44646540364 }, { "content": "\n\n // Timed out waiting for a connection\n\n // Error is not forwarded as its useless context\n\n Err(_) => {\n\n return Err(Error::PoolTimedOut(None));\n\n }\n\n };\n\n\n\n // If pool was closed while waiting for a connection,\n\n // release the connection\n\n if self.is_closed.load(Ordering::Acquire) {\n\n live.close().await;\n\n self.size.fetch_sub(1, Ordering::AcqRel);\n\n\n\n return Err(Error::PoolClosed);\n\n }\n\n\n\n match check_live(live, &self.options).await {\n\n Some(live) => return Ok(live),\n\n\n", "file_path": "sqlx-core/src/pool/inner.rs", "rank": 45, "score": 78530.50734557412 }, { "content": " .saturating_sub(pool.options.min_size);\n\n\n\n // collect connections to reap\n\n let (reap, keep) = (0..max_reaped)\n\n // only connections waiting in the queue\n\n .filter_map(|_| pool.idle.pop().ok())\n\n .partition::<Vec<_>, _>(|conn| {\n\n is_beyond_idle(conn, &pool.options)\n\n || is_beyond_lifetime(&conn.live, &pool.options)\n\n });\n\n\n\n for conn in keep {\n\n // return these connections to the pool first\n\n pool.idle.push(conn).expect(\"unreachable: pool overflowed\");\n\n }\n\n\n\n for conn in reap {\n\n conn.close().await;\n\n pool.size.fetch_sub(1, Ordering::AcqRel);\n\n }\n\n\n\n task::sleep(period).await;\n\n }\n\n });\n\n}\n", "file_path": "sqlx-core/src/pool/inner.rs", "rank": 46, "score": 78529.50450067395 }, { "content": "\n\n pub(super) async fn close(&self) {\n\n self.is_closed.store(true, Ordering::Release);\n\n\n\n while self.size.load(Ordering::Acquire) > 0 {\n\n // don't block on the receiver because we own one Sender so it should never return\n\n // `None`; a `select!()` would also work but that produces more complicated code\n\n // and a timeout isn't necessarily appropriate\n\n while let Ok(idle) = self.idle.pop() {\n\n idle.close().await;\n\n self.size.fetch_sub(1, Ordering::AcqRel);\n\n }\n\n\n\n task::yield_now().await\n\n }\n\n }\n\n\n\n #[inline]\n\n pub(super) fn try_acquire(&self) -> Option<Live<DB>> {\n\n if self.is_closed.load(Ordering::Acquire) {\n", "file_path": "sqlx-core/src/pool/inner.rs", "rank": 47, "score": 78527.77981541133 }, { "content": " return None;\n\n }\n\n\n\n Some(self.idle.pop().ok()?.live)\n\n }\n\n\n\n pub(super) fn release(&self, mut live: Live<DB>) {\n\n // Try waiters in (FIFO) order until one is still waiting ..\n\n while let Ok(waiter) = self.waiters.pop() {\n\n live = match waiter.send(live) {\n\n // successfully released\n\n Ok(()) => return,\n\n\n\n Err(live) => live,\n\n };\n\n }\n\n\n\n // .. if there were no waiters still waiting, just push the connection\n\n // back to the idle queue\n\n let _ = self.idle.push(Idle {\n", "file_path": "sqlx-core/src/pool/inner.rs", "rank": 48, "score": 78527.45823250072 }, { "content": " pub fn options(&self) -> &Options {\n\n &self.options\n\n }\n\n\n\n pub(super) fn url(&self) -> &str {\n\n &self.url\n\n }\n\n\n\n pub(super) fn size(&self) -> u32 {\n\n self.size.load(Ordering::Acquire)\n\n }\n\n\n\n pub(super) fn num_idle(&self) -> usize {\n\n // NOTE: This is very expensive\n\n self.waiters.len()\n\n }\n\n\n\n pub(super) fn is_closed(&self) -> bool {\n\n self.is_closed.load(Ordering::Acquire)\n\n }\n", "file_path": "sqlx-core/src/pool/inner.rs", "rank": 49, "score": 78526.38064729275 }, { "content": " // an error here means the other end has hung up or we lost connectivity\n\n // either way we're fine to just discard the connection\n\n // the error itself here isn't necessarily unexpected so WARN is too strong\n\n Err(e) => log::info!(\"ping on idle connection returned error: {}\", e),\n\n }\n\n\n\n // make sure the idle connection is gone explicitly before we open one\n\n // this will close the resources for the stream on our side\n\n drop(live);\n\n } else {\n\n // No need to re-connect\n\n return Some(live);\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "sqlx-core/src/pool/inner.rs", "rank": 50, "score": 78522.4445420323 }, { "content": " // Need to re-connect\n\n None => {}\n\n }\n\n } else if self.size.compare_and_swap(size, size + 1, Ordering::AcqRel) != size {\n\n // size was incremented while we compared it just above\n\n continue;\n\n }\n\n\n\n // pool has slots available; open a new connection\n\n match self.connect(deadline).await {\n\n Ok(Some(conn)) => return Ok(conn),\n\n // [size] is internally decremented on _retry_ and _error_\n\n Ok(None) => continue,\n\n Err(e) => return Err(e),\n\n }\n\n }\n\n\n\n Err(Error::PoolClosed)\n\n }\n\n\n", "file_path": "sqlx-core/src/pool/inner.rs", "rank": 51, "score": 78521.83679345799 }, { "content": "\n\n // IO error while connecting, this should definitely be logged\n\n // and we should attempt to retry\n\n Ok(Err(crate::Error::Io(e))) => {\n\n log::warn!(\"error establishing a connection: {}\", e);\n\n\n\n Ok(None)\n\n }\n\n\n\n // Any other error while connection should immediately\n\n // terminate and bubble the error up\n\n Ok(Err(e)) => Err(e),\n\n\n\n // timed out\n\n Err(e) => {\n\n self.size.fetch_sub(1, Ordering::AcqRel); // ?\n\n Err(Error::PoolTimedOut(Some(Box::new(e))))\n\n }\n\n }\n\n }\n", "file_path": "sqlx-core/src/pool/inner.rs", "rank": 52, "score": 78520.96637459047 }, { "content": " query: &'q str,\n\n args: DB::Arguments,\n\n ) -> BoxFuture<'e, crate::Result<Option<DB::Row>>> {\n\n Box::pin(\n\n async move { <&Pool<DB> as Executor>::fetch_optional(&mut &*self, query, args).await },\n\n )\n\n }\n\n\n\n fn describe<'e, 'q: 'e>(\n\n &'e mut self,\n\n query: &'q str,\n\n ) -> BoxFuture<'e, crate::Result<Describe<Self::Database>>> {\n\n Box::pin(async move { <&Pool<DB> as Executor>::describe(&mut &*self, query).await })\n\n }\n\n}\n\n\n\nimpl<DB> Executor for &'_ Pool<DB>\n\nwhere\n\n DB: Database,\n\n{\n", "file_path": "sqlx-core/src/pool/executor.rs", "rank": 60, "score": 78246.9942999539 }, { "content": "use futures_core::{future::BoxFuture, stream::BoxStream};\n\nuse futures_util::StreamExt;\n\n\n\nuse crate::{describe::Describe, executor::Executor, pool::Pool, Database};\n\n\n\nimpl<DB> Executor for Pool<DB>\n\nwhere\n\n DB: Database,\n\n{\n\n type Database = DB;\n\n\n\n fn send<'e, 'q: 'e>(&'e mut self, commands: &'q str) -> BoxFuture<'e, crate::Result<()>> {\n\n Box::pin(async move { <&Pool<DB> as Executor>::send(&mut &*self, commands).await })\n\n }\n\n\n\n fn execute<'e, 'q: 'e>(\n\n &'e mut self,\n\n query: &'q str,\n\n args: DB::Arguments,\n\n ) -> BoxFuture<'e, crate::Result<u64>> {\n", "file_path": "sqlx-core/src/pool/executor.rs", "rank": 61, "score": 78245.99291816082 }, { "content": " Box::pin(async move { <&Pool<DB> as Executor>::execute(&mut &*self, query, args).await })\n\n }\n\n\n\n fn fetch<'e, 'q: 'e>(\n\n &'e mut self,\n\n query: &'q str,\n\n args: DB::Arguments,\n\n ) -> BoxStream<'e, crate::Result<DB::Row>> {\n\n Box::pin(async_stream::try_stream! {\n\n let mut self_ = &*self;\n\n let mut s = <&Pool<DB> as Executor>::fetch(&mut self_, query, args);\n\n\n\n while let Some(row) = s.next().await.transpose()? {\n\n yield row;\n\n }\n\n })\n\n }\n\n\n\n fn fetch_optional<'e, 'q: 'e>(\n\n &'e mut self,\n", "file_path": "sqlx-core/src/pool/executor.rs", "rank": 62, "score": 78243.99946713085 }, { "content": " let mut live = self.acquire().await?;\n\n let mut s = live.fetch(query, args);\n\n\n\n while let Some(row) = s.next().await.transpose()? {\n\n yield row;\n\n }\n\n })\n\n }\n\n\n\n fn fetch_optional<'e, 'q: 'e>(\n\n &'e mut self,\n\n query: &'q str,\n\n args: DB::Arguments,\n\n ) -> BoxFuture<'e, crate::Result<Option<DB::Row>>> {\n\n Box::pin(async move { self.acquire().await?.fetch_optional(query, args).await })\n\n }\n\n\n\n fn describe<'e, 'q: 'e>(\n\n &'e mut self,\n\n query: &'q str,\n\n ) -> BoxFuture<'e, crate::Result<Describe<Self::Database>>> {\n\n Box::pin(async move { self.acquire().await?.describe(query).await })\n\n }\n\n}\n", "file_path": "sqlx-core/src/pool/executor.rs", "rank": 63, "score": 78240.50893550218 }, { "content": " type Database = DB;\n\n\n\n fn send<'e, 'q: 'e>(&'e mut self, commands: &'q str) -> BoxFuture<'e, crate::Result<()>> {\n\n Box::pin(async move { self.acquire().await?.send(commands).await })\n\n }\n\n\n\n fn execute<'e, 'q: 'e>(\n\n &'e mut self,\n\n query: &'q str,\n\n args: DB::Arguments,\n\n ) -> BoxFuture<'e, crate::Result<u64>> {\n\n Box::pin(async move { self.acquire().await?.execute(query, args).await })\n\n }\n\n\n\n fn fetch<'e, 'q: 'e>(\n\n &'e mut self,\n\n query: &'q str,\n\n args: DB::Arguments,\n\n ) -> BoxStream<'e, crate::Result<DB::Row>> {\n\n Box::pin(async_stream::try_stream! {\n", "file_path": "sqlx-core/src/pool/executor.rs", "rank": 64, "score": 78234.71594069643 }, { "content": "#[derive(Debug)]\n\nstruct Account {\n\n id: i32,\n\n name: Option<String>,\n\n}\n\n\n\n#[async_std::test]\n\nasync fn test_query_as() -> sqlx::Result<()> {\n\n let mut conn = sqlx::postgres::connect(&dotenv::var(\"DATABASE_URL\").unwrap()).await?;\n\n\n\n let account = sqlx::query_as!(\n\n Account,\n\n \"SELECT * from (VALUES (1, null)) accounts(id, name)\"\n\n )\n\n .fetch_one(&mut conn)\n\n .await?;\n\n\n\n assert_eq!(None, account.name);\n\n\n\n println!(\"{:?}\", account);\n\n\n", "file_path": "tests/postgres-macros.rs", "rank": 65, "score": 68023.97385790627 }, { "content": "#[derive(serde::Serialize)]\n\nstruct User {\n\n email: String,\n\n token: String,\n\n username: String,\n\n}\n\n\n\n// Registration\n\n// https://github.com/gothinkster/realworld/tree/master/api#registration\n\n\n\n// #[post(\"/api/users\")]\n\nasync fn register(mut req: Request<PgPool>) -> Response {\n\n #[derive(serde::Deserialize)]\n\n struct RegisterRequestBody {\n\n username: String,\n\n email: String,\n\n password: String,\n\n }\n\n\n\n let body: RegisterRequestBody = req.body_json().await.unwrap();\n\n let hash = hash_password(&body.password).unwrap();\n", "file_path": "examples/realworld-postgres/src/main.rs", "rank": 66, "score": 65849.35593088262 }, { "content": "#[derive(serde::Serialize, serde::Deserialize)]\n\nstruct TokenClaims {\n\n sub: i64,\n\n exp: i64,\n\n}\n\n\n", "file_path": "examples/realworld-postgres/src/main.rs", "rank": 67, "score": 64847.05857552183 }, { "content": "#[derive(Debug)]\n\nstruct PublicKey {\n\n n: BigUint,\n\n e: BigUint,\n\n}\n\n\n", "file_path": "sqlx-core/src/mysql/rsa.rs", "rank": 68, "score": 64847.05857552183 }, { "content": "#[test]\n\nfn test_tycons_ext() {\n\n if false {\n\n let _: u64 = TyCons::new(&Some(5u64)).ty_cons();\n\n let _: u64 = TyCons::new(&Some(&5u64)).ty_cons();\n\n let _: u64 = TyCons::new(&&5u64).ty_cons();\n\n let _: u64 = TyCons::new(&5u64).ty_cons();\n\n }\n\n}\n", "file_path": "src/ty_cons.rs", "rank": 69, "score": 62026.46358407136 }, { "content": "pub trait Buf {\n\n fn advance(&mut self, cnt: usize);\n\n\n\n fn get_uint<T: ByteOrder>(&mut self, n: usize) -> io::Result<u64>;\n\n\n\n fn get_u8(&mut self) -> io::Result<u8>;\n\n\n\n fn get_u16<T: ByteOrder>(&mut self) -> io::Result<u16>;\n\n\n\n fn get_i16<T: ByteOrder>(&mut self) -> io::Result<i16>;\n\n\n\n fn get_u24<T: ByteOrder>(&mut self) -> io::Result<u32>;\n\n\n\n fn get_i32<T: ByteOrder>(&mut self) -> io::Result<i32>;\n\n\n\n fn get_u32<T: ByteOrder>(&mut self) -> io::Result<u32>;\n\n\n\n fn get_u64<T: ByteOrder>(&mut self) -> io::Result<u64>;\n\n\n\n fn get_str(&mut self, len: usize) -> io::Result<&str>;\n", "file_path": "sqlx-core/src/io/buf.rs", "rank": 70, "score": 59620.91861136071 }, { "content": "pub trait ToBuf {\n\n fn to_buf(&self) -> &[u8];\n\n}\n\n\n\nimpl ToBuf for [u8] {\n\n fn to_buf(&self) -> &[u8] {\n\n self\n\n }\n\n}\n\n\n\nimpl ToBuf for u8 {\n\n fn to_buf(&self) -> &[u8] {\n\n slice::from_ref(self)\n\n }\n\n}\n", "file_path": "sqlx-core/src/io/buf.rs", "rank": 71, "score": 59620.91861136071 }, { "content": "/// Information about how a database stores metadata about given SQL types.\n\npub trait HasTypeMetadata {\n\n /// The actual type used to represent metadata.\n\n type TypeMetadata: PartialEq<Self::TypeId>;\n\n\n\n /// The Rust type of table identifiers.\n\n type TableId: Display;\n\n\n\n /// The Rust type of type identifiers.\n\n type TypeId: Display;\n\n}\n\n\n", "file_path": "sqlx-core/src/types.rs", "rank": 72, "score": 59620.91861136071 }, { "content": "#[test]\n\nfn test_encode_date() {\n\n let mut buf = Vec::new();\n\n\n\n let date = NaiveDate::from_ymd(2000, 1, 1);\n\n Encode::<Postgres>::encode(&date, &mut buf);\n\n assert_eq!(buf, [0; 4]);\n\n buf.clear();\n\n\n\n let date2 = NaiveDate::from_ymd(2001, 1, 1);\n\n Encode::<Postgres>::encode(&date2, &mut buf);\n\n // 2000 was a leap year\n\n assert_eq!(buf, 366i32.to_be_bytes());\n\n buf.clear();\n\n\n\n let date3 = NaiveDate::from_ymd(2019, 12, 11);\n\n Encode::<Postgres>::encode(&date3, &mut buf);\n\n assert_eq!(buf, 7284i32.to_be_bytes());\n\n buf.clear();\n\n}\n\n\n", "file_path": "sqlx-core/src/postgres/types/chrono.rs", "rank": 73, "score": 59199.9079513098 }, { "content": "#[test]\n\nfn test_decode_datetime() {\n\n let buf = [0u8; 8];\n\n let date: NaiveDateTime = Decode::<Postgres>::decode(&buf).unwrap();\n\n assert_eq!(date.to_string(), \"2000-01-01 00:00:00\");\n\n\n\n let buf = 3_600_000_000i64.to_be_bytes();\n\n let date: NaiveDateTime = Decode::<Postgres>::decode(&buf).unwrap();\n\n assert_eq!(date.to_string(), \"2000-01-01 01:00:00\");\n\n\n\n let buf = 629_377_265_000_000i64.to_be_bytes();\n\n let date: NaiveDateTime = Decode::<Postgres>::decode(&buf).unwrap();\n\n assert_eq!(date.to_string(), \"2019-12-11 11:01:05\");\n\n}\n\n\n", "file_path": "sqlx-core/src/postgres/types/chrono.rs", "rank": 74, "score": 59199.9079513098 }, { "content": "fn scramble_sha1(\n\n password: &str,\n\n seed: &[u8],\n\n) -> GenericArray<u8, <Sha1 as FixedOutput>::OutputSize> {\n\n // SHA1( password ) ^ SHA1( seed + SHA1( SHA1( password ) ) )\n\n // https://mariadb.com/kb/en/connection/#mysql_native_password-plugin\n\n\n\n let mut ctx = Sha1::new();\n\n\n\n ctx.input(password);\n\n\n\n let mut pw_hash = ctx.result_reset();\n\n\n\n ctx.input(&pw_hash);\n\n\n\n let pw_hash_hash = ctx.result_reset();\n\n\n\n ctx.input(seed);\n\n ctx.input(pw_hash_hash);\n\n\n\n let pw_seed_hash_hash = ctx.result();\n\n\n\n xor_eq(&mut pw_hash, &pw_seed_hash_hash);\n\n\n\n pw_hash\n\n}\n\n\n", "file_path": "sqlx-core/src/mysql/protocol/auth_plugin.rs", "rank": 75, "score": 59199.9079513098 }, { "content": "#[test]\n\nfn test_encode_date() {\n\n let mut buf = Vec::new();\n\n let date: NaiveDate = \"2010-10-17\".parse().unwrap();\n\n Encode::<MySql>::encode(&date, &mut buf);\n\n assert_eq!(*buf, [4, 218, 7, 10, 17]);\n\n}\n\n\n", "file_path": "sqlx-core/src/mysql/types/chrono.rs", "rank": 76, "score": 59199.9079513098 }, { "content": "#[test]\n\nfn test_encode_datetime() {\n\n let mut buf = Vec::new();\n\n\n\n let date = postgres_epoch();\n\n Encode::<Postgres>::encode(&date, &mut buf);\n\n assert_eq!(buf, [0; 8]);\n\n buf.clear();\n\n\n\n // one hour past epoch\n\n let date2 = postgres_epoch() + Duration::hours(1);\n\n Encode::<Postgres>::encode(&date2, &mut buf);\n\n assert_eq!(buf, 3_600_000_000i64.to_be_bytes());\n\n buf.clear();\n\n\n\n // some random date\n\n let date3: NaiveDateTime = \"2019-12-11T11:01:05\".parse().unwrap();\n\n let expected = dbg!((date3 - postgres_epoch().naive_utc())\n\n .num_microseconds()\n\n .unwrap());\n\n Encode::<Postgres>::encode(&date3, &mut buf);\n\n assert_eq!(buf, expected.to_be_bytes());\n\n buf.clear();\n\n}\n\n\n", "file_path": "sqlx-core/src/postgres/types/chrono.rs", "rank": 77, "score": 59199.9079513098 }, { "content": "#[test]\n\nfn test_decode_date() {\n\n let buf = [0; 4];\n\n let date: NaiveDate = Decode::<Postgres>::decode(&buf).unwrap();\n\n assert_eq!(date.to_string(), \"2000-01-01\");\n\n\n\n let buf = 366i32.to_be_bytes();\n\n let date: NaiveDate = Decode::<Postgres>::decode(&buf).unwrap();\n\n assert_eq!(date.to_string(), \"2001-01-01\");\n\n\n\n let buf = 7284i32.to_be_bytes();\n\n let date: NaiveDate = Decode::<Postgres>::decode(&buf).unwrap();\n\n assert_eq!(date.to_string(), \"2019-12-11\");\n\n}\n", "file_path": "sqlx-core/src/postgres/types/chrono.rs", "rank": 78, "score": 59199.9079513098 }, { "content": "fn scramble_sha256(\n\n password: &str,\n\n seed: &[u8],\n\n) -> GenericArray<u8, <Sha256 as FixedOutput>::OutputSize> {\n\n // XOR(SHA256(password), SHA256(seed, SHA256(SHA256(password))))\n\n // https://mariadb.com/kb/en/caching_sha2_password-authentication-plugin/#sha-2-encrypted-password\n\n let mut ctx = Sha256::new();\n\n\n\n ctx.input(password);\n\n\n\n let mut pw_hash = ctx.result_reset();\n\n\n\n ctx.input(&pw_hash);\n\n\n\n let pw_hash_hash = ctx.result_reset();\n\n\n\n ctx.input(seed);\n\n ctx.input(pw_hash_hash);\n\n\n\n let pw_seed_hash_hash = ctx.result();\n\n\n\n xor_eq(&mut pw_hash, &pw_seed_hash_hash);\n\n\n\n pw_hash\n\n}\n", "file_path": "sqlx-core/src/mysql/protocol/auth_plugin.rs", "rank": 79, "score": 59199.9079513098 }, { "content": "#[test]\n\nfn test_decode_date() {\n\n let buf = [4, 218, 7, 10, 17];\n\n let date = <NaiveDate as Decode<MySql>>::decode(&buf).unwrap();\n\n assert_eq!(date.to_string(), \"2010-10-17\");\n\n}\n", "file_path": "sqlx-core/src/mysql/types/chrono.rs", "rank": 80, "score": 59199.9079513098 }, { "content": "pub trait Decode {\n\n fn decode(buf: &[u8]) -> crate::Result<Self>\n\n where\n\n Self: Sized;\n\n}\n", "file_path": "sqlx-core/src/postgres/protocol/decode.rs", "rank": 81, "score": 58677.78084387117 }, { "content": "pub trait Encode {\n\n fn encode(&self, buf: &mut Vec<u8>);\n\n}\n", "file_path": "sqlx-core/src/postgres/protocol/encode.rs", "rank": 82, "score": 58677.78084387117 }, { "content": "pub trait Encode {\n\n fn encode(&self, buf: &mut Vec<u8>, capabilities: Capabilities);\n\n}\n\n\n\nimpl Encode for &'_ [u8] {\n\n fn encode(&self, buf: &mut Vec<u8>, _: Capabilities) {\n\n buf.put_bytes(self);\n\n }\n\n}\n", "file_path": "sqlx-core/src/mysql/protocol/encode.rs", "rank": 83, "score": 58677.78084387117 }, { "content": "pub trait Decode {\n\n fn decode(buf: &[u8]) -> crate::Result<Self>\n\n where\n\n Self: Sized;\n\n}\n", "file_path": "sqlx-core/src/mysql/protocol/decode.rs", "rank": 84, "score": 58677.78084387117 }, { "content": "#[test]\n\nfn test_decode_date_time() {\n\n // test values from https://dev.mysql.com/doc/internals/en/binary-protocol-value.html\n\n let buf = [11, 218, 7, 10, 17, 19, 27, 30, 1, 0, 0, 0];\n\n let date1 = <NaiveDateTime as Decode<MySql>>::decode(&buf).unwrap();\n\n assert_eq!(date1.to_string(), \"2010-10-17 19:27:30.000001\");\n\n\n\n let buf = [7, 218, 7, 10, 17, 19, 27, 30];\n\n let date2 = <NaiveDateTime as Decode<MySql>>::decode(&buf).unwrap();\n\n assert_eq!(date2.to_string(), \"2010-10-17 19:27:30\");\n\n\n\n let buf = [4, 218, 7, 10, 17];\n\n let date3 = <NaiveDateTime as Decode<MySql>>::decode(&buf).unwrap();\n\n assert_eq!(date3.to_string(), \"2010-10-17 00:00:00\");\n\n}\n\n\n", "file_path": "sqlx-core/src/mysql/types/chrono.rs", "rank": 85, "score": 58348.82032568868 }, { "content": "#[test]\n\nfn test_encode_date_time() {\n\n let mut buf = Vec::new();\n\n\n\n // test values from https://dev.mysql.com/doc/internals/en/binary-protocol-value.html\n\n let date1: NaiveDateTime = \"2010-10-17T19:27:30.000001\".parse().unwrap();\n\n Encode::<MySql>::encode(&date1, &mut buf);\n\n assert_eq!(*buf, [11, 218, 7, 10, 17, 19, 27, 30, 1, 0, 0, 0]);\n\n\n\n buf.clear();\n\n\n\n let date2: NaiveDateTime = \"2010-10-17T19:27:30\".parse().unwrap();\n\n Encode::<MySql>::encode(&date2, &mut buf);\n\n assert_eq!(*buf, [7, 218, 7, 10, 17, 19, 27, 30]);\n\n\n\n buf.clear();\n\n\n\n let date3: NaiveDateTime = \"2010-10-17T00:00:00\".parse().unwrap();\n\n Encode::<MySql>::encode(&date3, &mut buf);\n\n assert_eq!(*buf, [4, 218, 7, 10, 17]);\n\n}\n\n\n", "file_path": "sqlx-core/src/mysql/types/chrono.rs", "rank": 86, "score": 58348.82032568868 }, { "content": "pub trait BufMut {\n\n fn advance(&mut self, cnt: usize);\n\n\n\n fn put_u8(&mut self, val: u8);\n\n\n\n fn put_u16<T: ByteOrder>(&mut self, val: u16);\n\n\n\n fn put_i16<T: ByteOrder>(&mut self, val: i16);\n\n\n\n fn put_u24<T: ByteOrder>(&mut self, val: u32);\n\n\n\n fn put_i32<T: ByteOrder>(&mut self, val: i32);\n\n\n\n fn put_u32<T: ByteOrder>(&mut self, val: u32);\n\n\n\n fn put_u64<T: ByteOrder>(&mut self, val: u64);\n\n\n\n fn put_bytes(&mut self, val: &[u8]);\n\n\n\n fn put_str(&mut self, val: &str);\n", "file_path": "sqlx-core/src/io/buf_mut.rs", "rank": 87, "score": 57781.36733275268 }, { "content": "/// A **record** that can be built from a row returned from by the database.\n\npub trait FromRow<R>\n\nwhere\n\n R: Row,\n\n{\n\n fn from_row(row: R) -> Self;\n\n}\n\n\n\n#[allow(unused_macros)]\n\nmacro_rules! impl_from_row_for_row {\n\n ($R:ty) => {\n\n impl crate::row::FromRow<$R> for $R {\n\n #[inline]\n\n fn from_row(row: $R) -> Self {\n\n row\n\n }\n\n }\n\n };\n\n}\n", "file_path": "sqlx-core/src/row.rs", "rank": 88, "score": 57663.675814047696 }, { "content": "pub trait BufExt {\n\n fn get_uint_lenenc<T: ByteOrder>(&mut self) -> io::Result<Option<u64>>;\n\n\n\n fn get_str_lenenc<T: ByteOrder>(&mut self) -> io::Result<Option<&str>>;\n\n\n\n fn get_bytes_lenenc<T: ByteOrder>(&mut self) -> io::Result<Option<&[u8]>>;\n\n}\n\n\n\nimpl BufExt for &'_ [u8] {\n\n fn get_uint_lenenc<T: ByteOrder>(&mut self) -> io::Result<Option<u64>> {\n\n Ok(match self.get_u8()? {\n\n 0xFB => None,\n\n 0xFC => Some(u64::from(self.get_u16::<T>()?)),\n\n 0xFD => Some(u64::from(self.get_u24::<T>()?)),\n\n 0xFE => Some(self.get_u64::<T>()?),\n\n\n\n value => Some(u64::from(value)),\n\n })\n\n }\n\n\n", "file_path": "sqlx-core/src/mysql/io/buf_ext.rs", "rank": 89, "score": 56928.28983739104 }, { "content": "#[doc(hidden)]\n\npub trait TyConsExt: Sized {\n\n type Cons;\n\n fn ty_cons(self) -> Self::Cons {\n\n panic!(\"should not be run, only for type resolution\")\n\n }\n\n}\n\n\n\nimpl<T> TyCons<Option<&'_ T>> {\n\n pub fn ty_cons(self) -> T {\n\n panic!(\"should not be run, only for type resolution\")\n\n }\n\n}\n\n\n\n// no overlap with the following impls because of the `: Sized` bound\n\nimpl<T: Sized> TyConsExt for TyCons<&'_ T> {\n\n type Cons = T;\n\n}\n\n\n\nimpl TyConsExt for TyCons<&'_ str> {\n\n type Cons = String;\n", "file_path": "src/ty_cons.rs", "rank": 90, "score": 56720.53804655816 }, { "content": "fn url() -> anyhow::Result<String> {\n\n Ok(dotenv::var(\"DATABASE_URL\")?)\n\n}\n\n\n\nasync fn connect() -> anyhow::Result<MySqlConnection> {\n\n Ok(MySqlConnection::open(url()?).await?)\n\n}\n", "file_path": "tests/mysql.rs", "rank": 91, "score": 56264.35190248145 }, { "content": "pub trait BufMutExt {\n\n fn put_uint_lenenc<T: ByteOrder, U: Into<Option<u64>>>(&mut self, val: U);\n\n\n\n fn put_str_lenenc<T: ByteOrder>(&mut self, val: &str);\n\n\n\n fn put_bytes_lenenc<T: ByteOrder>(&mut self, val: &[u8]);\n\n}\n\n\n\nimpl BufMutExt for Vec<u8> {\n\n fn put_uint_lenenc<T: ByteOrder, U: Into<Option<u64>>>(&mut self, value: U) {\n\n if let Some(value) = value.into() {\n\n // https://mariadb.com/kb/en/library/protocol-data-types/#length-encoded-integers\n\n if value > 0xFF_FF_FF {\n\n // Integer value is encoded in the next 8 bytes (9 bytes total)\n\n self.push(0xFE);\n\n self.put_u64::<T>(value);\n\n } else if value > u64::from(u16::MAX) {\n\n // Integer value is encoded in the next 3 bytes (4 bytes total)\n\n self.push(0xFD);\n\n self.put_u24::<T>(value as u32);\n", "file_path": "sqlx-core/src/mysql/io/buf_mut_ext.rs", "rank": 92, "score": 55340.15239939925 }, { "content": "pub trait ResultExt<T>: Sized {\n\n fn try_unwrap_optional(self) -> crate::Result<T>;\n\n}\n\n\n\nimpl<T> ResultExt<T> for crate::Result<T> {\n\n fn try_unwrap_optional(self) -> crate::Result<T> {\n\n self\n\n }\n\n}\n\n\n\nimpl<T> ResultExt<Option<T>> for crate::Result<T> {\n\n fn try_unwrap_optional(self) -> crate::Result<Option<T>> {\n\n match self {\n\n Ok(val) => Ok(Some(val)),\n\n Err(Error::Decode(DecodeError::UnexpectedNull)) => Ok(None),\n\n Err(e) => Err(e),\n\n }\n\n }\n\n}\n", "file_path": "src/result_ext.rs", "rank": 93, "score": 55076.69821367183 }, { "content": "fn generate_random_salt() -> [u8; 16] {\n\n let mut salt = [0; 16];\n\n thread_rng().fill_bytes(&mut salt);\n\n\n\n salt\n\n}\n\n\n", "file_path": "examples/realworld-postgres/src/main.rs", "rank": 94, "score": 55024.18165110335 }, { "content": "pub trait RowIndex<R: ?Sized>\n\nwhere\n\n R: Row,\n\n{\n\n fn try_get<T>(&self, row: &R) -> crate::Result<T>\n\n where\n\n R::Database: HasSqlType<T>,\n\n T: Decode<R::Database>;\n\n}\n\n\n", "file_path": "sqlx-core/src/row.rs", "rank": 95, "score": 54180.284702553334 }, { "content": "/// A database driver.\n\n///\n\n/// This trait encapsulates a complete driver implementation to a specific\n\n/// database (e.g., MySQL, Postgres).\n\npub trait Database: HasTypeMetadata + 'static {\n\n /// The concrete `Connection` implementation for this database.\n\n type Connection: Connection<Database = Self>;\n\n\n\n /// The concrete `Arguments` implementation for this database.\n\n type Arguments: Arguments<Database = Self>;\n\n\n\n /// The concrete `Row` implementation for this database.\n\n type Row: Row<Database = Self>;\n\n}\n", "file_path": "sqlx-core/src/database.rs", "rank": 96, "score": 54180.284702553334 }, { "content": "fn postgres_epoch() -> DateTime<Utc> {\n\n Utc.ymd(2000, 1, 1).and_hms(0, 0, 0)\n\n}\n\n\n", "file_path": "sqlx-core/src/postgres/types/chrono.rs", "rank": 97, "score": 53362.18011645509 }, { "content": "type Error = Box<dyn std::error::Error>;\n\n\n", "file_path": "sqlx-macros/src/lib.rs", "rank": 98, "score": 52945.68110441865 }, { "content": "/// Represents a single row of the result set.\n\npub trait Row: Unpin + Send + 'static {\n\n type Database: Database + ?Sized;\n\n\n\n /// Returns `true` if the row contains no values.\n\n fn is_empty(&self) -> bool {\n\n self.len() == 0\n\n }\n\n\n\n /// Returns the number of values in the row.\n\n fn len(&self) -> usize;\n\n\n\n /// Returns the value at the `index`; can either be an integer ordinal or a column name.\n\n fn get<T, I>(&self, index: I) -> T\n\n where\n\n Self::Database: HasSqlType<T>,\n\n I: RowIndex<Self>,\n\n T: Decode<Self::Database>;\n\n}\n\n\n", "file_path": "sqlx-core/src/row.rs", "rank": 99, "score": 52780.152464828716 } ]
Rust
src/subcommand/launch.rs
chipsenkbeil/distant
c6c07c5c2ce6ef5c1499c08ce077a14c2c558716
use crate::{ exit::{ExitCode, ExitCodeError}, msg::{MsgReceiver, MsgSender}, opt::{CommonOpt, Format, LaunchSubcommand, SessionOutput}, session::CliSession, utils, }; use derive_more::{Display, Error, From}; use distant_core::{ PlainCodec, RelayServer, Session, SessionInfo, SessionInfoFile, Transport, TransportListener, XChaCha20Poly1305Codec, }; use log::*; use std::{path::Path, string::FromUtf8Error}; use tokio::{io, process::Command, runtime::Runtime, time::Duration}; #[derive(Debug, Display, Error, From)] pub enum Error { #[display(fmt = "Missing data for session")] MissingSessionData, Fork(#[error(not(source))] i32), Io(io::Error), Utf8(FromUtf8Error), } impl ExitCodeError for Error { fn to_exit_code(&self) -> ExitCode { match self { Self::MissingSessionData => ExitCode::NoInput, Self::Fork(_) => ExitCode::OsErr, Self::Io(x) => x.to_exit_code(), Self::Utf8(_) => ExitCode::DataErr, } } } pub fn run(cmd: LaunchSubcommand, opt: CommonOpt) -> Result<(), Error> { let rt = Runtime::new()?; let session_output = cmd.session; let format = cmd.format; let is_daemon = !cmd.foreground; let session_file = cmd.session_data.session_file.clone(); let session_socket = cmd.session_data.session_socket.clone(); let fail_if_socket_exists = cmd.fail_if_socket_exists; let timeout = opt.to_timeout_duration(); let shutdown_after = cmd.to_shutdown_after_duration(); let session = rt.block_on(async { spawn_remote_server(cmd, opt).await })?; match session_output { SessionOutput::File => { debug!("Outputting session to {:?}", session_file); rt.block_on(async { SessionInfoFile::new(session_file, session).save().await })? } SessionOutput::Keep => { debug!("Entering interactive loop over stdin"); rt.block_on(async { keep_loop(session, format, timeout).await })? } SessionOutput::Pipe => { debug!("Piping session to stdout"); println!("{}", session.to_unprotected_string()) } #[cfg(unix)] SessionOutput::Socket if is_daemon => { debug!( "Forking and entering interactive loop over unix socket {:?}", session_socket ); drop(rt); run_daemon_socket( session_socket, session, timeout, fail_if_socket_exists, shutdown_after, )?; } #[cfg(unix)] SessionOutput::Socket => { debug!( "Entering interactive loop over unix socket {:?}", session_socket ); rt.block_on(async { socket_loop( session_socket, session, timeout, fail_if_socket_exists, shutdown_after, ) .await })? } } Ok(()) } #[cfg(unix)] fn run_daemon_socket( session_socket: impl AsRef<Path>, session: SessionInfo, timeout: Duration, fail_if_socket_exists: bool, shutdown_after: Option<Duration>, ) -> Result<(), Error> { use fork::{daemon, Fork}; match daemon(false, false) { Ok(Fork::Child) => { let rt = Runtime::new()?; rt.block_on(async { socket_loop( session_socket, session, timeout, fail_if_socket_exists, shutdown_after, ) .await })? } Ok(_) => {} Err(x) => return Err(Error::Fork(x)), } Ok(()) } async fn keep_loop(info: SessionInfo, format: Format, duration: Duration) -> io::Result<()> { let addr = info.to_socket_addr().await?; let codec = XChaCha20Poly1305Codec::from(info.key); match Session::tcp_connect_timeout(addr, codec, duration).await { Ok(session) => { let cli_session = CliSession::new_for_stdin(utils::new_tenant(), session, format); cli_session.wait().await } Err(x) => Err(x), } } #[cfg(unix)] async fn socket_loop( socket_path: impl AsRef<Path>, info: SessionInfo, duration: Duration, fail_if_socket_exists: bool, shutdown_after: Option<Duration>, ) -> io::Result<()> { debug!("Connecting to {} {}", info.host, info.port); let addr = info.to_socket_addr().await?; let codec = XChaCha20Poly1305Codec::from(info.key); let session = Session::tcp_connect_timeout(addr, codec, duration).await?; if !fail_if_socket_exists && socket_path.as_ref().exists() { debug!("Removing old unix socket instance"); tokio::fs::remove_file(socket_path.as_ref()).await?; } debug!("Binding to unix socket: {:?}", socket_path.as_ref()); let listener = tokio::net::UnixListener::bind(socket_path)?; let stream = TransportListener::initialize(listener, |stream| Transport::new(stream, PlainCodec::new())) .into_stream(); let server = RelayServer::initialize(session, Box::pin(stream), shutdown_after)?; server .wait() .await .map_err(|x| io::Error::new(io::ErrorKind::Other, x)) } async fn spawn_remote_server(cmd: LaunchSubcommand, opt: CommonOpt) -> Result<SessionInfo, Error> { #[cfg(feature = "ssh2")] if cmd.external_ssh { external_spawn_remote_server(cmd, opt).await } else { native_spawn_remote_server(cmd, opt).await } #[cfg(not(feature = "ssh2"))] external_spawn_remote_server(cmd, opt).await } #[cfg(feature = "ssh2")] async fn native_spawn_remote_server( cmd: LaunchSubcommand, _opt: CommonOpt, ) -> Result<SessionInfo, Error> { trace!("native_spawn_remote_server({:?})", cmd); use distant_ssh2::{ IntoDistantSessionOpts, Ssh2AuthEvent, Ssh2AuthHandler, Ssh2Session, Ssh2SessionOpts, }; let host = cmd.host; let mut opts = Ssh2SessionOpts::default(); if let Some(path) = cmd.identity_file { opts.identity_files.push(path); } opts.port = Some(cmd.port); opts.user = Some(cmd.username); debug!("Connecting to {} {:#?}", host, opts); let mut ssh_session = Ssh2Session::connect(host.as_str(), opts)?; #[derive(Debug, serde::Serialize, serde::Deserialize)] #[serde(tag = "type")] enum SshMsg { #[serde(rename = "ssh_authenticate")] Authenticate(Ssh2AuthEvent), #[serde(rename = "ssh_authenticate_answer")] AuthenticateAnswer { answers: Vec<String> }, #[serde(rename = "ssh_banner")] Banner { text: String }, #[serde(rename = "ssh_host_verify")] HostVerify { host: String }, #[serde(rename = "ssh_host_verify_answer")] HostVerifyAnswer { answer: bool }, #[serde(rename = "ssh_error")] Error { msg: String }, } debug!("Authenticating against {}", host); ssh_session .authenticate(match cmd.format { Format::Shell => Ssh2AuthHandler::default(), Format::Json => { let tx = MsgSender::from_stdout(); let tx_2 = tx.clone(); let tx_3 = tx.clone(); let tx_4 = tx.clone(); let rx = MsgReceiver::from_stdin(); let rx_2 = rx.clone(); Ssh2AuthHandler { on_authenticate: Box::new(move |ev| { let _ = tx.send_blocking(&SshMsg::Authenticate(ev)); let msg: SshMsg = rx.recv_blocking()?; match msg { SshMsg::AuthenticateAnswer { answers } => Ok(answers), x => { return Err(io::Error::new( io::ErrorKind::InvalidInput, format!("Invalid response received: {:?}", x), )) } } }), on_banner: Box::new(move |banner| { let _ = tx_2.send_blocking(&SshMsg::Banner { text: banner.to_string(), }); }), on_host_verify: Box::new(move |host| { let _ = tx_3.send_blocking(&SshMsg::HostVerify { host: host.to_string(), })?; let msg: SshMsg = rx_2.recv_blocking()?; match msg { SshMsg::HostVerifyAnswer { answer } => Ok(answer), x => { return Err(io::Error::new( io::ErrorKind::InvalidInput, format!("Invalid response received: {:?}", x), )) } } }), on_error: Box::new(move |err| { let _ = tx_4.send_blocking(&SshMsg::Error { msg: err.to_string(), }); }), } } }) .await?; debug!("Mapping session for {}", host); let session_info = ssh_session .into_distant_session_info(IntoDistantSessionOpts { binary: cmd.distant, args: cmd.extra_server_args.unwrap_or_default(), ..Default::default() }) .await?; Ok(session_info) } async fn external_spawn_remote_server( cmd: LaunchSubcommand, _opt: CommonOpt, ) -> Result<SessionInfo, Error> { let distant_command = format!( "{} listen --host {} {}", cmd.distant, cmd.bind_server, cmd.extra_server_args.unwrap_or_default(), ); let ssh_command = format!( "{} -o StrictHostKeyChecking=no ssh://{}@{}:{} {} '{}'", cmd.ssh, cmd.username, cmd.host.as_str(), cmd.port, cmd.identity_file .map(|f| format!("-i {}", f.as_path().display())) .unwrap_or_default(), if cmd.no_shell { distant_command.trim().to_string() } else { format!("echo {} | $SHELL -l", distant_command.trim()) }, ); let out = Command::new("sh") .arg("-c") .arg(ssh_command) .output() .await?; if !out.status.success() { return Err(Error::from(io::Error::new( io::ErrorKind::Other, String::from_utf8(out.stderr)?.trim().to_string(), ))); } let out = String::from_utf8(out.stdout)?.trim().to_string(); let mut info = out .lines() .find_map(|line| line.parse::<SessionInfo>().ok()) .ok_or(Error::MissingSessionData)?; info.host = cmd.host; Ok(info) }
use crate::{ exit::{ExitCode, ExitCodeError}, msg::{MsgReceiver, MsgSender}, opt::{CommonOpt, Format, LaunchSubcommand, SessionOutput}, session::CliSession, utils, }; use derive_more::{Display, Error, From}; use distant_core::{ PlainCodec, RelayServer, Session, SessionInfo, SessionInfoFile, Transport, TransportListener, XChaCha20Poly1305Codec, }; use log::*; use std::{path::Path, string::FromUtf8Error}; use tokio::{io, process::Command, runtime::Runtime, time::Duration}; #[derive(Debug, Display, Error, From)] pub enum Error { #[display(fmt = "Missing data for session")] MissingSessionData, Fork(#[error(not(source))] i32), Io(io::Error), Utf8(FromUtf8Error), } impl ExitCodeError for Error { fn to_exit_code(&self) -> ExitCode { match self { Self::MissingSessionData => ExitCode::NoInput, Self::Fork(_) => ExitCode::OsErr, Self::Io(x) => x.to_exit_code(), Self::Utf8(_) => ExitCode::DataErr, } } } pub fn run(cmd: LaunchSubcommand, opt: CommonOpt) -> Result<(), Error> { let rt = Runtime::new()?; let session_output = cmd.session; let format = cmd.format; let is_daemon = !cmd.foreground; let session_file = cmd.session_data.session_file.clone(); let session_socket = cmd.session_data.session_socket.clone(); let fail_if_socket_exists = cmd.fail_if_socket_exists; let timeout = opt.to_timeout_duration(); let shutdown_after = cmd.to_shutdown_after_duration(); let session = rt.block_on(async { spawn_remote_server(cmd, opt).await })?; match session_output { SessionOutput::File => { debug!("Outputting session to {:?}", session_file); rt.block_on(async { SessionInfoFile::new(session_file, session).save().await })? } SessionOutput::Keep => { debug!("Entering interactive loop over stdin"); rt.block_on(async { keep_loop(session, format, timeout).await })? } SessionOutput::Pipe => { debug!("Piping session to stdout"); println!("{}", session.to_unprotected_string()) } #[cfg(unix)] SessionOutput::Socket if is_daemon => { debug!( "Forking and entering interactive loop over unix socket {:?}", session_socket ); drop(rt); run_daemon_socket( session_socket, session, timeout, fail_if_socket_exists, shutdown_after, )?; } #[cfg(unix)] SessionOutput::Socket => { debug!( "Entering interactive loop over unix socket {:?}", session_socket ); rt.block_on(async { socket_loop( session_socket, session, timeout, fail_if_socket_exists, shutdown_after, ) .await })? } } Ok(()) } #[cfg(unix)] fn run_daemon_socket( session_socket: impl AsRef<Path>, session: SessionInfo, timeout: Duration, fail_if_socket_exists: bool, shutdown_after: Option<Duration>, ) -> Result<(), Error> { use fork::{daemon, Fork}; match daemon(false, false) { Ok(Fork::Child) => { let rt = Runtime::new()?; rt.block_on(async { socket_loop( session_socket, session, timeout, fail_if_socket_exists, shutdown_after, ) .await })? } Ok(_) => {} Err(x) => return Err(Error::Fork(x)), } Ok(()) }
#[cfg(unix)] async fn socket_loop( socket_path: impl AsRef<Path>, info: SessionInfo, duration: Duration, fail_if_socket_exists: bool, shutdown_after: Option<Duration>, ) -> io::Result<()> { debug!("Connecting to {} {}", info.host, info.port); let addr = info.to_socket_addr().await?; let codec = XChaCha20Poly1305Codec::from(info.key); let session = Session::tcp_connect_timeout(addr, codec, duration).await?; if !fail_if_socket_exists && socket_path.as_ref().exists() { debug!("Removing old unix socket instance"); tokio::fs::remove_file(socket_path.as_ref()).await?; } debug!("Binding to unix socket: {:?}", socket_path.as_ref()); let listener = tokio::net::UnixListener::bind(socket_path)?; let stream = TransportListener::initialize(listener, |stream| Transport::new(stream, PlainCodec::new())) .into_stream(); let server = RelayServer::initialize(session, Box::pin(stream), shutdown_after)?; server .wait() .await .map_err(|x| io::Error::new(io::ErrorKind::Other, x)) } async fn spawn_remote_server(cmd: LaunchSubcommand, opt: CommonOpt) -> Result<SessionInfo, Error> { #[cfg(feature = "ssh2")] if cmd.external_ssh { external_spawn_remote_server(cmd, opt).await } else { native_spawn_remote_server(cmd, opt).await } #[cfg(not(feature = "ssh2"))] external_spawn_remote_server(cmd, opt).await } #[cfg(feature = "ssh2")] async fn native_spawn_remote_server( cmd: LaunchSubcommand, _opt: CommonOpt, ) -> Result<SessionInfo, Error> { trace!("native_spawn_remote_server({:?})", cmd); use distant_ssh2::{ IntoDistantSessionOpts, Ssh2AuthEvent, Ssh2AuthHandler, Ssh2Session, Ssh2SessionOpts, }; let host = cmd.host; let mut opts = Ssh2SessionOpts::default(); if let Some(path) = cmd.identity_file { opts.identity_files.push(path); } opts.port = Some(cmd.port); opts.user = Some(cmd.username); debug!("Connecting to {} {:#?}", host, opts); let mut ssh_session = Ssh2Session::connect(host.as_str(), opts)?; #[derive(Debug, serde::Serialize, serde::Deserialize)] #[serde(tag = "type")] enum SshMsg { #[serde(rename = "ssh_authenticate")] Authenticate(Ssh2AuthEvent), #[serde(rename = "ssh_authenticate_answer")] AuthenticateAnswer { answers: Vec<String> }, #[serde(rename = "ssh_banner")] Banner { text: String }, #[serde(rename = "ssh_host_verify")] HostVerify { host: String }, #[serde(rename = "ssh_host_verify_answer")] HostVerifyAnswer { answer: bool }, #[serde(rename = "ssh_error")] Error { msg: String }, } debug!("Authenticating against {}", host); ssh_session .authenticate(match cmd.format { Format::Shell => Ssh2AuthHandler::default(), Format::Json => { let tx = MsgSender::from_stdout(); let tx_2 = tx.clone(); let tx_3 = tx.clone(); let tx_4 = tx.clone(); let rx = MsgReceiver::from_stdin(); let rx_2 = rx.clone(); Ssh2AuthHandler { on_authenticate: Box::new(move |ev| { let _ = tx.send_blocking(&SshMsg::Authenticate(ev)); let msg: SshMsg = rx.recv_blocking()?; match msg { SshMsg::AuthenticateAnswer { answers } => Ok(answers), x => { return Err(io::Error::new( io::ErrorKind::InvalidInput, format!("Invalid response received: {:?}", x), )) } } }), on_banner: Box::new(move |banner| { let _ = tx_2.send_blocking(&SshMsg::Banner { text: banner.to_string(), }); }), on_host_verify: Box::new(move |host| { let _ = tx_3.send_blocking(&SshMsg::HostVerify { host: host.to_string(), })?; let msg: SshMsg = rx_2.recv_blocking()?; match msg { SshMsg::HostVerifyAnswer { answer } => Ok(answer), x => { return Err(io::Error::new( io::ErrorKind::InvalidInput, format!("Invalid response received: {:?}", x), )) } } }), on_error: Box::new(move |err| { let _ = tx_4.send_blocking(&SshMsg::Error { msg: err.to_string(), }); }), } } }) .await?; debug!("Mapping session for {}", host); let session_info = ssh_session .into_distant_session_info(IntoDistantSessionOpts { binary: cmd.distant, args: cmd.extra_server_args.unwrap_or_default(), ..Default::default() }) .await?; Ok(session_info) } async fn external_spawn_remote_server( cmd: LaunchSubcommand, _opt: CommonOpt, ) -> Result<SessionInfo, Error> { let distant_command = format!( "{} listen --host {} {}", cmd.distant, cmd.bind_server, cmd.extra_server_args.unwrap_or_default(), ); let ssh_command = format!( "{} -o StrictHostKeyChecking=no ssh://{}@{}:{} {} '{}'", cmd.ssh, cmd.username, cmd.host.as_str(), cmd.port, cmd.identity_file .map(|f| format!("-i {}", f.as_path().display())) .unwrap_or_default(), if cmd.no_shell { distant_command.trim().to_string() } else { format!("echo {} | $SHELL -l", distant_command.trim()) }, ); let out = Command::new("sh") .arg("-c") .arg(ssh_command) .output() .await?; if !out.status.success() { return Err(Error::from(io::Error::new( io::ErrorKind::Other, String::from_utf8(out.stderr)?.trim().to_string(), ))); } let out = String::from_utf8(out.stdout)?.trim().to_string(); let mut info = out .lines() .find_map(|line| line.parse::<SessionInfo>().ok()) .ok_or(Error::MissingSessionData)?; info.host = cmd.host; Ok(info) }
async fn keep_loop(info: SessionInfo, format: Format, duration: Duration) -> io::Result<()> { let addr = info.to_socket_addr().await?; let codec = XChaCha20Poly1305Codec::from(info.key); match Session::tcp_connect_timeout(addr, codec, duration).await { Ok(session) => { let cli_session = CliSession::new_for_stdin(utils::new_tenant(), session, format); cli_session.wait().await } Err(x) => Err(x), } }
function_block-full_function
[ { "content": "pub fn run(cmd: ActionSubcommand, opt: CommonOpt) -> Result<(), Error> {\n\n let rt = tokio::runtime::Runtime::new()?;\n\n\n\n rt.block_on(async { run_async(cmd, opt).await })\n\n}\n\n\n\nasync fn run_async(cmd: ActionSubcommand, opt: CommonOpt) -> Result<(), Error> {\n\n let method = cmd.method;\n\n let ssh_connection = cmd.ssh_connection.clone();\n\n let session_input = cmd.session;\n\n let timeout = opt.to_timeout_duration();\n\n let session_file = cmd.session_data.session_file.clone();\n\n let session_socket = cmd.session_data.session_socket.clone();\n\n\n\n CommandRunner {\n\n method,\n\n ssh_connection,\n\n session_input,\n\n session_file,\n\n session_socket,\n", "file_path": "src/subcommand/action.rs", "rank": 1, "score": 375794.846526138 }, { "content": "pub fn run(cmd: LspSubcommand, opt: CommonOpt) -> Result<(), Error> {\n\n let rt = tokio::runtime::Runtime::new()?;\n\n\n\n rt.block_on(async { run_async(cmd, opt).await })\n\n}\n\n\n\nasync fn run_async(cmd: LspSubcommand, opt: CommonOpt) -> Result<(), Error> {\n\n let method = cmd.method;\n\n let timeout = opt.to_timeout_duration();\n\n let ssh_connection = cmd.ssh_connection.clone();\n\n let session_input = cmd.session;\n\n let session_file = cmd.session_data.session_file.clone();\n\n let session_socket = cmd.session_data.session_socket.clone();\n\n\n\n CommandRunner {\n\n method,\n\n ssh_connection,\n\n session_input,\n\n session_file,\n\n session_socket,\n", "file_path": "src/subcommand/lsp.rs", "rank": 2, "score": 375794.846526138 }, { "content": "pub fn run(cmd: ListenSubcommand, opt: CommonOpt) -> Result<(), Error> {\n\n if cmd.foreground {\n\n let rt = tokio::runtime::Runtime::new()?;\n\n rt.block_on(async { run_async(cmd, opt, false).await })?;\n\n } else {\n\n run_daemon(cmd, opt)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/subcommand/listen.rs", "rank": 3, "score": 375794.846526138 }, { "content": "#[cfg(unix)]\n\nfn run_daemon(cmd: ListenSubcommand, opt: CommonOpt) -> Result<(), Error> {\n\n use fork::{daemon, Fork};\n\n\n\n // NOTE: We keep the stdin, stdout, stderr open so we can print out the pid with the parent\n\n match daemon(false, true) {\n\n Ok(Fork::Child) => {\n\n let rt = tokio::runtime::Runtime::new()?;\n\n rt.block_on(async { run_async(cmd, opt, true).await })?;\n\n Ok(())\n\n }\n\n Ok(Fork::Parent(pid)) => {\n\n info!(\"[distant detached, pid = {}]\", pid);\n\n if fork::close_fd().is_err() {\n\n Err(Error::Fork)\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n Err(_) => Err(Error::Fork),\n\n }\n", "file_path": "src/subcommand/listen.rs", "rank": 4, "score": 311711.21664101793 }, { "content": "fn init_logging(opt: &opt::CommonOpt, is_remote_process: bool) -> flexi_logger::LoggerHandle {\n\n use flexi_logger::{FileSpec, LevelFilter, LogSpecification, Logger};\n\n let modules = &[\"distant\", \"distant_core\"];\n\n\n\n // Disable logging for everything but our binary, which is based on verbosity\n\n let mut builder = LogSpecification::builder();\n\n builder.default(LevelFilter::Off);\n\n\n\n // For each module, configure logging\n\n for module in modules {\n\n builder.module(module, opt.log_level.to_log_level_filter());\n\n\n\n // If quiet, we suppress all logging output\n\n //\n\n // NOTE: For a process request, unless logging to a file, we also suppress logging output\n\n // to avoid unexpected results when being treated like a process\n\n //\n\n // Without this, CI tests can sporadically fail when getting the exit code of a\n\n // process because an error log is provided about failing to broadcast a response\n\n // on the client side\n", "file_path": "src/lib.rs", "rank": 5, "score": 287382.6556523981 }, { "content": "#[cfg(windows)]\n\nfn run_daemon(_cmd: ListenSubcommand, _opt: CommonOpt) -> Result<(), Error> {\n\n use std::{\n\n ffi::OsString,\n\n iter,\n\n process::{Command, Stdio},\n\n };\n\n let mut args = std::env::args_os();\n\n let program = args.next().ok_or(Error::Fork)?;\n\n\n\n // Ensure that forked server runs in foreground, otherwise we would fork bomb ourselves\n\n let args = args.chain(iter::once(OsString::from(\"--foreground\")));\n\n\n\n let child = Command::new(program)\n\n .args(args)\n\n .stdin(Stdio::null())\n\n .stdout(Stdio::inherit())\n\n .stderr(Stdio::inherit())\n\n .spawn()?;\n\n info!(\"[distant detached, pid = {}]\", child.id());\n\n Ok(())\n\n}\n\n\n", "file_path": "src/subcommand/listen.rs", "rank": 6, "score": 281343.9141718217 }, { "content": "/// Initializes logging (should only call once)\n\npub fn init_logging(path: impl Into<PathBuf>) -> flexi_logger::LoggerHandle {\n\n use flexi_logger::{FileSpec, LevelFilter, LogSpecification, Logger};\n\n let modules = &[\"distant\", \"distant_core\"];\n\n\n\n // Disable logging for everything but our binary, which is based on verbosity\n\n let mut builder = LogSpecification::builder();\n\n builder.default(LevelFilter::Off);\n\n\n\n // For each module, configure logging\n\n for module in modules {\n\n builder.module(module, LevelFilter::Trace);\n\n }\n\n\n\n // Create our logger, but don't initialize yet\n\n let logger = Logger::with(builder.build())\n\n .format_for_files(flexi_logger::opt_format)\n\n .log_to_file(FileSpec::try_from(path).expect(\"Failed to create log file spec\"));\n\n\n\n logger.start().expect(\"Failed to initialize logger\")\n\n}\n\n\n", "file_path": "tests/cli/utils.rs", "rank": 7, "score": 273693.0754012469 }, { "content": "type StatusResult = Result<(bool, Option<i32>), RemoteProcessError>;\n\n\n\n#[derive(Debug, Display, Error, From)]\n\npub enum RemoteProcessError {\n\n /// When attempting to relay stdout/stderr over channels, but the channels fail\n\n ChannelDead,\n\n\n\n /// When the communication over the wire has issues\n\n TransportError(TransportError),\n\n\n\n /// When the stream of responses from the server closes without receiving\n\n /// an indicator of the process' exit status\n\n UnexpectedEof,\n\n\n\n /// When attempting to wait on the remote process, but the internal task joining failed\n\n WaitFailed(JoinError),\n\n}\n\n\n\n/// Represents a process on a remote machine\n\n#[derive(Debug)]\n", "file_path": "distant-core/src/client/process.rs", "rank": 8, "score": 265372.33558901574 }, { "content": "fn has_session(id: usize) -> LuaResult<bool> {\n\n Ok(SESSION_MAP\n\n .read()\n\n .map_err(|x| x.to_string().to_lua_err())?\n\n .contains_key(&id))\n\n}\n\n\n", "file_path": "distant-lua/src/session.rs", "rank": 9, "score": 232874.68258526386 }, { "content": "/// Return a random u32\n\npub fn rand_u32() -> LuaResult<u32> {\n\n static RAND: OnceCell<Mutex<Rand32>> = OnceCell::new();\n\n\n\n Ok(RAND\n\n .get_or_try_init::<_, SystemTimeError>(|| {\n\n Ok(Mutex::new(Rand32::new(\n\n SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs(),\n\n )))\n\n })\n\n .to_lua_err()?\n\n .lock()\n\n .map_err(|x| x.to_string())\n\n .to_lua_err()?\n\n .rand_u32())\n\n}\n", "file_path": "distant-lua/src/utils.rs", "rank": 10, "score": 224398.43675041466 }, { "content": "/// Spawns a task on the global runtime for a future that returns a `LuaResult<T>`\n\npub fn spawn<F, T>(f: F) -> impl Future<Output = LuaResult<T>>\n\nwhere\n\n F: Future<Output = Result<T, LuaError>> + Send + 'static,\n\n T: Send + 'static,\n\n{\n\n futures::future::ready(get_runtime()).and_then(|rt| {\n\n rt.spawn(f).map(|result| match result {\n\n Ok(x) => x.to_lua_err(),\n\n Err(x) => Err(x).to_lua_err(),\n\n })\n\n })\n\n}\n", "file_path": "distant-lua/src/runtime.rs", "rank": 11, "score": 223673.29314775905 }, { "content": "fn with_session<T>(id: usize, f: impl FnOnce(&DistantSession) -> T) -> LuaResult<T> {\n\n let lock = SESSION_MAP.read().map_err(|x| x.to_string().to_lua_err())?;\n\n let session = lock.get(&id).ok_or_else(|| {\n\n io::Error::new(\n\n io::ErrorKind::NotConnected,\n\n format!(\"No session connected with id {}\", id),\n\n )\n\n .to_lua_err()\n\n })?;\n\n\n\n Ok(f(session))\n\n}\n\n\n", "file_path": "distant-lua/src/session.rs", "rank": 12, "score": 222933.91873802946 }, { "content": "/// Makes a Lua table containing the log functions\n\npub fn make_log_tbl(lua: &Lua) -> LuaResult<LuaTable> {\n\n let tbl = lua.create_table()?;\n\n\n\n tbl.set(\n\n \"init\",\n\n lua.create_function(|lua, opts: LuaValue| init_logger(lua.from_value(opts)?))?,\n\n )?;\n\n\n\n set_log_fn!(lua, tbl, error);\n\n set_log_fn!(lua, tbl, warn);\n\n set_log_fn!(lua, tbl, info);\n\n set_log_fn!(lua, tbl, debug);\n\n set_log_fn!(lua, tbl, trace);\n\n\n\n Ok(tbl)\n\n}\n", "file_path": "distant-lua/src/log.rs", "rank": 13, "score": 219720.0265733551 }, { "content": "/// Makes a Lua table containing the utils functions\n\npub fn make_utils_tbl(lua: &Lua) -> LuaResult<LuaTable> {\n\n let tbl = lua.create_table()?;\n\n\n\n tbl.set(\n\n \"nvim_wrap_async\",\n\n lua.create_function(|lua, (async_fn, millis): (LuaFunction, Option<u64>)| {\n\n nvim_wrap_async(lua, async_fn, millis.unwrap_or(NVIM_POLL_TIMEOUT))\n\n })?,\n\n )?;\n\n tbl.set(\n\n \"wrap_async\",\n\n lua.create_function(|lua, (async_fn, schedule_fn)| wrap_async(lua, async_fn, schedule_fn))?,\n\n )?;\n\n tbl.set(\"rand_u32\", lua.create_function(|_, ()| rand_u32())?)?;\n\n\n\n Ok(tbl)\n\n}\n\n\n", "file_path": "distant-lua/src/utils.rs", "rank": 14, "score": 219573.66272967926 }, { "content": "/// Makes a Lua table containing the session functions\n\npub fn make_session_tbl(lua: &Lua) -> LuaResult<LuaTable> {\n\n let tbl = lua.create_table()?;\n\n\n\n // get_all() -> Vec<Session>\n\n tbl.set(\"get_all\", lua.create_function(|_, ()| Session::all())?)?;\n\n\n\n // get_by_id(id: usize) -> Option<Session>\n\n tbl.set(\n\n \"get_by_id\",\n\n lua.create_function(|_, id: usize| {\n\n let exists = has_session(id)?;\n\n if exists {\n\n Ok(Some(Session::new(id)))\n\n } else {\n\n Ok(None)\n\n }\n\n })?,\n\n )?;\n\n\n\n // launch(opts: LaunchOpts) -> Session\n", "file_path": "distant-lua/src/session.rs", "rank": 15, "score": 218474.5079033681 }, { "content": "fn init_logger(opts: LogOpts) -> LuaResult<()> {\n\n let mut loggers: Vec<Box<dyn SharedLogger>> = Vec::new();\n\n let config = ConfigBuilder::new()\n\n .add_filter_allow_str(\"distant_core\")\n\n .add_filter_allow_str(\"distant_ssh2\")\n\n .add_filter_allow_str(\"distant_lua\")\n\n .build();\n\n\n\n if opts.terminal {\n\n loggers.push(TermLogger::new(\n\n opts.level.into(),\n\n config.clone(),\n\n TerminalMode::Mixed,\n\n ColorChoice::Auto,\n\n ));\n\n }\n\n\n\n if let Some(path) = opts.file {\n\n loggers.push(WriteLogger::new(\n\n opts.level.into(),\n\n config,\n\n File::create(path)?,\n\n ));\n\n }\n\n\n\n CombinedLogger::init(loggers).to_lua_err()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "distant-lua/src/log.rs", "rank": 16, "score": 210007.36617484147 }, { "content": "/// Represents an error that can be converted into an exit code\n\npub trait ExitCodeError: std::error::Error {\n\n fn to_exit_code(&self) -> ExitCode;\n\n\n\n /// Indicates if the error message associated with this exit code error\n\n /// should be printed, or if this is just used to reflect the exit code\n\n /// when the process exits\n\n fn is_silent(&self) -> bool {\n\n false\n\n }\n\n\n\n fn to_i32(&self) -> i32 {\n\n self.to_exit_code().to_i32()\n\n }\n\n}\n\n\n\nimpl ExitCodeError for std::io::Error {\n\n fn to_exit_code(&self) -> ExitCode {\n\n use std::io::ErrorKind;\n\n match self.kind() {\n\n ErrorKind::ConnectionAborted\n", "file_path": "src/exit.rs", "rank": 17, "score": 209346.6794814064 }, { "content": "fn format_shell(data: ResponseData) -> ResponseOut {\n\n match data {\n\n ResponseData::Ok => ResponseOut::None,\n\n ResponseData::Error(Error { kind, description }) => {\n\n ResponseOut::StderrLine(format!(\"Failed ({}): '{}'.\", kind, description))\n\n }\n\n ResponseData::Blob { data } => {\n\n ResponseOut::StdoutLine(String::from_utf8_lossy(&data).to_string())\n\n }\n\n ResponseData::Text { data } => ResponseOut::StdoutLine(data),\n\n ResponseData::DirEntries { entries, .. } => ResponseOut::StdoutLine(\n\n entries\n\n .into_iter()\n\n .map(|entry| {\n\n format!(\n\n \"{}{}\",\n\n entry.path.as_os_str().to_string_lossy(),\n\n if entry.file_type.is_dir() {\n\n // NOTE: This can be different from the server if\n\n // the server OS is unix and the client is\n", "file_path": "src/output.rs", "rank": 18, "score": 197856.0086360915 }, { "content": "fn deserialize_from_slice<T: DeserializeOwned>(slice: &[u8]) -> Result<T, DeserializeError> {\n\n ciborium::de::from_reader(slice).map_err(|x| DeserializeError(x.to_string()))\n\n}\n\n\n\n#[derive(Debug, Display, Error, From)]\n\npub enum TransportError {\n\n CryptoError(SecretKeyError),\n\n IoError(io::Error),\n\n SerializeError(SerializeError),\n\n DeserializeError(DeserializeError),\n\n}\n\n\n", "file_path": "distant-core/src/net/transport/mod.rs", "rank": 19, "score": 196535.23790073948 }, { "content": "fn serialize_to_vec<T: Serialize>(value: &T) -> Result<Vec<u8>, SerializeError> {\n\n let mut v = Vec::new();\n\n\n\n let _ = ciborium::ser::into_writer(value, &mut v).map_err(|x| SerializeError(x.to_string()))?;\n\n\n\n Ok(v)\n\n}\n\n\n", "file_path": "distant-core/src/net/transport/mod.rs", "rank": 20, "score": 194903.1924422696 }, { "content": "// Generates a new tenant name\n\npub fn new_tenant() -> String {\n\n format!(\"tenant_{}{}\", rand::random::<u16>(), rand::random::<u8>())\n\n}\n", "file_path": "src/utils.rs", "rank": 21, "score": 194568.64947306635 }, { "content": "pub fn friendly_recv_line(\n\n receiver: &mpsc::Receiver<String>,\n\n duration: Duration,\n\n) -> io::Result<String> {\n\n let start = Instant::now();\n\n loop {\n\n if let Ok(line) = receiver.try_recv() {\n\n break Ok(line);\n\n }\n\n\n\n if start.elapsed() > duration {\n\n return Err(io::Error::new(\n\n io::ErrorKind::TimedOut,\n\n format!(\"Failed to receive line after {}s\", duration.as_secs_f32()),\n\n ));\n\n }\n\n\n\n std::thread::yield_now();\n\n }\n\n}\n\n\n", "file_path": "tests/cli/utils.rs", "rank": 22, "score": 194356.47386676286 }, { "content": "/// Wraps an async function and a scheduler function such that\n\n/// a new function is returned that takes a callback when the async\n\n/// function completes as well as zero or more arguments to provide\n\n/// to the async function when first executing it\n\n///\n\n/// ```lua\n\n/// local f = wrap_async(some_async_fn, schedule_fn)\n\n/// f(arg1, arg2, ..., function(success, res) end)\n\n/// ```\n\npub fn wrap_async<'a>(\n\n lua: &'a Lua,\n\n async_fn: LuaFunction<'a>,\n\n schedule_fn: LuaFunction<'a>,\n\n) -> LuaResult<LuaFunction<'a>> {\n\n let pending = pending(lua)?;\n\n lua.load(chunk! {\n\n return function(...)\n\n local args = {...}\n\n local cb = table.remove(args)\n\n\n\n assert(type(cb) == \"function\", \"Invalid type for cb\")\n\n local schedule = function(...) return $schedule_fn(...) end\n\n\n\n // Wrap the async function in a coroutine so we can poll it\n\n local thread = coroutine.create(function(...) return $async_fn(...) end)\n\n\n\n // Start the future by peforming the first poll\n\n local status, res = coroutine.resume(thread, unpack(args))\n\n\n", "file_path": "distant-lua/src/utils.rs", "rank": 23, "score": 191757.3032010063 }, { "content": "/// Creates a random tenant name\n\npub fn random_tenant() -> String {\n\n format!(\"test-tenant-{}\", rand::random::<u16>())\n\n}\n\n\n", "file_path": "tests/cli/utils.rs", "rank": 24, "score": 191747.6565546924 }, { "content": "/// Specialty function that performs wrap_async using `vim.defer_fn` from neovim\n\npub fn nvim_wrap_async<'a>(\n\n lua: &'a Lua,\n\n async_fn: LuaFunction<'a>,\n\n millis: u64,\n\n) -> LuaResult<LuaFunction<'a>> {\n\n let schedule_fn = lua\n\n .load(chunk! {\n\n function(cb)\n\n return vim.defer_fn(cb, $millis)\n\n end\n\n })\n\n .eval()?;\n\n wrap_async(lua, async_fn, schedule_fn)\n\n}\n\n\n", "file_path": "distant-lua/src/utils.rs", "rank": 25, "score": 189049.76931649403 }, { "content": "/// Creates a function that produces a session within the provided Lua environment\n\n/// using the given distant server context, returning the session's id\n\npub fn make_function<'a>(lua: &'a Lua, ctx: &'_ DistantServerCtx) -> LuaResult<LuaFunction<'a>> {\n\n let addr = ctx.addr;\n\n let host = addr.ip().to_string();\n\n let port = addr.port();\n\n let key = ctx.key.clone();\n\n\n\n lua.load(chunk! {\n\n local distant = require(\"distant_lua\")\n\n local thread = coroutine.create(distant.session.connect_async)\n\n\n\n local status, res = coroutine.resume(thread, {\n\n host = $host,\n\n port = $port,\n\n key = $key,\n\n timeout = 15000,\n\n })\n\n\n\n // Block until the connection finishes\n\n local session = nil\n\n while status do\n", "file_path": "distant-lua-tests/tests/common/session.rs", "rank": 26, "score": 184803.57847525214 }, { "content": "pub fn make() -> LuaResult<Lua> {\n\n let (dylib_path, dylib_ext, separator);\n\n if cfg!(target_os = \"macos\") {\n\n dylib_path = env::var(\"DYLD_FALLBACK_LIBRARY_PATH\").unwrap();\n\n dylib_ext = \"dylib\";\n\n separator = \":\";\n\n } else if cfg!(target_os = \"linux\") {\n\n dylib_path = env::var(\"LD_LIBRARY_PATH\").unwrap();\n\n dylib_ext = \"so\";\n\n separator = \":\";\n\n } else if cfg!(target_os = \"windows\") {\n\n dylib_path = env::var(\"PATH\").unwrap();\n\n dylib_ext = \"dll\";\n\n separator = \";\";\n\n } else {\n\n panic!(\"unknown target os\");\n\n };\n\n\n\n let mut cpath = dylib_path\n\n .split(separator)\n", "file_path": "distant-lua-tests/tests/common/lua.rs", "rank": 27, "score": 179166.12405519286 }, { "content": "/// Produces a regex predicate using the given string\n\npub fn regex_pred(s: &str) -> predicates::str::RegexPredicate {\n\n predicate::str::is_match(s).unwrap()\n\n}\n\n\n", "file_path": "tests/cli/utils.rs", "rank": 28, "score": 173056.02578075047 }, { "content": "#[rstest]\n\nfn should_return_error_if_sending_stdin_to_dead_process(ctx: &'_ DistantServerCtx) {\n\n let lua = lua::make().unwrap();\n\n let new_session = session::make_function(&lua, ctx).unwrap();\n\n\n\n // Spawn a process that will exit immediately, but is a valid process\n\n let cmd = SCRIPT_RUNNER.to_string();\n\n let args = vec![SLEEP_SH.to_str().unwrap().to_string(), String::from(\"0\")];\n\n\n\n let wait_fn = lua\n\n .create_function(|_, ()| {\n\n std::thread::sleep(std::time::Duration::from_millis(50));\n\n Ok(())\n\n })\n\n .unwrap();\n\n\n\n let result = lua\n\n .load(chunk! {\n\n local session = $new_session()\n\n local proc = session:spawn({ cmd = $cmd, args = $args })\n\n\n", "file_path": "distant-lua-tests/tests/lua/sync/spawn.rs", "rank": 29, "score": 170582.12277457662 }, { "content": "#[rstest]\n\nfn should_return_error_if_sending_stdin_to_dead_process(ctx: &'_ DistantServerCtx) {\n\n let lua = lua::make().unwrap();\n\n let new_session = session::make_function(&lua, ctx).unwrap();\n\n let schedule_fn = poll::make_function(&lua).unwrap();\n\n\n\n // Spawn a process that will exit immediately, but is a valid process\n\n let cmd = SCRIPT_RUNNER.to_string();\n\n let args = vec![SLEEP_SH.to_str().unwrap().to_string(), String::from(\"0\")];\n\n\n\n let wait_fn = lua\n\n .create_function(|_, ()| {\n\n std::thread::sleep(std::time::Duration::from_millis(50));\n\n Ok(())\n\n })\n\n .unwrap();\n\n\n\n let result = lua\n\n .load(chunk! {\n\n local session = $new_session()\n\n local distant = require(\"distant_lua\")\n", "file_path": "distant-lua-tests/tests/lua/async/spawn.rs", "rank": 30, "score": 170582.12277457662 }, { "content": "/// Retrieves the global runtime, initializing it if not initialized, and returning\n\n/// an error if failed to initialize\n\npub fn get_runtime() -> LuaResult<&'static tokio::runtime::Runtime> {\n\n static RUNTIME: OnceCell<tokio::runtime::Runtime> = OnceCell::new();\n\n RUNTIME.get_or_try_init(|| {\n\n tokio::runtime::Builder::new_multi_thread()\n\n .enable_all()\n\n .build()\n\n .map_err(|x| x.to_lua_err())\n\n })\n\n}\n\n\n", "file_path": "distant-lua/src/runtime.rs", "rank": 31, "score": 168533.91351554784 }, { "content": "/// Produces a new command for distant using the given subcommand\n\npub fn distant_subcommand(ctx: &DistantServerCtx, subcommand: &str) -> Command {\n\n let mut cmd = Command::new(cargo_bin(env!(\"CARGO_PKG_NAME\")));\n\n cmd.arg(subcommand)\n\n .args(&[\"--session\", \"environment\"])\n\n .env(\"DISTANT_HOST\", ctx.addr.ip().to_string())\n\n .env(\"DISTANT_PORT\", ctx.addr.port().to_string())\n\n .env(\"DISTANT_KEY\", ctx.key.as_str())\n\n .stdin(Stdio::piped())\n\n .stdout(Stdio::piped())\n\n .stderr(Stdio::piped());\n\n cmd\n\n}\n\n\n", "file_path": "tests/cli/utils.rs", "rank": 32, "score": 168504.93478402452 }, { "content": "/// Blocks using the global runtime for a future that returns `LuaResult<T>`\n\npub fn block_on<F, T>(future: F) -> LuaResult<T>\n\nwhere\n\n F: Future<Output = Result<T, LuaError>>,\n\n{\n\n get_runtime()?.block_on(future)\n\n}\n\n\n", "file_path": "distant-lua/src/runtime.rs", "rank": 33, "score": 166802.43899238572 }, { "content": "/// Creates a function that can be passed as the schedule function for `wrap_async`\n\npub fn make_function(lua: &Lua) -> LuaResult<LuaFunction> {\n\n let sleep = lua.create_function(|_, ()| {\n\n thread::sleep(Duration::from_millis(10));\n\n Ok(())\n\n })?;\n\n\n\n lua.load(chunk! {\n\n local cb = ...\n\n $sleep()\n\n cb()\n\n })\n\n .into_function()\n\n}\n", "file_path": "distant-lua-tests/tests/common/poll.rs", "rank": 34, "score": 166163.61811617578 }, { "content": "/// Creates a new thread that performs stdin reads in a blocking fashion, returning\n\n/// a handle to the thread and a receiver that will be sent input as it becomes available\n\npub fn spawn_channel(buffer: usize) -> (thread::JoinHandle<()>, mpsc::Receiver<String>) {\n\n let (tx, rx) = mpsc::channel(1);\n\n\n\n // NOTE: Using blocking I/O per tokio's advice to read from stdin line-by-line and then\n\n // pass the results to a separate async handler to forward to the remote process\n\n let handle = thread::spawn(move || {\n\n let mut stdin = BufReader::new(io::stdin());\n\n\n\n // Maximum chunk that we expect to read at any one time\n\n let mut buf = vec![0; buffer];\n\n\n\n loop {\n\n match stdin.read(&mut buf) {\n\n Ok(0) | Err(_) => break,\n\n Ok(n) => {\n\n match String::from_utf8(buf[..n].to_vec()) {\n\n Ok(text) => {\n\n if let Err(x) = tx.blocking_send(text) {\n\n error!(\"Stdin channel closed: {}\", x);\n\n break;\n", "file_path": "src/stdin.rs", "rank": 35, "score": 165264.9270334801 }, { "content": "pub fn spawn_line_reader<T>(mut reader: T) -> mpsc::Receiver<String>\n\nwhere\n\n T: std::io::Read + Send + 'static,\n\n{\n\n let (tx, rx) = mpsc::channel();\n\n std::thread::spawn(move || {\n\n let mut buf = String::new();\n\n let mut tmp = [0; 1024];\n\n while let Ok(n) = reader.read(&mut tmp) {\n\n if n == 0 {\n\n break;\n\n }\n\n\n\n let data = String::from_utf8_lossy(&tmp[..n]);\n\n buf.push_str(data.as_ref());\n\n\n\n // Send all complete lines\n\n if let Some(idx) = buf.rfind('\\n') {\n\n let remaining = buf.split_off(idx + 1);\n\n for line in buf.lines() {\n", "file_path": "tests/cli/utils.rs", "rank": 36, "score": 162994.31220770313 }, { "content": "fn get_session_channel(id: usize) -> LuaResult<SessionChannel> {\n\n with_session(id, |session| session.clone_channel())\n\n}\n\n\n\n/// Holds a reference to the session to perform remote operations\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\npub struct Session {\n\n id: usize,\n\n}\n\n\n\nimpl Session {\n\n /// Creates a new session referencing the given distant session with the specified id\n\n pub fn new(id: usize) -> Self {\n\n Self { id }\n\n }\n\n\n\n /// Retrieves all sessions\n\n pub fn all() -> LuaResult<Vec<Self>> {\n\n Ok(SESSION_MAP\n\n .read()\n", "file_path": "distant-lua/src/session.rs", "rank": 37, "score": 155901.39808494513 }, { "content": "/// Main entrypoint into the program\n\npub fn run() {\n\n let opt = opt::Opt::load();\n\n let logger = init_logging(&opt.common, opt.subcommand.is_remote_process());\n\n if let Err(x) = opt.subcommand.run(opt.common) {\n\n if !x.is_silent() {\n\n error!(\"Exiting due to error: {}\", x);\n\n }\n\n logger.flush();\n\n logger.shutdown();\n\n\n\n std::process::exit(x.to_i32());\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 38, "score": 154775.35836180183 }, { "content": "fn get_session_details(id: usize) -> LuaResult<Option<SessionDetails>> {\n\n with_session(id, |session| session.details().cloned())\n\n}\n\n\n", "file_path": "distant-lua/src/session.rs", "rank": 40, "score": 153221.7584585977 }, { "content": "fn read_lsp_messages(input: &[u8]) -> io::Result<(Option<Vec<u8>>, Vec<LspData>)> {\n\n let mut queue = Vec::new();\n\n\n\n // Continue to read complete messages from the input until we either fail to parse or we reach\n\n // end of input, resetting cursor position back to last successful parse as otherwise the\n\n // cursor may have moved partially from lsp successfully reading the start of a message\n\n let mut cursor = Cursor::new(input);\n\n let mut pos = 0;\n\n while let Ok(data) = LspData::from_buf_reader(&mut cursor) {\n\n queue.push(data);\n\n pos = cursor.position();\n\n }\n\n cursor.set_position(pos);\n\n\n\n // Keep remainder of bytes not processed as LSP message in buffer\n\n let remainder = if (cursor.position() as usize) < cursor.get_ref().len() {\n\n let mut buf = Vec::new();\n\n cursor.read_to_end(&mut buf)?;\n\n Some(buf)\n\n } else {\n", "file_path": "distant-core/src/client/lsp/mod.rs", "rank": 41, "score": 151993.8429350218 }, { "content": "/// Interface representing a two-way data stream\n\n///\n\n/// Enables splitting into separate, functioning halves that can read and write respectively\n\npub trait DataStream: AsyncRead + AsyncWrite + Unpin {\n\n type Read: AsyncRead + Send + Unpin + 'static;\n\n type Write: AsyncWrite + Send + Unpin + 'static;\n\n\n\n /// Returns a textual description of the connection associated with this stream\n\n fn to_connection_tag(&self) -> String;\n\n\n\n /// Splits this stream into read and write halves\n\n fn into_split(self) -> (Self::Read, Self::Write);\n\n}\n\n\n\n/// Represents a transport of data across the network\n\n#[derive(Debug)]\n\npub struct Transport<T, U>(Framed<T, U>)\n\nwhere\n\n T: DataStream,\n\n U: Codec;\n\n\n\nimpl<T, U> Transport<T, U>\n\nwhere\n", "file_path": "distant-core/src/net/transport/mod.rs", "rank": 42, "score": 150963.35002717865 }, { "content": "fn to_other_error<E>(err: E) -> io::Error\n\nwhere\n\n E: Into<Box<dyn std::error::Error + Send + Sync>>,\n\n{\n\n io::Error::new(io::ErrorKind::Other, err)\n\n}\n\n\n\n#[derive(Default)]\n\npub(crate) struct State {\n\n processes: HashMap<usize, Process>,\n\n}\n\n\n", "file_path": "distant-ssh2/src/handler.rs", "rank": 43, "score": 145942.1370431715 }, { "content": "#[derive(Copy, Clone, Debug, Serialize, Deserialize)]\n\n#[serde(rename_all = \"snake_case\")]\n\nenum LogLevel {\n\n Off,\n\n Error,\n\n Warn,\n\n Info,\n\n Debug,\n\n Trace,\n\n}\n\n\n\nimpl From<LogLevel> for LevelFilter {\n\n fn from(level: LogLevel) -> Self {\n\n match level {\n\n LogLevel::Off => Self::Off,\n\n LogLevel::Error => Self::Error,\n\n LogLevel::Warn => Self::Warn,\n\n LogLevel::Info => Self::Info,\n\n LogLevel::Debug => Self::Debug,\n\n LogLevel::Trace => Self::Trace,\n\n }\n\n }\n\n}\n\n\n", "file_path": "distant-lua/src/log.rs", "rank": 44, "score": 142544.19158618496 }, { "content": "#[fixture]\n\npub fn sshd() -> &'static Sshd {\n\n static SSHD: OnceCell<Sshd> = OnceCell::new();\n\n\n\n SSHD.get_or_init(|| Sshd::spawn(Default::default()).unwrap())\n\n}\n\n\n\n#[fixture]\n\npub async fn session(sshd: &'_ Sshd, _logger: &'_ flexi_logger::LoggerHandle) -> Session {\n\n let port = sshd.port;\n\n\n\n let mut ssh2_session = Ssh2Session::connect(\n\n \"127.0.0.1\",\n\n Ssh2SessionOpts {\n\n port: Some(port),\n\n identity_files: vec![sshd.tmp.child(\"id_rsa\").path().to_path_buf()],\n\n identities_only: Some(true),\n\n user: Some(USERNAME.to_string()),\n\n user_known_hosts_files: vec![sshd.tmp.child(\"known_hosts\").path().to_path_buf()],\n\n ..Default::default()\n\n },\n", "file_path": "distant-ssh2/tests/sshd.rs", "rank": 45, "score": 142116.79900960665 }, { "content": "#[fixture]\n\npub fn ctx() -> &'static DistantServerCtx {\n\n static CTX: OnceCell<DistantServerCtx> = OnceCell::new();\n\n\n\n CTX.get_or_init(DistantServerCtx::initialize)\n\n}\n\n\n", "file_path": "tests/cli/fixtures.rs", "rank": 46, "score": 140389.47204923513 }, { "content": "#[rstest]\n\nfn should_return_error_on_failure(ctx: &'_ DistantServerCtx) {\n\n let lua = lua::make().unwrap();\n\n let new_session = session::make_function(&lua, ctx).unwrap();\n\n\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n let src = temp.child(\"src\");\n\n let dst = temp.child(\"dst\");\n\n let src_path = src.path().to_str().unwrap();\n\n let dst_path = dst.path().to_str().unwrap();\n\n\n\n let result = lua\n\n .load(chunk! {\n\n local session = $new_session()\n\n local status, _ = pcall(session.rename, session, {\n\n src = $src_path,\n\n dst = $dst_path,\n\n })\n\n assert(not status, \"Unexpectedly succeeded!\")\n\n })\n\n .exec();\n\n assert!(result.is_ok(), \"Failed: {}\", result.unwrap_err());\n\n\n\n // Also, verify that destination does not exist\n\n dst.assert(predicate::path::missing());\n\n}\n\n\n", "file_path": "distant-lua-tests/tests/lua/sync/rename.rs", "rank": 47, "score": 138761.3863281987 }, { "content": "#[rstest]\n\nfn should_return_error_on_failure(ctx: &'_ DistantServerCtx) {\n\n let lua = lua::make().unwrap();\n\n let new_session = session::make_function(&lua, ctx).unwrap();\n\n let schedule_fn = poll::make_function(&lua).unwrap();\n\n\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n let src = temp.child(\"src\");\n\n let dst = temp.child(\"dst\");\n\n let src_path = src.path().to_str().unwrap();\n\n let dst_path = dst.path().to_str().unwrap();\n\n\n\n let result = lua\n\n .load(chunk! {\n\n local session = $new_session()\n\n local f = require(\"distant_lua\").utils.wrap_async(\n\n session.rename_async,\n\n $schedule_fn\n\n )\n\n\n\n // Because of our scheduler, the invocation turns async -> sync\n", "file_path": "distant-lua-tests/tests/lua/async/rename.rs", "rank": 48, "score": 138761.3863281987 }, { "content": "#[rstest]\n\nfn should_return_error_on_failure(ctx: &'_ DistantServerCtx) {\n\n let lua = lua::make().unwrap();\n\n let new_session = session::make_function(&lua, ctx).unwrap();\n\n let schedule_fn = poll::make_function(&lua).unwrap();\n\n\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n let file = temp.child(\"missing-file\");\n\n let file_path = file.path().to_str().unwrap();\n\n\n\n let result = lua\n\n .load(chunk! {\n\n local session = $new_session()\n\n local f = require(\"distant_lua\").utils.wrap_async(\n\n session.remove_async,\n\n $schedule_fn\n\n )\n\n\n\n // Because of our scheduler, the invocation turns async -> sync\n\n local err\n\n f(session, { path = $file_path }, function(success, res)\n", "file_path": "distant-lua-tests/tests/lua/async/remove.rs", "rank": 49, "score": 138761.3863281987 }, { "content": "#[rstest]\n\nfn should_return_error_on_failure(ctx: &'_ DistantServerCtx) {\n\n let lua = lua::make().unwrap();\n\n let new_session = session::make_function(&lua, ctx).unwrap();\n\n\n\n let cmd = DOES_NOT_EXIST_BIN.to_str().unwrap().to_string();\n\n let args: Vec<String> = Vec::new();\n\n\n\n let result = lua\n\n .load(chunk! {\n\n local session = $new_session()\n\n local status, _ = pcall(session.spawn, session, {\n\n cmd = $cmd,\n\n args = $args\n\n })\n\n assert(not status, \"Unexpectedly succeeded!\")\n\n })\n\n .exec();\n\n assert!(result.is_ok(), \"Failed: {}\", result.unwrap_err());\n\n}\n\n\n", "file_path": "distant-lua-tests/tests/lua/sync/spawn.rs", "rank": 50, "score": 138761.3863281987 }, { "content": "#[rstest]\n\nfn should_return_error_on_failure(ctx: &'_ DistantServerCtx) {\n\n let lua = lua::make().unwrap();\n\n let new_session = session::make_function(&lua, ctx).unwrap();\n\n let schedule_fn = poll::make_function(&lua).unwrap();\n\n\n\n let cmd = DOES_NOT_EXIST_BIN.to_str().unwrap().to_string();\n\n let args: Vec<String> = Vec::new();\n\n\n\n let result = lua\n\n .load(chunk! {\n\n local session = $new_session()\n\n local distant = require(\"distant_lua\")\n\n local f = distant.utils.wrap_async(session.spawn_async, $schedule_fn)\n\n\n\n // Because of our scheduler, the invocation turns async -> sync\n\n local err\n\n f(session, { cmd = $cmd, args = $args }, function(success, res)\n\n if not success then\n\n err = res\n\n end\n\n end)\n\n assert(err, \"Unexpectedly succeeded\")\n\n })\n\n .exec();\n\n assert!(result.is_ok(), \"Failed: {}\", result.unwrap_err());\n\n}\n\n\n", "file_path": "distant-lua-tests/tests/lua/async/spawn.rs", "rank": 51, "score": 138761.3863281987 }, { "content": "#[rstest]\n\nfn should_return_error_on_failure(ctx: &'_ DistantServerCtx) {\n\n let lua = lua::make().unwrap();\n\n let new_session = session::make_function(&lua, ctx).unwrap();\n\n\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n let file = temp.child(\"missing-file\");\n\n let file_path = file.path().to_str().unwrap();\n\n\n\n let result = lua\n\n .load(chunk! {\n\n local session = $new_session()\n\n local status, _ = pcall(session.remove, session, { path = $file_path })\n\n assert(not status, \"Unexpectedly succeeded!\")\n\n })\n\n .exec();\n\n assert!(result.is_ok(), \"Failed: {}\", result.unwrap_err());\n\n\n\n // Also, verify that path does not exist\n\n file.assert(predicate::path::missing());\n\n}\n\n\n", "file_path": "distant-lua-tests/tests/lua/sync/remove.rs", "rank": 52, "score": 138761.3863281987 }, { "content": "#[rstest]\n\nfn should_return_error_on_failure(ctx: &'_ DistantServerCtx) {\n\n let lua = lua::make().unwrap();\n\n let new_session = session::make_function(&lua, ctx).unwrap();\n\n let schedule_fn = poll::make_function(&lua).unwrap();\n\n\n\n let cmd = DOES_NOT_EXIST_BIN.to_str().unwrap().to_string();\n\n let args: Vec<String> = Vec::new();\n\n\n\n let result = lua\n\n .load(chunk! {\n\n local session = $new_session()\n\n local distant = require(\"distant_lua\")\n\n local f = distant.utils.wrap_async(session.spawn_wait_async, $schedule_fn)\n\n\n\n // Because of our scheduler, the invocation turns async -> sync\n\n local err\n\n f(session, { cmd = $cmd, args = $args }, function(success, res)\n\n if not success then\n\n err = res\n\n end\n\n end)\n\n assert(err, \"Unexpectedly succeeded\")\n\n })\n\n .exec();\n\n assert!(result.is_ok(), \"Failed: {}\", result.unwrap_err());\n\n}\n\n\n", "file_path": "distant-lua-tests/tests/lua/async/spawn_wait.rs", "rank": 53, "score": 136686.63285405334 }, { "content": "#[rstest]\n\nfn should_return_error_on_failure(ctx: &'_ DistantServerCtx) {\n\n let lua = lua::make().unwrap();\n\n let new_session = session::make_function(&lua, ctx).unwrap();\n\n\n\n let cmd = DOES_NOT_EXIST_BIN.to_str().unwrap().to_string();\n\n let args: Vec<String> = Vec::new();\n\n\n\n let result = lua\n\n .load(chunk! {\n\n local session = $new_session()\n\n local status, _ = pcall(session.spawn_wait, session, {\n\n cmd = $cmd,\n\n args = $args\n\n })\n\n assert(not status, \"Unexpectedly succeeded!\")\n\n })\n\n .exec();\n\n assert!(result.is_ok(), \"Failed: {}\", result.unwrap_err());\n\n}\n\n\n", "file_path": "distant-lua-tests/tests/lua/sync/spawn_wait.rs", "rank": 54, "score": 136686.63285405334 }, { "content": "#[fixture]\n\npub fn ctx() -> &'static DistantServerCtx {\n\n static CTX: OnceCell<DistantServerCtx> = OnceCell::new();\n\n\n\n CTX.get_or_init(DistantServerCtx::initialize)\n\n}\n", "file_path": "distant-lua-tests/tests/common/fixtures.rs", "rank": 55, "score": 135591.80676424789 }, { "content": "#[fixture]\n\npub fn logger() -> &'static flexi_logger::LoggerHandle {\n\n static LOGGER: OnceCell<flexi_logger::LoggerHandle> = OnceCell::new();\n\n\n\n LOGGER.get_or_init(|| {\n\n // flexi_logger::Logger::try_with_str(\"off, distant_core=trace, distant_ssh2=trace\")\n\n flexi_logger::Logger::try_with_str(\"off, distant_core=warn, distant_ssh2=warn\")\n\n .expect(\"Failed to load env\")\n\n .start()\n\n .expect(\"Failed to start logger\")\n\n })\n\n}\n\n\n", "file_path": "distant-ssh2/tests/sshd.rs", "rank": 56, "score": 134935.90245913627 }, { "content": "#[rstest]\n\n#[cfg_attr(windows, ignore)]\n\nfn should_return_process_that_can_retrieve_stdout(ctx: &'_ DistantServerCtx) {\n\n let lua = lua::make().unwrap();\n\n let new_session = session::make_function(&lua, ctx).unwrap();\n\n let schedule_fn = poll::make_function(&lua).unwrap();\n\n\n\n let cmd = SCRIPT_RUNNER.to_string();\n\n let args = vec![\n\n ECHO_ARGS_TO_STDOUT_SH.to_str().unwrap().to_string(),\n\n String::from(\"some stdout\"),\n\n ];\n\n\n\n let wait_fn = lua\n\n .create_function(|_, ()| {\n\n std::thread::sleep(std::time::Duration::from_millis(50));\n\n Ok(())\n\n })\n\n .unwrap();\n\n\n\n let result = lua\n\n .load(chunk! {\n", "file_path": "distant-lua-tests/tests/lua/async/spawn.rs", "rank": 57, "score": 134923.86602907666 }, { "content": "#[rstest]\n\n#[cfg_attr(windows, ignore)]\n\nfn should_return_process_that_can_retrieve_stdout(ctx: &'_ DistantServerCtx) {\n\n let lua = lua::make().unwrap();\n\n let new_session = session::make_function(&lua, ctx).unwrap();\n\n\n\n let cmd = SCRIPT_RUNNER.to_string();\n\n let args = vec![\n\n ECHO_ARGS_TO_STDOUT_SH.to_str().unwrap().to_string(),\n\n String::from(\"some stdout\"),\n\n ];\n\n\n\n let wait_fn = lua\n\n .create_function(|_, ()| {\n\n std::thread::sleep(std::time::Duration::from_millis(50));\n\n Ok(())\n\n })\n\n .unwrap();\n\n\n\n let result = lua\n\n .load(chunk! {\n\n local session = $new_session()\n", "file_path": "distant-lua-tests/tests/lua/sync/spawn.rs", "rank": 58, "score": 134923.86602907666 }, { "content": "#[rstest]\n\nfn should_return_error_if_directory_does_not_exist(ctx: &'_ DistantServerCtx) {\n\n let lua = lua::make().unwrap();\n\n let new_session = session::make_function(&lua, ctx).unwrap();\n\n\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n let dir = temp.child(\"test-dir\");\n\n let dir_path = dir.path().to_str().unwrap();\n\n\n\n let result = lua\n\n .load(chunk! {\n\n local session = $new_session()\n\n local status, _ = pcall(session.read_dir, session, { path = $dir_path })\n\n assert(not status, \"Unexpectedly succeeded\")\n\n })\n\n .exec();\n\n assert!(result.is_ok(), \"Failed: {}\", result.unwrap_err());\n\n}\n\n\n", "file_path": "distant-lua-tests/tests/lua/sync/read_dir.rs", "rank": 59, "score": 134683.50331527446 }, { "content": "#[rstest]\n\nfn should_return_error_when_killing_dead_process(ctx: &'_ DistantServerCtx) {\n\n let lua = lua::make().unwrap();\n\n let new_session = session::make_function(&lua, ctx).unwrap();\n\n\n\n // Spawn a process that will exit immediately, but is a valid process\n\n let cmd = SCRIPT_RUNNER.to_string();\n\n let args = vec![SLEEP_SH.to_str().unwrap().to_string(), String::from(\"0\")];\n\n\n\n let wait_fn = lua\n\n .create_function(|_, ()| {\n\n std::thread::sleep(std::time::Duration::from_millis(50));\n\n Ok(())\n\n })\n\n .unwrap();\n\n\n\n let result = lua\n\n .load(chunk! {\n\n local session = $new_session()\n\n local proc = session:spawn({ cmd = $cmd, args = $args })\n\n\n\n // Wait briefly to ensure the process dies\n\n $wait_fn()\n\n\n\n local status, _ = pcall(proc.kill, proc)\n\n assert(not status, \"Unexpectedly succeeded\")\n\n })\n\n .exec();\n\n assert!(result.is_ok(), \"Failed: {}\", result.unwrap_err());\n\n}\n\n\n", "file_path": "distant-lua-tests/tests/lua/sync/spawn.rs", "rank": 60, "score": 134683.50331527446 }, { "content": "#[rstest]\n\nfn should_return_error_when_killing_dead_process(ctx: &'_ DistantServerCtx) {\n\n let lua = lua::make().unwrap();\n\n let new_session = session::make_function(&lua, ctx).unwrap();\n\n let schedule_fn = poll::make_function(&lua).unwrap();\n\n\n\n // Spawn a process that will exit immediately, but is a valid process\n\n let cmd = SCRIPT_RUNNER.to_string();\n\n let args = vec![SLEEP_SH.to_str().unwrap().to_string(), String::from(\"0\")];\n\n\n\n let wait_fn = lua\n\n .create_function(|_, ()| {\n\n std::thread::sleep(std::time::Duration::from_millis(50));\n\n Ok(())\n\n })\n\n .unwrap();\n\n\n\n let result = lua\n\n .load(chunk! {\n\n local session = $new_session()\n\n local distant = require(\"distant_lua\")\n", "file_path": "distant-lua-tests/tests/lua/async/spawn.rs", "rank": 61, "score": 134683.50331527446 }, { "content": "#[rstest]\n\nfn should_return_error_if_directory_does_not_exist(ctx: &'_ DistantServerCtx) {\n\n let lua = lua::make().unwrap();\n\n let new_session = session::make_function(&lua, ctx).unwrap();\n\n let schedule_fn = poll::make_function(&lua).unwrap();\n\n\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n let dir = temp.child(\"test-dir\");\n\n let dir_path = dir.path().to_str().unwrap();\n\n\n\n let result = lua\n\n .load(chunk! {\n\n local session = $new_session()\n\n local f = require(\"distant_lua\").utils.wrap_async(\n\n session.read_dir_async,\n\n $schedule_fn\n\n )\n\n\n\n // Because of our scheduler, the invocation turns async -> sync\n\n local err\n\n f(session, { path = $dir_path }, function(success, res)\n\n if not success then\n\n err = res\n\n end\n\n end)\n\n assert(err, \"Unexpectedly succeeded\")\n\n })\n\n .exec();\n\n assert!(result.is_ok(), \"Failed: {}\", result.unwrap_err());\n\n}\n\n\n", "file_path": "distant-lua-tests/tests/lua/async/read_dir.rs", "rank": 62, "score": 134683.50331527446 }, { "content": "enum SessionParams {\n\n Tcp {\n\n addr: SocketAddr,\n\n codec: XChaCha20Poly1305Codec,\n\n lsp_data: Option<LspData>,\n\n },\n\n #[cfg(unix)]\n\n Socket { path: PathBuf, codec: PlainCodec },\n\n}\n\n\n\nasync fn retrieve_session_params(\n\n session_input: SessionInput,\n\n session_file: impl AsRef<Path>,\n\n session_socket: impl AsRef<Path>,\n\n) -> io::Result<SessionParams> {\n\n Ok(match session_input {\n\n SessionInput::Environment => {\n\n let info = SessionInfo::from_environment()?;\n\n let addr = info.to_socket_addr().await?;\n\n let codec = XChaCha20Poly1305Codec::from(info.key);\n", "file_path": "src/subcommand/mod.rs", "rank": 63, "score": 133184.14409756547 }, { "content": "#[fixture]\n\npub fn lsp_cmd(ctx: &'_ DistantServerCtx) -> Command {\n\n ctx.new_cmd(\"lsp\")\n\n}\n", "file_path": "tests/cli/fixtures.rs", "rank": 64, "score": 132912.21475830238 }, { "content": "#[fixture]\n\npub fn action_cmd(ctx: &'_ DistantServerCtx) -> Command {\n\n ctx.new_cmd(\"action\")\n\n}\n\n\n", "file_path": "tests/cli/fixtures.rs", "rank": 65, "score": 132912.21475830238 }, { "content": "#[rstest]\n\nfn should_return_error_if_fails_to_read_file(ctx: &'_ DistantServerCtx) {\n\n let lua = lua::make().unwrap();\n\n let new_session = session::make_function(&lua, ctx).unwrap();\n\n let schedule_fn = poll::make_function(&lua).unwrap();\n\n\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n let file = temp.child(\"missing-file\");\n\n let file_path = file.path();\n\n let file_path_str = file_path.to_str().unwrap();\n\n\n\n let result = lua\n\n .load(chunk! {\n\n local session = $new_session()\n\n local f = require(\"distant_lua\").utils.wrap_async(\n\n session.read_file_async,\n\n $schedule_fn\n\n )\n\n\n\n // Because of our scheduler, the invocation turns async -> sync\n\n local err\n", "file_path": "distant-lua-tests/tests/lua/async/read_file.rs", "rank": 66, "score": 132748.35177792248 }, { "content": "#[rstest]\n\nfn should_return_error_if_fails_to_read_file(ctx: &'_ DistantServerCtx) {\n\n let lua = lua::make().unwrap();\n\n let new_session = session::make_function(&lua, ctx).unwrap();\n\n\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n let file = temp.child(\"missing-file\");\n\n let file_path = file.path();\n\n let file_path_str = file_path.to_str().unwrap();\n\n\n\n let result = lua\n\n .load(chunk! {\n\n local session = $new_session()\n\n local status, _ = pcall(session.read_file, session, { path = $file_path_str })\n\n assert(not status, \"Unexpectedly succeeded!\")\n\n })\n\n .exec();\n\n assert!(result.is_ok(), \"Failed: {}\", result.unwrap_err());\n\n}\n\n\n", "file_path": "distant-lua-tests/tests/lua/sync/read_file.rs", "rank": 67, "score": 132748.35177792248 }, { "content": "/// Provides convenience functions on top of a [`SessionChannel`]\n\npub trait SessionChannelExt {\n\n /// Appends to a remote file using the data from a collection of bytes\n\n fn append_file(\n\n &mut self,\n\n tenant: impl Into<String>,\n\n path: impl Into<PathBuf>,\n\n data: impl Into<Vec<u8>>,\n\n ) -> AsyncReturn<'_, ()>;\n\n\n\n /// Appends to a remote file using the data from a string\n\n fn append_file_text(\n\n &mut self,\n\n tenant: impl Into<String>,\n\n path: impl Into<PathBuf>,\n\n data: impl Into<String>,\n\n ) -> AsyncReturn<'_, ()>;\n\n\n\n /// Copies a remote file or directory from src to dst\n\n fn copy(\n\n &mut self,\n", "file_path": "distant-core/src/client/session/ext.rs", "rank": 68, "score": 132070.90662077355 }, { "content": "#[rstest]\n\nfn should_return_error_if_fails_to_read_file(ctx: &'_ DistantServerCtx) {\n\n let lua = lua::make().unwrap();\n\n let new_session = session::make_function(&lua, ctx).unwrap();\n\n\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n let file = temp.child(\"missing-file\");\n\n let file_path = file.path();\n\n let file_path_str = file_path.to_str().unwrap();\n\n\n\n let result = lua\n\n .load(chunk! {\n\n local session = $new_session()\n\n local status, _ = pcall(session.read_file_text, session, { path = $file_path_str })\n\n assert(not status, \"Unexpectedly succeeded!\")\n\n })\n\n .exec();\n\n assert!(result.is_ok(), \"Failed: {}\", result.unwrap_err());\n\n}\n\n\n", "file_path": "distant-lua-tests/tests/lua/sync/read_file_text.rs", "rank": 69, "score": 130877.77563538287 }, { "content": "#[rstest]\n\nfn should_return_error_if_fails_to_read_file(ctx: &'_ DistantServerCtx) {\n\n let lua = lua::make().unwrap();\n\n let new_session = session::make_function(&lua, ctx).unwrap();\n\n let schedule_fn = poll::make_function(&lua).unwrap();\n\n\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n let file = temp.child(\"missing-file\");\n\n let file_path = file.path();\n\n let file_path_str = file_path.to_str().unwrap();\n\n\n\n let result = lua\n\n .load(chunk! {\n\n local session = $new_session()\n\n local f = require(\"distant_lua\").utils.wrap_async(\n\n session.read_file_text_async,\n\n $schedule_fn\n\n )\n\n\n\n // Because of our scheduler, the invocation turns async -> sync\n\n local err\n", "file_path": "distant-lua-tests/tests/lua/async/read_file_text.rs", "rank": 70, "score": 130877.77563538287 }, { "content": "/// Represents abstraction of a codec that implements specific encoder and decoder for distant\n\npub trait Codec:\n\n for<'a> Encoder<&'a [u8], Error = io::Error> + Decoder<Item = Vec<u8>, Error = io::Error> + Clone\n\n{\n\n fn encode(&mut self, item: &[u8], dst: &mut BytesMut) -> io::Result<()>;\n\n fn decode(&mut self, src: &mut BytesMut) -> io::Result<Option<Vec<u8>>>;\n\n}\n\n\n\nmacro_rules! impl_traits_for_codec {\n\n ($type:ident) => {\n\n impl<'a> tokio_util::codec::Encoder<&'a [u8]> for $type {\n\n type Error = io::Error;\n\n\n\n fn encode(&mut self, item: &'a [u8], dst: &mut BytesMut) -> Result<(), Self::Error> {\n\n Codec::encode(self, item, dst)\n\n }\n\n }\n\n\n\n impl tokio_util::codec::Decoder for $type {\n\n type Item = Vec<u8>;\n\n type Error = io::Error;\n", "file_path": "distant-core/src/net/transport/codec/mod.rs", "rank": 71, "score": 123719.1200012339 }, { "content": "type SendFn = Arc<Mutex<Box<dyn FnMut(&[u8]) -> io::Result<()>>>>;\n", "file_path": "src/msg.rs", "rank": 72, "score": 119283.7489015534 }, { "content": "type RecvFn = Arc<Mutex<Box<dyn FnMut(&mut String) -> io::Result<()> + Send>>>;\n\n\n\n/// Sends JSON messages over stdout\n\n#[derive(Clone)]\n\npub struct MsgSender {\n\n send: SendFn,\n\n}\n\n\n\nimpl<F> From<F> for MsgSender\n\nwhere\n\n F: FnMut(&[u8]) -> io::Result<()> + 'static,\n\n{\n\n fn from(f: F) -> Self {\n\n Self {\n\n send: Arc::new(Mutex::new(Box::new(f))),\n\n }\n\n }\n\n}\n\n\n\nimpl MsgSender {\n", "file_path": "src/msg.rs", "rank": 73, "score": 114871.23670181322 }, { "content": "// Adapted from\n\n// https://github.com/rust-lang/cargo/blob/485670b3983b52289a2f353d589c57fae2f60f82/tests/testsuite/support/mod.rs#L507\n\nfn target_dir() -> PathBuf {\n\n env::current_exe()\n\n .ok()\n\n .map(|mut path| {\n\n path.pop();\n\n if path.ends_with(\"deps\") {\n\n path.pop();\n\n }\n\n path\n\n })\n\n .unwrap()\n\n}\n", "file_path": "tests/cli/utils.rs", "rank": 74, "score": 112324.12039897298 }, { "content": "fn spawn_read_task<S>(mut stream: S) -> (JoinHandle<()>, mpsc::Receiver<io::Result<Vec<u8>>>)\n\nwhere\n\n S: Stream<Item = Vec<u8>> + Send + Unpin + 'static,\n\n{\n\n let (tx, rx) = mpsc::channel::<io::Result<Vec<u8>>>(1);\n\n let read_task = tokio::spawn(async move {\n\n let mut task_buf: Option<Vec<u8>> = None;\n\n\n\n while let Some(data) = stream.next().await {\n\n // Create or insert into our buffer\n\n match &mut task_buf {\n\n Some(buf) => buf.extend(data),\n\n None => task_buf = Some(data),\n\n }\n\n\n\n // Read LSP messages from our internal buffer\n\n let buf = task_buf.take().unwrap();\n\n let (remainder, queue) = match read_lsp_messages(&buf) {\n\n Ok(x) => x,\n\n Err(x) => {\n", "file_path": "distant-core/src/client/lsp/mod.rs", "rank": 75, "score": 110707.23513947522 }, { "content": "fn cargo_bin_str(name: &str) -> PathBuf {\n\n let env_var = format!(\"CARGO_BIN_EXE_{}\", name);\n\n std::env::var_os(&env_var)\n\n .map(|p| p.into())\n\n .unwrap_or_else(|| target_dir().join(format!(\"{}{}\", name, env::consts::EXE_SUFFIX)))\n\n}\n\n\n", "file_path": "tests/cli/utils.rs", "rank": 76, "score": 105232.93544976664 }, { "content": "#[rstest]\n\nfn yield_an_error_when_fails(mut action_cmd: Command) {\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let src = temp.child(\"dir\");\n\n let dst = temp.child(\"dir2\");\n\n\n\n // distant action rename {src} {dst}\n\n action_cmd\n\n .args(&[\"rename\", src.to_str().unwrap(), dst.to_str().unwrap()])\n\n .assert()\n\n .code(ExitCode::Software.to_i32())\n\n .stdout(\"\")\n\n .stderr(FAILURE_LINE.clone());\n\n\n\n src.assert(predicate::path::missing());\n\n dst.assert(predicate::path::missing());\n\n}\n\n\n", "file_path": "tests/cli/action/rename.rs", "rank": 77, "score": 103495.63913373718 }, { "content": "#[rstest]\n\nfn yield_an_error_when_fails(mut action_cmd: Command) {\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n // Don't create file\n\n let file = temp.child(\"file\");\n\n\n\n // distant action metadata {path}\n\n action_cmd\n\n .args(&[\"metadata\", file.to_str().unwrap()])\n\n .assert()\n\n .code(ExitCode::Software.to_i32())\n\n .stdout(\"\")\n\n .stderr(FAILURE_LINE.clone());\n\n}\n\n\n", "file_path": "tests/cli/action/metadata.rs", "rank": 78, "score": 103495.63913373718 }, { "content": "#[rstest]\n\nfn yield_an_error_when_fails(mut action_cmd: Command) {\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n // Make a non-empty directory\n\n let dir = temp.child(\"dir\");\n\n dir.create_dir_all().unwrap();\n\n dir.child(\"file\").touch().unwrap();\n\n\n\n // distant action remove {path}\n\n action_cmd\n\n .args(&[\"remove\", dir.to_str().unwrap()])\n\n .assert()\n\n .code(ExitCode::Software.to_i32())\n\n .stdout(\"\")\n\n .stderr(FAILURE_LINE.clone());\n\n\n\n dir.assert(predicate::path::exists());\n\n dir.assert(predicate::path::is_dir());\n\n}\n\n\n", "file_path": "tests/cli/action/remove.rs", "rank": 79, "score": 103495.63913373718 }, { "content": "#[rstest]\n\nfn yield_an_error_when_fails(mut action_cmd: Command) {\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let src = temp.child(\"dir\");\n\n let dst = temp.child(\"dir2\");\n\n\n\n // distant action copy {src} {dst}\n\n action_cmd\n\n .args(&[\"copy\", src.to_str().unwrap(), dst.to_str().unwrap()])\n\n .assert()\n\n .code(ExitCode::Software.to_i32())\n\n .stdout(\"\")\n\n .stderr(FAILURE_LINE.clone());\n\n\n\n src.assert(predicate::path::missing());\n\n dst.assert(predicate::path::missing());\n\n}\n\n\n", "file_path": "tests/cli/action/copy.rs", "rank": 80, "score": 103495.63913373718 }, { "content": "#[cfg(feature = \"structopt\")]\n\nfn parse_byte_vec(src: &str) -> ByteVec {\n\n src.as_bytes().to_vec()\n\n}\n\n\n\n/// Represents the request to be performed on the remote machine\n\n#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"snake_case\", deny_unknown_fields)]\n\npub struct Request {\n\n /// A name tied to the requester (tenant)\n\n pub tenant: String,\n\n\n\n /// A unique id associated with the request\n\n pub id: usize,\n\n\n\n /// The main payload containing a collection of data comprising one or more actions\n\n pub payload: Vec<RequestData>,\n\n}\n\n\n\nimpl Request {\n\n /// Creates a new request, generating a unique id for it\n", "file_path": "distant-core/src/data.rs", "rank": 81, "score": 103339.94574320196 }, { "content": "#[derive(Clone, Debug, Deserialize)]\n\n#[serde(default)]\n\nstruct LogOpts {\n\n /// Indicating whether or not to log to terminal\n\n terminal: bool,\n\n\n\n /// Path to file to store logs\n\n file: Option<PathBuf>,\n\n\n\n /// Base level at which to write logs\n\n /// (e.g. if debug then trace would not be logged)\n\n level: LogLevel,\n\n}\n\n\n\nimpl Default for LogOpts {\n\n fn default() -> Self {\n\n Self {\n\n terminal: false,\n\n file: None,\n\n level: LogLevel::Warn,\n\n }\n\n }\n\n}\n\n\n", "file_path": "distant-lua/src/log.rs", "rank": 82, "score": 102990.62167671513 }, { "content": "#[rstest]\n\nfn should_capture_and_print_stdout(mut action_cmd: Command) {\n\n // distant action proc-run {cmd} [args]\n\n action_cmd\n\n .args(&[\"proc-run\", \"--\"])\n\n .arg(SCRIPT_RUNNER.as_str())\n\n .arg(ECHO_ARGS_TO_STDOUT_SH.to_str().unwrap())\n\n .arg(\"hello world\")\n\n .assert()\n\n .success()\n\n .stdout(\"hello world\")\n\n .stderr(\"\");\n\n}\n\n\n", "file_path": "tests/cli/action/proc_run.rs", "rank": 83, "score": 102217.53163284223 }, { "content": "#[rstest]\n\nfn should_report_ok_when_done(mut action_cmd: Command) {\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n let file = temp.child(\"test-file\");\n\n file.write_str(FILE_CONTENTS).unwrap();\n\n\n\n // distant action file-append {path} -- {contents}\n\n action_cmd\n\n .args(&[\n\n \"file-append\",\n\n file.to_str().unwrap(),\n\n \"--\",\n\n APPENDED_FILE_CONTENTS,\n\n ])\n\n .assert()\n\n .success()\n\n .stdout(\"\")\n\n .stderr(\"\");\n\n\n\n // NOTE: We wait a little bit to give the OS time to fully write to file\n\n std::thread::sleep(std::time::Duration::from_millis(100));\n\n\n\n // Because we're talking to a local server, we can verify locally\n\n file.assert(format!(\"{}{}\", FILE_CONTENTS, APPENDED_FILE_CONTENTS));\n\n}\n\n\n", "file_path": "tests/cli/action/file_append.rs", "rank": 84, "score": 102213.48684191665 }, { "content": "#[rstest]\n\nfn should_report_ok_when_done(mut action_cmd: Command) {\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n let file = temp.child(\"test-file\");\n\n\n\n // distant action file-write {path} -- {contents}\n\n action_cmd\n\n .args(&[\"file-write\", file.to_str().unwrap(), \"--\", FILE_CONTENTS])\n\n .assert()\n\n .success()\n\n .stdout(\"\")\n\n .stderr(\"\");\n\n\n\n // NOTE: We wait a little bit to give the OS time to fully write to file\n\n std::thread::sleep(std::time::Duration::from_millis(100));\n\n\n\n // Because we're talking to a local server, we can verify locally\n\n file.assert(FILE_CONTENTS);\n\n}\n\n\n", "file_path": "tests/cli/action/file_write.rs", "rank": 85, "score": 102213.48684191665 }, { "content": "#[rstest]\n\nfn should_report_ok_when_done(mut action_cmd: Command) {\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n let dir = temp.child(\"dir\");\n\n\n\n // distant action dir-create {path}\n\n action_cmd\n\n .args(&[\"dir-create\", dir.to_str().unwrap()])\n\n .assert()\n\n .success()\n\n .stdout(\"\")\n\n .stderr(\"\");\n\n\n\n dir.assert(predicate::path::exists());\n\n dir.assert(predicate::path::is_dir());\n\n}\n\n\n", "file_path": "tests/cli/action/dir_create.rs", "rank": 86, "score": 102213.48684191665 }, { "content": "#[rstest]\n\nfn should_support_json_output_for_error(mut action_cmd: Command) {\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let src = temp.child(\"dir\");\n\n let dst = temp.child(\"dir2\");\n\n\n\n let req = Request {\n\n id: rand::random(),\n\n tenant: random_tenant(),\n\n payload: vec![RequestData::Rename {\n\n src: src.to_path_buf(),\n\n dst: dst.to_path_buf(),\n\n }],\n\n };\n\n\n\n // distant action --format json --interactive\n\n let cmd = action_cmd\n\n .args(&[\"--format\", \"json\"])\n\n .arg(\"--interactive\")\n\n .write_stdin(format!(\"{}\\n\", serde_json::to_string(&req).unwrap()))\n", "file_path": "tests/cli/action/rename.rs", "rank": 87, "score": 101959.34269741534 }, { "content": "#[rstest]\n\nfn yield_an_error_when_fails(mut action_cmd: Command) {\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n let file = temp.child(\"missing-dir\").child(\"missing-file\");\n\n\n\n // distant action file-append {path} -- {contents}\n\n action_cmd\n\n .args(&[\n\n \"file-append\",\n\n file.to_str().unwrap(),\n\n \"--\",\n\n APPENDED_FILE_CONTENTS,\n\n ])\n\n .assert()\n\n .code(ExitCode::Software.to_i32())\n\n .stdout(\"\")\n\n .stderr(FAILURE_LINE.clone());\n\n\n\n // Because we're talking to a local server, we can verify locally\n\n file.assert(predicates::path::missing());\n\n}\n\n\n", "file_path": "tests/cli/action/file_append.rs", "rank": 88, "score": 101959.34269741534 }, { "content": "#[rstest]\n\nfn yield_an_error_when_fails(mut action_cmd: Command) {\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n let file = temp.child(\"missing-dir\").child(\"missing-file\");\n\n\n\n // distant action file-write {path} -- {contents}\n\n action_cmd\n\n .args(&[\"file-write\", file.to_str().unwrap(), \"--\", FILE_CONTENTS])\n\n .assert()\n\n .code(ExitCode::Software.to_i32())\n\n .stdout(\"\")\n\n .stderr(FAILURE_LINE.clone());\n\n\n\n // Because we're talking to a local server, we can verify locally\n\n file.assert(predicates::path::missing());\n\n}\n\n\n", "file_path": "tests/cli/action/file_write.rs", "rank": 89, "score": 101959.34269741534 }, { "content": "#[rstest]\n\nfn yield_an_error_when_fails(mut action_cmd: Command) {\n\n let temp = make_directory();\n\n let dir = temp.child(\"missing-dir\");\n\n\n\n // distant action dir-read {path}\n\n action_cmd\n\n .args(&[\"dir-read\", dir.to_str().unwrap()])\n\n .assert()\n\n .code(ExitCode::Software.to_i32())\n\n .stdout(\"\")\n\n .stderr(FAILURE_LINE.clone());\n\n}\n\n\n", "file_path": "tests/cli/action/dir_read.rs", "rank": 90, "score": 101959.34269741534 }, { "content": "#[rstest]\n\nfn should_support_json_output_for_error(mut action_cmd: Command) {\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n // Don't create file\n\n let file = temp.child(\"file\");\n\n\n\n let req = Request {\n\n id: rand::random(),\n\n tenant: random_tenant(),\n\n payload: vec![RequestData::Metadata {\n\n path: file.to_path_buf(),\n\n canonicalize: false,\n\n resolve_file_type: false,\n\n }],\n\n };\n\n\n\n // distant action --format json --interactive\n\n let cmd = action_cmd\n\n .args(&[\"--format\", \"json\"])\n\n .arg(\"--interactive\")\n", "file_path": "tests/cli/action/metadata.rs", "rank": 91, "score": 101959.34269741534 }, { "content": "#[rstest]\n\nfn should_support_json_output_for_error(mut action_cmd: Command) {\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n // Make a non-empty directory so we fail to remove it\n\n let dir = temp.child(\"dir\");\n\n dir.create_dir_all().unwrap();\n\n dir.child(\"file\").touch().unwrap();\n\n\n\n let req = Request {\n\n id: rand::random(),\n\n tenant: random_tenant(),\n\n payload: vec![RequestData::Remove {\n\n path: dir.to_path_buf(),\n\n force: false,\n\n }],\n\n };\n\n\n\n // distant action --format json --interactive\n\n let cmd = action_cmd\n\n .args(&[\"--format\", \"json\"])\n", "file_path": "tests/cli/action/remove.rs", "rank": 92, "score": 101959.34269741534 }, { "content": "#[rstest]\n\nfn yield_an_error_when_fails(mut action_cmd: Command) {\n\n // distant action proc-run {cmd} [args]\n\n action_cmd\n\n .args(&[\"proc-run\", \"--\"])\n\n .arg(DOES_NOT_EXIST_BIN.to_str().unwrap())\n\n .assert()\n\n .code(ExitCode::IoError.to_i32())\n\n .stdout(\"\")\n\n .stderr(\"\");\n\n}\n\n\n", "file_path": "tests/cli/action/proc_run.rs", "rank": 93, "score": 101959.34269741534 }, { "content": "#[rstest]\n\nfn should_support_json_output_for_error(mut action_cmd: Command) {\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let src = temp.child(\"dir\");\n\n let dst = temp.child(\"dir2\");\n\n\n\n let req = Request {\n\n id: rand::random(),\n\n tenant: random_tenant(),\n\n payload: vec![RequestData::Copy {\n\n src: src.to_path_buf(),\n\n dst: dst.to_path_buf(),\n\n }],\n\n };\n\n\n\n // distant action --format json --interactive\n\n let cmd = action_cmd\n\n .args(&[\"--format\", \"json\"])\n\n .arg(\"--interactive\")\n\n .write_stdin(format!(\"{}\\n\", serde_json::to_string(&req).unwrap()))\n", "file_path": "tests/cli/action/copy.rs", "rank": 94, "score": 101959.34269741534 }, { "content": "#[rstest]\n\nfn yield_an_error_when_fails(mut action_cmd: Command) {\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n let file = temp.child(\"missing-file\");\n\n\n\n // distant action file-read {path}\n\n action_cmd\n\n .args(&[\"file-read\", file.to_str().unwrap()])\n\n .assert()\n\n .code(ExitCode::Software.to_i32())\n\n .stdout(\"\")\n\n .stderr(FAILURE_LINE.clone());\n\n}\n\n\n", "file_path": "tests/cli/action/file_read.rs", "rank": 95, "score": 101959.34269741534 }, { "content": "#[rstest]\n\nfn yield_an_error_when_fails(mut action_cmd: Command) {\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n let dir = temp.child(\"missing-dir\").child(\"dir\");\n\n\n\n // distant action dir-create {path}\n\n action_cmd\n\n .args(&[\"dir-create\", dir.to_str().unwrap()])\n\n .assert()\n\n .code(ExitCode::Software.to_i32())\n\n .stdout(\"\")\n\n .stderr(FAILURE_LINE.clone());\n\n\n\n dir.assert(predicate::path::missing());\n\n}\n\n\n", "file_path": "tests/cli/action/dir_create.rs", "rank": 96, "score": 101959.34269741534 }, { "content": "#[mlua::lua_module]\n\nfn distant_lua(lua: &Lua) -> LuaResult<LuaTable> {\n\n let exports = lua.create_table()?;\n\n\n\n // Provide a static pending type used when consumer wants to use async functions\n\n // directly without wrapping them with a scheduler\n\n exports.set(\"pending\", utils::pending(lua)?)?;\n\n\n\n // API modules available for users\n\n exports.set(\"log\", log::make_log_tbl(lua)?)?;\n\n exports.set(\"session\", session::make_session_tbl(lua)?)?;\n\n exports.set(\"utils\", utils::make_utils_tbl(lua)?)?;\n\n exports.set(\"version\", make_version_tbl(lua)?)?;\n\n\n\n Ok(exports)\n\n}\n\n\n\nmacro_rules! set_nonempty_env {\n\n ($tbl:ident, $key:literal, $env_key:literal) => {{\n\n let value = env!($env_key);\n\n if !value.is_empty() {\n\n $tbl.set($key, value)?;\n\n }\n\n }};\n\n}\n\n\n", "file_path": "distant-lua/src/lib.rs", "rank": 97, "score": 101283.81432996053 }, { "content": "#[rstest]\n\nfn should_use_absolute_paths_if_specified(mut action_cmd: Command) {\n\n let temp = make_directory();\n\n\n\n // NOTE: Our root path is always canonicalized, so the absolute path\n\n // provided is our canonicalized root path prepended\n\n let root_path = temp.to_path_buf().canonicalize().unwrap();\n\n\n\n // distant action dir-read --absolute {path}\n\n action_cmd\n\n .args(&[\"dir-read\", \"--absolute\", temp.to_str().unwrap()])\n\n .assert()\n\n .success()\n\n .stdout(format!(\n\n \"{}\\n\",\n\n vec![\n\n format!(\"{}/{}\", root_path.to_str().unwrap(), \"dir1/\"),\n\n format!(\"{}/{}\", root_path.to_str().unwrap(), \"dir2/\"),\n\n format!(\"{}/{}\", root_path.to_str().unwrap(), \"file1\"),\n\n format!(\"{}/{}\", root_path.to_str().unwrap(), \"file2\"),\n\n ]\n\n .join(\"\\n\")\n\n ))\n\n .stderr(\"\");\n\n}\n\n\n", "file_path": "tests/cli/action/dir_read.rs", "rank": 98, "score": 100744.50153419763 }, { "content": "#[rstest]\n\nfn should_report_ok_when_done(mut action_cmd: Command) {\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n let file = temp.child(\"test-file\");\n\n file.write_str(FILE_CONTENTS).unwrap();\n\n\n\n // distant action file-append-text {path} -- {contents}\n\n action_cmd\n\n .args(&[\n\n \"file-append-text\",\n\n file.to_str().unwrap(),\n\n \"--\",\n\n APPENDED_FILE_CONTENTS,\n\n ])\n\n .assert()\n\n .success()\n\n .stdout(\"\")\n\n .stderr(\"\");\n\n\n\n // NOTE: We wait a little bit to give the OS time to fully write to file\n\n std::thread::sleep(std::time::Duration::from_millis(100));\n\n\n\n // Because we're talking to a local server, we can verify locally\n\n file.assert(format!(\"{}{}\", FILE_CONTENTS, APPENDED_FILE_CONTENTS));\n\n}\n\n\n", "file_path": "tests/cli/action/file_append_text.rs", "rank": 99, "score": 100728.4958970173 } ]
Rust
crates/brix_processor/tests/to_case.rs
miapolis/brix
4a3939db28fff05a7a45fa32f1a75cd9abc29be3
use lazy_static::lazy_static; use std::collections::HashMap; mod common; lazy_static! { static ref TO_CASE_CONTEXT: HashMap<String, String> = { let mut map = HashMap::new(); map.insert(String::from("one"), String::from("tHIS iS tOGGLE cASE")); map.insert(String::from("two"), String::from("ThisIsPascalCase")); map.insert(String::from("three"), String::from("thisIsCamelCase")); map.insert(String::from("four"), String::from("ThisIsUpperCamelCase")); map.insert(String::from("five"), String::from("this_is_snake_case")); map.insert( String::from("six"), String::from("THIS_IS_UPPER_SNAKE_CASE"), ); map.insert( String::from("seven"), String::from("THIS_IS_SCREAMING_SNAKE_CASE"), ); map.insert(String::from("eight"), String::from("this-is-kebab-case")); map.insert(String::from("nine"), String::from("THIS-IS-COBOL-CASE")); map.insert(String::from("ten"), String::from("This-Is-Train-Case")); map.insert(String::from("eleven"), String::from("thisisflatcase")); map.insert(String::from("twelve"), String::from("THISISUPPERFLATCASE")); map.insert( String::from("thirteen"), String::from("tHiS iS aLtErNaTiNg CaSe"), ); map }; static ref TO_CASE_ASSERTIONS: Vec<&'static str> = vec![ "t hIS i s t oGGLE c aSE", "tHIS iS pASCAL cASE", "tHIS iS cAMEL cASE", "tHIS iS uPPER cAMEL cASE", "tHIS iS sNAKE cASE", "tHIS iS uPPER sNAKE cASE", "tHIS iS sCREAMING sNAKE cASE", "tHIS iS kEBAB cASE", "tHIS iS cOBOL cASE", "tHIS iS tRAIN cASE", "tHISISFLATCASE", "tHISISUPPERFLATCASE", "t hI s i s a lT eR nA tI nG cA sE", "THisISTOggleCAse", "ThisIsPascalCase", "ThisIsCamelCase", "ThisIsUpperCamelCase", "ThisIsSnakeCase", "ThisIsUpperSnakeCase", "ThisIsScreamingSnakeCase", "ThisIsKebabCase", "ThisIsCobolCase", "ThisIsTrainCase", "Thisisflatcase", "Thisisupperflatcase", "THiSISALtErNaTiNgCaSe", "tHisISTOggleCAse", "thisIsPascalCase", "thisIsCamelCase", "thisIsUpperCamelCase", "thisIsSnakeCase", "thisIsUpperSnakeCase", "thisIsScreamingSnakeCase", "thisIsKebabCase", "thisIsCobolCase", "thisIsTrainCase", "thisisflatcase", "thisisupperflatcase", "tHiSISALtErNaTiNgCaSe", "THisISTOggleCAse", "ThisIsPascalCase", "ThisIsCamelCase", "ThisIsUpperCamelCase", "ThisIsSnakeCase", "ThisIsUpperSnakeCase", "ThisIsScreamingSnakeCase", "ThisIsKebabCase", "ThisIsCobolCase", "ThisIsTrainCase", "Thisisflatcase", "Thisisupperflatcase", "THiSISALtErNaTiNgCaSe", "t_his_i_s_t_oggle_c_ase", "this_is_pascal_case", "this_is_camel_case", "this_is_upper_camel_case", "this_is_snake_case", "this_is_upper_snake_case", "this_is_screaming_snake_case", "this_is_kebab_case", "this_is_cobol_case", "this_is_train_case", "thisisflatcase", "thisisupperflatcase", "t_hi_s_i_s_a_lt_er_na_ti_ng_ca_se", "T_HIS_I_S_T_OGGLE_C_ASE", "THIS_IS_PASCAL_CASE", "THIS_IS_CAMEL_CASE", "THIS_IS_UPPER_CAMEL_CASE", "THIS_IS_SNAKE_CASE", "THIS_IS_UPPER_SNAKE_CASE", "THIS_IS_SCREAMING_SNAKE_CASE", "THIS_IS_KEBAB_CASE", "THIS_IS_COBOL_CASE", "THIS_IS_TRAIN_CASE", "THISISFLATCASE", "THISISUPPERFLATCASE", "T_HI_S_I_S_A_LT_ER_NA_TI_NG_CA_SE", "T_HIS_I_S_T_OGGLE_C_ASE", "THIS_IS_PASCAL_CASE", "THIS_IS_CAMEL_CASE", "THIS_IS_UPPER_CAMEL_CASE", "THIS_IS_SNAKE_CASE", "THIS_IS_UPPER_SNAKE_CASE", "THIS_IS_SCREAMING_SNAKE_CASE", "THIS_IS_KEBAB_CASE", "THIS_IS_COBOL_CASE", "THIS_IS_TRAIN_CASE", "THISISFLATCASE", "THISISUPPERFLATCASE", "T_HI_S_I_S_A_LT_ER_NA_TI_NG_CA_SE", "t-his-i-s-t-oggle-c-ase", "this-is-pascal-case", "this-is-camel-case", "this-is-upper-camel-case", "this-is-snake-case", "this-is-upper-snake-case", "this-is-screaming-snake-case", "this-is-kebab-case", "this-is-cobol-case", "this-is-train-case", "thisisflatcase", "thisisupperflatcase", "t-hi-s-i-s-a-lt-er-na-ti-ng-ca-se", "T-HIS-I-S-T-OGGLE-C-ASE", "THIS-IS-PASCAL-CASE", "THIS-IS-CAMEL-CASE", "THIS-IS-UPPER-CAMEL-CASE", "THIS-IS-SNAKE-CASE", "THIS-IS-UPPER-SNAKE-CASE", "THIS-IS-SCREAMING-SNAKE-CASE", "THIS-IS-KEBAB-CASE", "THIS-IS-COBOL-CASE", "THIS-IS-TRAIN-CASE", "THISISFLATCASE", "THISISUPPERFLATCASE", "T-HI-S-I-S-A-LT-ER-NA-TI-NG-CA-SE", "T-His-I-S-T-Oggle-C-Ase", "This-Is-Pascal-Case", "This-Is-Camel-Case", "This-Is-Upper-Camel-Case", "This-Is-Snake-Case", "This-Is-Upper-Snake-Case", "This-Is-Screaming-Snake-Case", "This-Is-Kebab-Case", "This-Is-Cobol-Case", "This-Is-Train-Case", "Thisisflatcase", "Thisisupperflatcase", "T-Hi-S-I-S-A-Lt-Er-Na-Ti-Ng-Ca-Se", "thisistogglecase", "thisispascalcase", "thisiscamelcase", "thisisuppercamelcase", "thisissnakecase", "thisisuppersnakecase", "thisisscreamingsnakecase", "thisiskebabcase", "thisiscobolcase", "thisistraincase", "thisisflatcase", "thisisupperflatcase", "thisisalternatingcase", "THISISTOGGLECASE", "THISISPASCALCASE", "THISISCAMELCASE", "THISISUPPERCAMELCASE", "THISISSNAKECASE", "THISISUPPERSNAKECASE", "THISISSCREAMINGSNAKECASE", "THISISKEBABCASE", "THISISCOBOLCASE", "THISISTRAINCASE", "THISISFLATCASE", "THISISUPPERFLATCASE", "THISISALTERNATINGCASE", "t HiS i S t OgGlE c AsE", "tHiS iS pAsCaL cAsE", "tHiS iS cAmEl CaSe", "tHiS iS uPpEr CaMeL cAsE", "tHiS iS sNaKe CaSe", "tHiS iS uPpEr SnAkE cAsE", "tHiS iS sCrEaMiNg SnAkE cAsE", "tHiS iS kEbAb CaSe", "tHiS iS cObOl CaSe", "tHiS iS tRaIn CaSe", "tHiSiSfLaTcAsE", "tHiSiSuPpErFlAtCaSe", "t Hi S i S a Lt Er Na Ti Ng Ca Se", ]; } #[test] fn to_case() { let core = common::setup(); let context = brix_processor::create_context(TO_CASE_CONTEXT.clone()); let contents = common::load_file("case").unwrap(); let result = core.process(contents, context).unwrap(); assert!(common::line_assert(result, TO_CASE_ASSERTIONS.to_vec())) }
use lazy_static::lazy_static; use std::collections::HashMap; mod common; lazy_static! { static ref TO_CASE_CONTEXT: HashMap<String, String> = { let mut map = HashMap::new(); map.insert(String::from("one"), String::from("tHIS iS tOGGLE cASE")); map.insert(String::from("two"), String::from("ThisIsPascalCase")); map.insert(String::from("three"), String::from("thisIsCamelCase")); map.insert(String::from("four"), String::from("ThisIsUpperCamelCase")); map.insert(String::from("five"), String::from("this_is_snake_case")); map.insert( String::from("six"), String::from("THIS_IS_UPPER_SNAKE_CASE"), ); map.insert( String::from("seven"), String::from("THIS_IS_SCREAMING_SNAKE_CASE"), ); map.insert(String::from("eight"), String::from("this-is-kebab-case")); map.insert(String::from("nine"), String::from("THIS-IS-COBOL-CASE")); map.insert(String::from("ten"), String::from("This-Is-Train-Case")); map.insert(String::from("eleven"), String::from("thisisflatcase")); map.insert(String::from("twelve"), String::from("THISISUPPERFLATCASE")); map.insert( String::from("thirteen"), String::from("tHiS iS aLtErNaTiNg CaSe"), ); map }; static ref TO_CASE_ASSERTIONS: Vec<&'static str> = vec![ "t hIS i s t oGGLE c aSE", "tHIS iS pASCAL cASE", "tHIS iS cAMEL cASE", "tHIS iS uPPER cAMEL cASE", "tHIS iS sNAKE cASE", "tHIS iS uPPER sNAKE cASE", "tHIS iS sCREAMING sNAKE cASE", "tHIS iS kEBAB cASE", "tHIS iS cOBOL cASE", "tHIS iS tRAIN cASE", "tHISISFLATCASE", "tHISISUPPERFLATCASE", "t hI s i s a lT eR nA tI nG cA sE", "THisISTOggleCAse", "ThisIsPascalCase", "ThisIsCamelCase", "ThisIsUpperCamelCase", "ThisIsSnakeCase", "ThisIsUpperSnakeCase", "ThisIsScreamingSnakeCase", "ThisIsKebabCase", "ThisIsCobolCase", "ThisIsTrainCase", "Thisisflatcase", "Thisisupperflatcase", "THiSISALtErNaTiNgCaSe", "tHisISTOggleCAse", "thisIsPascalCase", "thisIsCamelCase", "thisIsUpperCamelCase", "thisIsSnakeCase", "thisIsUpperSnakeCase", "thisIsScreamingSnakeCase", "thisIsKebabCase", "thisIsCobolCase", "thisIsTrainCase", "thisisflatcase", "thisisupperflatcase", "tHiSISALtErNaTiNgCaSe", "THisISTOggleCAse", "ThisIsPascalCase", "ThisIsCamelCase", "ThisIsUpperCamelCase", "ThisIsSnakeCase", "ThisIsUpperSnakeCase", "ThisIsScreamingSnakeCase", "ThisIsKebabCase", "ThisIsCobolCase", "ThisIsTrainCase", "Thisisflatcase", "Thisisupperflatcase", "THiSISALtErNaTiNgCaSe", "t_his_i_s_t_oggle_c_ase", "this_is_pascal_case", "this_is_camel_case", "this_is_upper_camel_case", "this_is_snake_case", "this_is_upper_snake_case", "this_is_screaming_snake_case", "this_is_kebab_case", "this_is_cobol_case", "this_is_train_case", "thisisflatcase", "thisisupperflatcase", "t_hi_s_i_s_a_lt_er_na_ti_ng_ca_se", "T_HIS_I_S_T_OGGLE_C_ASE", "THIS_IS_PASCAL_CASE", "THIS_IS_CAMEL_CASE", "THIS_IS_UPPER_CAMEL_CASE", "THIS_IS_SNAKE_CASE", "THIS_IS_UPPER_SNAKE_CASE", "THIS_IS_SCREAMING_SNAKE_CASE", "THIS_IS_KEBAB_CASE", "THIS_IS_COBOL_CASE", "THIS_IS_TRAIN_CASE", "THISISFLATCASE", "THISISUPPERFLATCASE", "T_HI_S_I_S_A_LT_ER_NA_TI_NG_CA_SE", "T_HIS_I_S_T_OGGLE_C_ASE", "THIS_IS_PASCAL_CASE", "THIS_IS_CAMEL_CASE", "THIS_IS_UPPER_CAMEL_CASE", "THIS_IS_SNAKE_CASE", "THIS_IS_UPPER_SNAKE_CASE", "THIS_IS_SCREAMING_SNAKE_CASE", "THIS_IS_KEBAB_CASE", "THIS_IS_COBOL_CASE", "THIS_IS_TRAIN_CASE", "THISISFLATCASE", "THISISUPPERFLATCASE", "T_HI_S_I_S_A_LT_ER_NA_TI_NG_CA_SE", "t-his-i-s-t-oggle-c-ase", "this-is-pascal-case", "this-is-camel-case", "this-is-upper-camel-case", "this-is-snake-case", "this-is-upper-snake-case", "this-is-screaming-snake-case", "this-is-kebab-case", "this-is-cobol-case", "this-is-train-case", "thisisflatcase", "thisisupperflatcase", "t-hi-s-i-s-a-lt-er-na-ti-ng-ca-se", "T-HIS-I-S-T-OGGLE-C-ASE", "THIS-IS-PASCAL-CASE", "THIS-IS-CAMEL-CASE", "THIS-IS-UPPER-CAMEL-CASE", "THIS-IS-SNAKE-CASE", "THIS-IS-UPPER-SNAKE-CASE", "THIS-IS-SCREAMING-SNAKE-CASE", "THIS-IS-KEBAB-CASE", "THIS-IS-COBOL-CASE", "THIS-IS-TRAIN-CASE", "THISISFLATCASE", "THISISUPPERFLATCASE", "T-HI-S-I-S-A-LT-ER-NA-TI-NG-CA-SE", "T-His-I-S-T-Oggle-C-Ase", "This-Is-Pascal-Case", "This-Is-Camel-Case", "This-Is-Upper-Camel-Case", "This-Is-Snake-Case", "This-Is-Upper-Snake-Case", "This-Is-Screaming-Snake-Case", "This-Is-Kebab-Case", "This-Is-Cobol-Case", "This-Is-Train-Case", "Thisisflatcase", "Thisisupperflatcase", "T-Hi-S-I-S-A-Lt-Er-Na-Ti-Ng-Ca-Se", "thisistogglecase", "thisispascalcase", "thisiscamelcase", "thisisuppercamelcase", "thisissnakecase", "thisisuppersnakecase", "thisisscreamingsnakecase", "thisiskebabcase", "thisiscobolcase", "thisistraincase", "thisisflatcase", "thisisupperflatcase", "thisisalternatingcase", "THISISTOGGLECASE", "THISISPASCALCASE", "THISISCAMELCASE", "THISISUPPERCAMELCASE", "THISISSNAKECASE", "THISISUPPERSNAKECASE", "THISISSCREAMINGSNAKECASE", "THISISKEBABCASE", "THISISCOBOLCASE", "THISISTRAINCASE", "THISISFLATCASE", "THISISUPPERFLATCASE", "THISISALTERNATINGCASE", "t HiS i S t OgGlE c AsE", "tHiS iS pAsCaL cAsE", "tHiS iS cAmEl CaSe", "tHiS iS uPpEr CaMeL cAsE", "tHiS iS sNaKe CaSe", "tHiS iS uPpEr SnAkE cAsE", "tHiS iS sCrEaMiNg SnAkE cAsE", "tHiS iS kEbAb CaSe", "tHiS iS cObOl CaSe", "tHiS iS tRaIn CaSe", "tHiSiSfLaTcAsE", "tHiSiSuPpErFlAtCaSe", "t Hi S i S a Lt Er Na Ti Ng Ca Se", ]; } #[test]
fn to_case() { let core = common::setup(); let context = brix_processor::create_context(TO_CASE_CONTEXT.clone()); let contents = common::load_file("case").unwrap(); let result = core.process(contents, context).unwrap(); assert!(common::line_assert(result, TO_CASE_ASSERTIONS.to_vec())) }
function_block-full_function
[ { "content": "pub fn line_assert(contents: String, assertion: Vec<&str>) -> bool {\n\n let mut itr = 0;\n\n for line in contents.lines() {\n\n if assertion[itr] != line {\n\n return false;\n\n }\n\n itr += 1;\n\n }\n\n return true;\n\n}\n", "file_path": "crates/brix_processor/tests/common.rs", "rank": 0, "score": 150714.27955861244 }, { "content": "pub fn load_file(hbs_name: &str) -> Result<String, std::io::Error> {\n\n let path = PathBuf::from(format!(\"tests/templates/{}.hbs\", hbs_name));\n\n read_to_string(path)\n\n}\n\n\n", "file_path": "crates/brix_processor/tests/common.rs", "rank": 1, "score": 108616.0614185862 }, { "content": "/// Current type used for contexts.\n\ntype StdContext = HashMap<String, String>;\n\n\n\n/// Struct containing all types of contexts used in commands.\n\npub struct ContextMap {\n\n pub cli_positional: StdContext,\n\n pub config_global: StdContext,\n\n pub command_local: StdContext,\n\n}\n\n\n\nimpl ContextMap {\n\n /// Merges all contexts together into a single map respecting the priority of each.\n\n pub fn do_merge(&self) -> StdContext {\n\n let mut merged = StdContext::new();\n\n // Start with cli positional args\n\n for (key, value) in self.cli_positional.clone().into_iter() {\n\n merged.insert(key, value);\n\n }\n\n // Global is lowest priority\n\n for (key, value) in self.config_global.clone().into_iter() {\n\n merged.insert(key, value);\n\n }\n\n // Local overrides global context\n\n for (key, value) in self.command_local.clone().into_iter() {\n\n merged.insert(key, value);\n\n }\n\n\n\n merged\n\n }\n\n}\n\n\n", "file_path": "crates/brix_common/src/context.rs", "rank": 2, "score": 107702.34399546358 }, { "content": "/// Create a valid context map by serializing into JSON.\n\npub fn create_context(data: HashMap<String, String>) -> Map<String, Json> {\n\n let mut res = Map::new();\n\n for (key, value) in data.into_iter() {\n\n res.insert(key, json!(value));\n\n }\n\n res\n\n}\n", "file_path": "crates/brix_processor/src/lib.rs", "rank": 3, "score": 101614.59740488284 }, { "content": "/// Utility function for displaying Windows paths more or less the same as Unix paths.\n\npub fn display_path(path: &str) -> String {\n\n let path = path.replace(\"//\", \"/\");\n\n let path = path.replace(\"\\\\\\\\\", \"/\");\n\n path.replace(\"\\\\\", \"/\")\n\n}\n", "file_path": "crates/brix_core/util.rs", "rank": 4, "score": 83640.64881288531 }, { "content": "/// Basic wrapper around the `dialoguer::Select` prompt with stdout.\n\npub fn do_select(items: Vec<&str>) -> Result<usize, BrixError> {\n\n let selection = Select::new()\n\n .items(&items)\n\n .default(0)\n\n .interact_on_opt(&Term::stdout())?;\n\n\n\n match selection {\n\n Some(index) => Ok(index),\n\n None => Err(BrixError::with(\"no option selected!\")),\n\n }\n\n}\n", "file_path": "crates/brix_cli/src/select.rs", "rank": 5, "score": 71761.20307658875 }, { "content": "#[test]\n\nfn to_upper() {\n\n let core = common::setup();\n\n let context = brix_processor::create_context(TO_UPPER_CONTEXT.clone());\n\n let contents = common::load_file(\"upper\").unwrap();\n\n\n\n let result = core.process(contents, context).unwrap();\n\n assert!(common::line_assert(result, TO_UPPER_ASSERTIONS.to_vec()))\n\n}\n", "file_path": "crates/brix_processor/tests/to_upper.rs", "rank": 6, "score": 63632.48776898745 }, { "content": "fn arg_project() -> Arg<'static, 'static> {\n\n const HELP: &str = \"The name of the project you're generating code for\";\n\n Arg::with_name(PROJECT)\n\n .value_name(\"project\")\n\n .help(HELP)\n\n .required(true)\n\n}\n\n\n", "file_path": "crates/brix_cli/src/app.rs", "rank": 8, "score": 58685.90100100352 }, { "content": "fn arg_module() -> Arg<'static, 'static> {\n\n const HELP: &str = \"The name of the module to be created within the project\";\n\n Arg::with_name(MODULE)\n\n .value_name(\"module\")\n\n .help(HELP)\n\n .required(true)\n\n}\n\n\n", "file_path": "crates/brix_cli/src/app.rs", "rank": 9, "score": 58685.90100100352 }, { "content": "fn flag_workdir() -> Arg<'static, 'static> {\n\n const HELP: &str =\n\n \"The current working directory to use. Defaults to the directory where brix is run from\";\n\n Arg::with_name(WORKDIR)\n\n .value_name(\"workdir\")\n\n .help(HELP)\n\n .long(\"workdir\")\n\n .short(\"w\")\n\n .takes_value(true)\n\n .validator(is_valid_path)\n\n}\n\n\n", "file_path": "crates/brix_cli/src/app.rs", "rank": 10, "score": 58685.90100100352 }, { "content": "fn arg_language() -> Arg<'static, 'static> {\n\n const HELP: &str = \"The programming language you're generating code for. Directory under -d\";\n\n Arg::with_name(LANGUAGE)\n\n .value_name(\"language\")\n\n .help(HELP)\n\n .required(true)\n\n}\n\n\n", "file_path": "crates/brix_cli/src/app.rs", "rank": 11, "score": 58685.90100100352 }, { "content": "fn flag_config_dir() -> Arg<'static, 'static> {\n\n const HELP: &str = \"\n\nDirectory path from the current working directory.\n\nTemplates and configs are looked up relative to here.\n\nIf the config isn't found here, then ~/.config/brix will be searched\n\n \";\n\n Arg::with_name(CONFIG_DIR)\n\n .value_name(\"config dir\")\n\n .help(HELP)\n\n .long(\"config-dir\")\n\n .short(\"d\")\n\n .takes_value(true)\n\n .validator(is_valid_path)\n\n}\n\n\n", "file_path": "crates/brix_cli/src/app.rs", "rank": 12, "score": 58006.98403450241 }, { "content": "fn flag_log_level() -> Arg<'static, 'static> {\n\n const HELP: &str = \"The log level to use while running a command\";\n\n Arg::with_name(LOG_LEVEL)\n\n .value_name(\"log level\")\n\n .help(HELP)\n\n .long(\"log-level\")\n\n .takes_value(true)\n\n .default_value(\"off\")\n\n .possible_values(&[\"off\", \"error\", \"warn\", \"info\", \"debug\", \"trace\"])\n\n}\n\n\n", "file_path": "crates/brix_cli/src/app.rs", "rank": 13, "score": 58006.98403450241 }, { "content": "fn arg_config_name() -> Arg<'static, 'static> {\n\n const HELP: &str = \"The type of code you're generating e.g controller, also the name of the config file without the extension\";\n\n Arg::with_name(CONFIG_NAME)\n\n .value_name(\"config name\")\n\n .help(HELP)\n\n .required(true)\n\n}\n\n\n", "file_path": "crates/brix_cli/src/app.rs", "rank": 14, "score": 58006.98403450241 }, { "content": "fn is_valid_path(v: String) -> Result<(), String> {\n\n let path = Path::new(&v);\n\n if path.exists() {\n\n Ok(())\n\n } else {\n\n Err(format!(\"The directory {} does not exist\", v))\n\n }\n\n}\n", "file_path": "crates/brix_cli/src/app.rs", "rank": 15, "score": 57709.50226574918 }, { "content": "// Copyright (c) 2021 Ethan Lerner, Caleb Cushing, and the Brix contributors\n\n//\n\n// This software is released under the MIT License.\n\n// https://opensource.org/licenses/MIT\n\n\n\nuse lazy_static::lazy_static;\n\nuse std::collections::HashMap;\n\n\n\nmod common;\n\n\n\nlazy_static! {\n\n static ref TO_UPPER_CONTEXT: HashMap<String, String> = {\n\n let mut map = HashMap::new();\n\n map.insert(String::from(\"one\"), String::from(\"alllower\"));\n\n map.insert(String::from(\"two\"), String::from(\"ALREADY_UPPER\"));\n\n map.insert(String::from(\"three\"), String::from(\"mIxEd.cAsE\"));\n\n map\n\n };\n\n static ref TO_UPPER_ASSERTIONS: Vec<&'static str> =\n\n vec![\"ALLLOWER\", \"ALREADY_UPPER\", \"MIXED.CASE\"];\n\n}\n\n\n\n#[test]\n", "file_path": "crates/brix_processor/tests/to_upper.rs", "rank": 16, "score": 57378.46859072294 }, { "content": "/// Creates the clap application and sets args\n\npub fn app() -> App<'static, 'static> {\n\n let mut app = App::new(\"brix\")\n\n .author(crate_authors!(\"\\n\"))\n\n .version(crate_version!())\n\n .max_term_width(100)\n\n .setting(AppSettings::UnifiedHelpMessage)\n\n .setting(AppSettings::ArgRequiredElseHelp)\n\n .usage(USAGE)\n\n .help_message(\"Prints this message\");\n\n\n\n app = app.arg(arg_language());\n\n app = app.arg(arg_config_name());\n\n app = app.arg(arg_project());\n\n app = app.arg(arg_module());\n\n app = app.arg(flag_config_dir());\n\n app = app.arg(flag_log_level());\n\n app = app.arg(flag_workdir());\n\n\n\n app\n\n}\n\n\n", "file_path": "crates/brix_cli/src/app.rs", "rank": 17, "score": 57218.105580120726 }, { "content": "// Copyright (c) 2021 Ethan Lerner, Caleb Cushing, and the Brix contributors\n\n//\n\n// This software is released under the MIT License.\n\n// https://opensource.org/licenses/MIT\n\n\n\nuse brix_processor::ProcessorCore;\n\nuse std::fs::read_to_string;\n\nuse std::path::PathBuf;\n\n\n", "file_path": "crates/brix_processor/tests/common.rs", "rank": 18, "score": 57125.76599041396 }, { "content": "pub fn setup<'a>() -> ProcessorCore<'a> {\n\n ProcessorCore::new()\n\n}\n\n\n", "file_path": "crates/brix_processor/tests/common.rs", "rank": 30, "score": 49951.7846421512 }, { "content": "/// Creates the initial context based off of CLI parameters.\n\npub fn cli_config_to_map(config: &brix_cli::Config) -> StdContext {\n\n macro_rules! s {\n\n ($st:expr) => {\n\n String::from($st)\n\n };\n\n }\n\n\n\n let mut map = HashMap::new();\n\n map.insert(s!(\"language\"), s!(&config.language));\n\n map.insert(s!(\"module\"), s!(&config.module));\n\n map.insert(s!(\"project\"), s!(&config.project));\n\n map\n\n}\n", "file_path": "crates/brix_common/src/context.rs", "rank": 31, "score": 43987.7103729972 }, { "content": "/// Returns a `log::LevelFilter` given a string.\n\nfn log_level_to_struct(level: &str) -> LevelFilter {\n\n match level {\n\n \"off\" => LevelFilter::Off,\n\n \"error\" => LevelFilter::Error,\n\n \"warn\" => LevelFilter::Warn,\n\n \"info\" => LevelFilter::Info,\n\n \"debug\" => LevelFilter::Debug,\n\n \"trace\" => LevelFilter::Trace,\n\n _ => unimplemented!(),\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\nmacro_rules! s {\n\n () => {\n\n String::new()\n\n };\n\n}\n\n\n\nimpl std::default::Default for Config {\n", "file_path": "crates/brix_cli/src/config.rs", "rank": 32, "score": 41890.4401100616 }, { "content": "/// ## Lifecycle\n\n/// Brix's lifecycle constists of the following steps:\n\n/// 1) Get common variables like working and home directory and create config from `brix_cli`.\n\n/// 2) Get config directory and find module given CLI parameters accordingly.\n\n/// 3) Attempt to load the found config file with `brix_config_loader`.\n\n/// 4) Create the `AppContext` with the config and `ProcessorCore` which will be used during command execution.\n\n/// 5) Run the previous loader and get back a list of commands with their respective parameters.\n\n/// 6) Iterate through the commands and execute them accordingly.\n\nfn try_main(matches: brix_cli::ArgMatches<'static>) -> Result<()> {\n\n let home_dir = home::home_dir();\n\n let config = brix_cli::Config::new(home_dir.clone(), matches);\n\n\n\n SimpleLogger::new()\n\n .with_level(config.log_level)\n\n .init()\n\n .unwrap();\n\n\n\n debug!(\"HOME DIR: {:?}\", home_dir);\n\n\n\n let default_config = PathBuf::from(\".config/brix\");\n\n let config_root = Path::new(config.config_dir.as_ref().unwrap_or(&default_config));\n\n let language_dir = Path::new(&config.language);\n\n let module_dir = config_root.join(language_dir);\n\n\n\n let found_modules = modules_from_config(&module_dir, &config);\n\n if found_modules.is_err() {\n\n brix_cli::error!(\"{}\", found_modules.unwrap_err());\n\n process::exit(2);\n", "file_path": "crates/brix_core/main.rs", "rank": 33, "score": 38090.924073944436 }, { "content": "/// Defines a command list as a vec of tuples containing a [Command] trait object and [ProcessedCommandParams].\n\ntype CommandList = Vec<(Box<dyn Command>, ProcessedCommandParams)>;\n\n\n\n/// Struct that holds current information about the loaded configs and parsers.\n\npub struct ConfigLoader<'a> {\n\n parsers: ParserList,\n\n config_file: Option<PathBuf>,\n\n config_dir: Option<PathBuf>,\n\n cli_config: &'a brix_cli::Config,\n\n}\n\n\n\nimpl<'a> ConfigLoader<'a> {\n\n pub fn new(parsers: ParserList, cli_config: &'a brix_cli::Config) -> Self {\n\n Self {\n\n parsers,\n\n cli_config,\n\n config_file: None,\n\n config_dir: None,\n\n }\n\n }\n\n\n", "file_path": "crates/brix_config_loader/src/lib.rs", "rank": 34, "score": 37339.726643585076 }, { "content": "/// Executes the core `clap` function to parse argumensts from `env::args_os()`.\n\npub fn clap_matches() -> Result<clap::ArgMatches<'static>, crate::BrixError> {\n\n let err = match app::app().get_matches_from_safe(env::args_os()) {\n\n Ok(matches) => return Ok(matches),\n\n Err(err) => err,\n\n };\n\n if err.use_stderr() {\n\n return Err(err.into());\n\n }\n\n\n\n let _ = write!(io::stdout(), \"{}\", err);\n\n process::exit(0);\n\n}\n", "file_path": "crates/brix_cli/src/args.rs", "rank": 35, "score": 34327.80844579053 }, { "content": "/// The main wrapper function for finding a module declaration file.\n\n/// Uses the `config_dir` to determine whether to search in parent directories or not.\n\nfn modules_from_config(dir: &PathBuf, config: &brix_cli::Config) -> Result<Vec<PathBuf>> {\n\n let declarations;\n\n if config.config_dir.is_none() {\n\n declarations = search_for_module_declarations_all(dir.to_str().unwrap(), &config)?;\n\n } else {\n\n declarations = search_for_module_declarations(\n\n config.config_dir.as_ref().unwrap(),\n\n dir.to_str().unwrap(),\n\n &config.config_name,\n\n )?;\n\n }\n\n\n\n if declarations.len() == 0 {\n\n return Err(BrixError::with(&format!(\n\n \"Could not find module declaration for '{}' in {}\",\n\n config.config_name,\n\n util::display_path(&dir.to_string_lossy())\n\n )));\n\n }\n\n\n\n Ok(declarations)\n\n}\n\n\n", "file_path": "crates/brix_core/main.rs", "rank": 36, "score": 33393.375628357535 }, { "content": "// Copyright (c) 2021 Ethan Lerner, Caleb Cushing, and the Brix contributors\n\n//\n\n// This software is released under the MIT License.\n\n// https://opensource.org/licenses/MIT\n\n\n\n//! # Brix Common\n\n//! Common crate currently used for storing [AppContext] and other common functions in the future.\n\n\n\nmod app_context;\n\npub mod context;\n\n\n\npub use app_context::AppContext;\n", "file_path": "crates/brix_common/src/lib.rs", "rank": 37, "score": 29795.518853080615 }, { "content": "// Copyright (c) 2021 Ethan Lerner, Caleb Cushing, and the Brix contributors\n\n//\n\n// This software is released under the MIT License.\n\n// https://opensource.org/licenses/MIT\n\n\n\nuse std::collections::HashMap;\n\n\n\n/// Current type used for contexts.\n", "file_path": "crates/brix_common/src/context.rs", "rank": 38, "score": 29792.17801842844 }, { "content": "// Copyright (c) 2021 Ethan Lerner, Caleb Cushing, and the Brix contributors\n\n//\n\n// This software is released under the MIT License.\n\n// https://opensource.org/licenses/MIT\n\n\n\n//! Contains all helper behavior.\n\n\n\nmod basic;\n\nmod casing;\n\n\n\npub use basic::{ToLowerHelper, ToTitleHelper, ToUpperHelper};\n\npub use casing::ToCaseHelper;\n", "file_path": "crates/brix_processor/src/helpers/mod.rs", "rank": 39, "score": 28955.41337558297 }, { "content": "// Copyright (c) 2021 Ethan Lerner, Caleb Cushing, and the Brix contributors\n\n//\n\n// This software is released under the MIT License.\n\n// https://opensource.org/licenses/MIT\n\n\n\nuse brix_cli::Config;\n\nuse brix_processor::ProcessorCore;\n\n\n\n/// Struct containing common properties that require access during command execution.\n\npub struct AppContext<'a> {\n\n pub processor: ProcessorCore<'a>,\n\n pub config: &'a Config,\n\n}\n", "file_path": "crates/brix_common/src/app_context.rs", "rank": 40, "score": 28759.521878827603 }, { "content": " // In case user wants to specify their case in the actual case itself\n\n let modified_case_type = case_type.render().to_case(Case::Kebab);\n\n\n\n let case = match modified_case_type.as_str() {\n\n \"toggle\" => Ok(Case::Toggle),\n\n \"pascal\" => Ok(Case::Pascal),\n\n \"camel\" => Ok(Case::Camel),\n\n \"upper-camel\" => Ok(Case::UpperCamel),\n\n \"snake\" => Ok(Case::Snake),\n\n \"upper-snake\" => Ok(Case::UpperSnake),\n\n \"screaming-snake\" => Ok(Case::ScreamingSnake),\n\n \"kebab\" => Ok(Case::Kebab),\n\n \"cobol\" => Ok(Case::Cobol),\n\n \"train\" => Ok(Case::Train),\n\n \"flat\" => Ok(Case::Flat),\n\n \"upper-flat\" => Ok(Case::UpperFlat),\n\n \"alternating\" => Ok(Case::Alternating),\n\n \"\" => Err(RenderError::new(\n\n \"try putting the name of your desired case in quotation marks e.g. \\\"snake\\\"\",\n\n )),\n", "file_path": "crates/brix_processor/src/helpers/casing.rs", "rank": 41, "score": 28559.014397887044 }, { "content": "// Copyright (c) 2021 Ethan Lerner, Caleb Cushing, and the Brix contributors\n\n//\n\n// This software is released under the MIT License.\n\n// https://opensource.org/licenses/MIT\n\n\n\n//! Contains the [ToCaseHelper] helper.\n\n\n\nuse crate::*;\n\nuse convert_case::{Case, Casing};\n\n\n\n/// Helper that is used to convert a parameter from one case to another.\n\n/// Supports the following cases:\n\n/// - toggle\n\n/// - pascal\n\n/// - camel\n\n/// - upper-camel\n\n/// - snake\n\n/// - upper-snake\n\n/// - screaming-snake\n\n/// - kebab\n", "file_path": "crates/brix_processor/src/helpers/casing.rs", "rank": 42, "score": 28551.924957613992 }, { "content": "/// - cobol\n\n/// - train\n\n/// - flat\n\n/// - upper-flat\n\n/// - alternating\n\n#[derive(Clone, Copy)]\n\npub struct ToCaseHelper;\n\n\n\nimpl HelperDef for ToCaseHelper {\n\n fn call<'reg: 'rc, 'rc>(\n\n &self,\n\n h: &Helper,\n\n _: &Handlebars,\n\n _: &Context,\n\n _rc: &mut RenderContext,\n\n out: &mut dyn Output,\n\n ) -> HelperResult {\n\n let case_type = h.param(0).ok_or(RenderError::new(\n\n \"case type (e.g. 'snake' or 'camel') not specified\",\n\n ))?;\n", "file_path": "crates/brix_processor/src/helpers/casing.rs", "rank": 43, "score": 28546.830270892533 }, { "content": " _ => Err(RenderError::new(format!(\n\n \"case '{}' not supported\",\n\n case_type.render()\n\n ))),\n\n }?;\n\n\n\n let value = h\n\n .param(1)\n\n .ok_or(RenderError::new(\"argument to process not specified\"))?;\n\n\n\n let rendered = value.value().render();\n\n let output = rendered.to_case(case);\n\n\n\n out.write(&output)?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "crates/brix_processor/src/helpers/casing.rs", "rank": 44, "score": 28532.34711879696 }, { "content": "// Copyright (c) 2021 Ethan Lerner, Caleb Cushing, and the Brix contributors\n\n//\n\n// This software is released under the MIT License.\n\n// https://opensource.org/licenses/MIT\n\n\n\n//! Module that contains all parsers and common functionality,\n\n\n\nuse crate::BrixError;\n\nuse crate::RawConfig;\n\nuse std::path::PathBuf;\n\n\n\nmod yaml;\n\npub use yaml::YamlConfigParser;\n\n\n\n/// Trait that all config parsers must implement.\n", "file_path": "crates/brix_config_loader/src/parsers/mod.rs", "rank": 45, "score": 27982.27493416952 }, { "content": "// Copyright (c) 2021 Ethan Lerner, Caleb Cushing, and the Brix contributors\n\n//\n\n// This software is released under the MIT License.\n\n// https://opensource.org/licenses/MIT\n\n\n\nuse lazy_static::lazy_static;\n\nuse std::collections::HashMap;\n\n\n\nmod common;\n\n\n\nlazy_static! {\n\n static ref TO_TILE_CONTEXT: HashMap<String, String> = {\n\n let mut map = HashMap::new();\n\n map.insert(String::from(\"one\"), String::from(\"oneword\"));\n\n map.insert(\n\n String::from(\"two\"),\n\n String::from(\"Already in the Title Case\"),\n\n );\n\n map.insert(String::from(\"three\"), String::from(\"I AM YELLING\"));\n\n map\n\n };\n\n static ref TO_TITLE_ASSERTIONS: Vec<&'static str> =\n\n vec![\"Oneword\", \"Already in the Title Case\", \"I Am Yelling\"];\n\n}\n\n\n\n#[test]\n", "file_path": "crates/brix_processor/tests/to_title.rs", "rank": 46, "score": 27360.66232441708 }, { "content": "// Copyright (c) 2021 Ethan Lerner, Caleb Cushing, and the Brix contributors\n\n//\n\n// This software is released under the MIT License.\n\n// https://opensource.org/licenses/MIT\n\n\n\nuse lazy_static::lazy_static;\n\nuse std::collections::HashMap;\n\n\n\nmod common;\n\n\n\nlazy_static! {\n\n static ref TO_LOWER_CONTEXT: HashMap<String, String> = {\n\n let mut map = HashMap::new();\n\n map.insert(String::from(\"one\"), String::from(\"ALLUPPER\"));\n\n map.insert(String::from(\"two\"), String::from(\"already_lower\"));\n\n map.insert(String::from(\"three\"), String::from(\"mIxEd.cAsE\"));\n\n map\n\n };\n\n static ref TO_LOWER_ASSERTIONS: Vec<&'static str> =\n\n vec![\"allupper\", \"already_lower\", \"mixed.case\"];\n\n}\n\n\n\n#[test]\n", "file_path": "crates/brix_processor/tests/to_lower.rs", "rank": 47, "score": 27359.724975695983 }, { "content": "#[test]\n\nfn to_lower() {\n\n let core = common::setup();\n\n let context = brix_processor::create_context(TO_LOWER_CONTEXT.clone());\n\n let contents = common::load_file(\"lower\").unwrap();\n\n\n\n let result = core.process(contents, context).unwrap();\n\n assert!(common::line_assert(result, TO_LOWER_ASSERTIONS.to_vec()))\n\n}\n", "file_path": "crates/brix_processor/tests/to_lower.rs", "rank": 48, "score": 26390.560198242896 }, { "content": "#[test]\n\nfn to_title() {\n\n let core = common::setup();\n\n let context = brix_processor::create_context(TO_TILE_CONTEXT.clone());\n\n let contents = common::load_file(\"title\").unwrap();\n\n\n\n let result = core.process(contents, context).unwrap();\n\n assert!(common::line_assert(result, TO_TITLE_ASSERTIONS.to_vec()))\n\n}\n", "file_path": "crates/brix_processor/tests/to_title.rs", "rank": 49, "score": 26390.560198242896 }, { "content": "// Copyright (c) 2021 Ethan Lerner, Caleb Cushing, and the Brix contributors\n\n//\n\n// This software is released under the MIT License.\n\n// https://opensource.org/licenses/MIT\n\n\n\n#[cfg(test)]\n\nmod params {\n\n use std::path::PathBuf;\n\n\n\n use dialoguer::console::Term;\n\n\n\n use crate::command::copy::{CopyParams, OverwritableCommand};\n\n use crate::{CopyCommand, ProcessedCommandParams};\n\n\n\n #[test]\n\n fn valid() {\n\n let command = CopyCommand {\n\n term: Term::stdout(),\n\n };\n\n let params = ProcessedCommandParams {\n", "file_path": "crates/brix_commands/src/command/copy/tests/from.rs", "rank": 50, "score": 25511.152566409106 }, { "content": " fn invalid() {\n\n let command = CopyCommand {\n\n term: Term::stdout(),\n\n };\n\n\n\n // TODO: macros for processed command params since commands are changing\n\n let params = ProcessedCommandParams {\n\n source: None,\n\n destination: None,\n\n overwrite: None,\n\n search: None,\n\n replace: None,\n\n commands: None,\n\n stdout: None,\n\n context: None,\n\n };\n\n\n\n let error = command.from(params).err().unwrap();\n\n let errors = error.into_errors();\n\n let keys = errors.keys();\n\n let mut vec = keys.collect::<Vec<_>>();\n\n\n\n vec.sort();\n\n\n\n assert_eq!(vec, [&\"destination\", &\"source\"]);\n\n }\n\n}\n", "file_path": "crates/brix_commands/src/command/copy/tests/from.rs", "rank": 51, "score": 25508.79126234022 }, { "content": "// Copyright (c) 2021 Ethan Lerner, Caleb Cushing, and the Brix contributors\n\n//\n\n// This software is released under the MIT License.\n\n// https://opensource.org/licenses/MIT\n\n\n\nuse std::path::PathBuf;\n\n\n\nuse crate::command::Command;\n\nuse crate::{MkdirCommand, ProcessedCommandParams};\n\nuse brix_common::AppContext;\n\nuse brix_errors::BrixErrorKind;\n\nuse brix_processor::ProcessorCore;\n\n\n\nmacro_rules! run {\n\n ($args:expr) => {{\n\n let processor = ProcessorCore::new();\n\n let config = brix_cli::Config::default();\n\n let command = MkdirCommand::new();\n\n let context = AppContext {\n\n processor,\n", "file_path": "crates/brix_commands/src/command/mkdir/tests/from.rs", "rank": 52, "score": 25508.70946674247 }, { "content": " config: &config,\n\n };\n\n // Ensure that it is a validation error that\n\n assert_eq!(\n\n command.run($args, &context).unwrap_err().kind.unwrap(),\n\n BrixErrorKind::Validation\n\n )\n\n }};\n\n}\n\n\n\n#[test]\n", "file_path": "crates/brix_commands/src/command/mkdir/tests/from.rs", "rank": 53, "score": 25506.244524362315 }, { "content": " source: Option::Some(PathBuf::new()),\n\n destination: Option::Some(PathBuf::new()),\n\n overwrite: None,\n\n search: None,\n\n replace: None,\n\n commands: None,\n\n stdout: None,\n\n context: None,\n\n };\n\n assert_eq!(\n\n command.from(params).unwrap(),\n\n CopyParams {\n\n source: Default::default(),\n\n destination: Default::default(),\n\n overwrite: None\n\n }\n\n )\n\n }\n\n\n\n #[test]\n", "file_path": "crates/brix_commands/src/command/copy/tests/from.rs", "rank": 54, "score": 25505.698424050614 }, { "content": "/// Trait that all config parsers must implement.\n\npub trait ConfigParser {\n\n fn parse(&self, contents: &str) -> Result<RawConfig, BrixError>;\n\n fn matches(&self, path: &PathBuf) -> bool;\n\n fn extensions(&self) -> Vec<&str>;\n\n}\n", "file_path": "crates/brix_config_loader/src/parsers/mod.rs", "rank": 55, "score": 25422.105640075035 }, { "content": "// Copyright (c) 2021 Ethan Lerner, Caleb Cushing, and the Brix contributors\n\n//\n\n// This software is released under the MIT License.\n\n// https://opensource.org/licenses/MIT\n\n\n\nuse std::fs::{read_to_string, remove_file};\n\nuse std::path::PathBuf;\n\n\n\nuse maplit::*;\n\n\n\nuse crate::command::Command;\n\nuse crate::{ProcessedCommandParams, TemplateCommand};\n\n\n\nuse brix_common::AppContext;\n\nuse brix_processor::ProcessorCore;\n\n\n\nmacro_rules! do_test {\n\n ($source:expr, $context:expr, $assertion:expr) => {{\n\n let processor = ProcessorCore::new();\n\n let config = brix_cli::Config::default();\n", "file_path": "crates/brix_commands/src/command/template/tests/run.rs", "rank": 56, "score": 24685.76802045555 }, { "content": "// Copyright (c) 2021 Ethan Lerner, Caleb Cushing, and the Brix contributors\n\n//\n\n// This software is released under the MIT License.\n\n// https://opensource.org/licenses/MIT\n\n\n\nuse std::path::PathBuf;\n\n\n\nuse crate::command::Command;\n\nuse crate::{MkdirCommand, ProcessedCommandParams};\n\nuse brix_common::AppContext;\n\nuse brix_processor::ProcessorCore;\n\n\n\nmacro_rules! do_test {\n\n ($destination:expr) => {{\n\n let processor = ProcessorCore::new();\n\n let config = brix_cli::Config::default();\n\n let command = MkdirCommand::new();\n\n let context = AppContext {\n\n processor,\n\n config: &config,\n", "file_path": "crates/brix_commands/src/command/mkdir/tests/run.rs", "rank": 57, "score": 24684.062801679505 }, { "content": "// Copyright (c) 2021 Ethan Lerner, Caleb Cushing, and the Brix contributors\n\n//\n\n// This software is released under the MIT License.\n\n// https://opensource.org/licenses/MIT\n\n\n\nuse std::path::PathBuf;\n\n\n\nuse crate::command::Command;\n\nuse crate::{ProcessedCommandParams, TemplateCommand};\n\nuse brix_common::AppContext;\n\nuse brix_errors::BrixErrorKind;\n\nuse brix_processor::ProcessorCore;\n\n\n\nmacro_rules! run {\n\n ($args:expr) => {{\n\n let processor = ProcessorCore::new();\n\n let config = brix_cli::Config::default();\n\n let command = TemplateCommand::new();\n\n let context = AppContext {\n\n processor,\n", "file_path": "crates/brix_commands/src/command/template/tests/invalid.rs", "rank": 58, "score": 24682.646182877827 }, { "content": "\n\nmacro_rules! create_args {\n\n ($source:expr, $destination:expr, $context:expr) => {\n\n ProcessedCommandParams {\n\n source: Some(PathBuf::from($source)),\n\n destination: Some(PathBuf::from($destination)),\n\n overwrite: Some(true),\n\n search: None,\n\n replace: None,\n\n commands: None,\n\n stdout: None,\n\n context: Some($context),\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! s {\n\n ($st:expr) => {\n\n String::from($st)\n\n };\n\n}\n\n\n\n#[test]\n", "file_path": "crates/brix_commands/src/command/template/tests/run.rs", "rank": 59, "score": 24681.59702128514 }, { "content": "#[test]\n\nfn nothing() {\n\n run!(ProcessedCommandParams {\n\n source: None,\n\n destination: None,\n\n overwrite: None,\n\n search: None,\n\n replace: None,\n\n commands: None,\n\n stdout: None,\n\n context: None,\n\n })\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/mkdir/tests/from.rs", "rank": 60, "score": 24681.21473275852 }, { "content": "#[test]\n\n#[should_panic]\n\nfn valid() {\n\n run!(ProcessedCommandParams {\n\n source: None,\n\n destination: Some(PathBuf::from(\"/tmp\")),\n\n overwrite: None,\n\n search: None,\n\n replace: None,\n\n commands: None,\n\n stdout: None,\n\n context: None,\n\n })\n\n}\n", "file_path": "crates/brix_commands/src/command/mkdir/tests/from.rs", "rank": 61, "score": 24681.139797072927 }, { "content": " let context = AppContext {\n\n processor,\n\n config: &config,\n\n };\n\n\n\n let path = PathBuf::from(\"src/command/template\").join($source);\n\n let temp_dir = \"src/command/template/temp/\";\n\n let file_stem = path.file_stem().unwrap().to_str().unwrap();\n\n let destination = format!(\"{}{}{}\", temp_dir, file_stem, \"_output.txt\");\n\n\n\n let args = create_args!(path.clone(), destination.clone(), $context);\n\n let command = TemplateCommand::new();\n\n command.run(args, &context).unwrap();\n\n\n\n let result = read_to_string(PathBuf::from(destination.clone())).unwrap();\n\n // Ensure that the file is removed from the temp directory\n\n remove_file(destination).unwrap();\n\n assert_eq!(result, $assertion);\n\n }};\n\n}\n", "file_path": "crates/brix_commands/src/command/template/tests/run.rs", "rank": 62, "score": 24680.55739569086 }, { "content": " config: &config,\n\n };\n\n // Ensure it is a validation error\n\n assert_eq!(\n\n command.run($args, &context).unwrap_err().kind.unwrap(),\n\n BrixErrorKind::Validation\n\n );\n\n }};\n\n}\n\n\n\n#[test]\n", "file_path": "crates/brix_commands/src/command/template/tests/invalid.rs", "rank": 63, "score": 24680.18124049767 }, { "content": " };\n\n\n\n let path = PathBuf::from(\"src/command/mkdir/temp\").join($destination);\n\n\n\n let params = ProcessedCommandParams {\n\n source: None,\n\n destination: Some(path.clone()),\n\n overwrite: None,\n\n search: None,\n\n replace: None,\n\n commands: None,\n\n stdout: None,\n\n context: None,\n\n };\n\n\n\n command.run(params, &context).unwrap();\n\n let exists = path.exists();\n\n\n\n std::fs::remove_dir_all(&path).unwrap();\n\n\n\n assert!(exists);\n\n }};\n\n}\n\n\n\n#[test]\n", "file_path": "crates/brix_commands/src/command/mkdir/tests/run.rs", "rank": 64, "score": 24679.44164948294 }, { "content": "// Copyright (c) 2021 Ethan Lerner, Caleb Cushing, and the Brix contributors\n\n//\n\n// This software is released under the MIT License.\n\n// https://opensource.org/licenses/MIT\n\n\n\nuse std::fs::read_to_string;\n\nuse std::path::PathBuf;\n\n\n\nuse crate::command::Command;\n\nuse crate::{ProcessedCommandParams, SearchReplaceCommand};\n\nuse brix_common::AppContext;\n\nuse brix_processor::ProcessorCore;\n\n\n\nmacro_rules! do_test {\n\n ($path:expr, $search:expr, $replace:expr, $assertion:expr) => {{\n\n let processor = ProcessorCore::new();\n\n let config = brix_cli::Config::default();\n\n let command = SearchReplaceCommand::new();\n\n let context = AppContext {\n\n processor,\n", "file_path": "crates/brix_commands/src/command/search_replace/tests/run.rs", "rank": 65, "score": 23911.00934053 }, { "content": " overwrite: None,\n\n search: Some(String::from($search)),\n\n replace: Some(String::from($replace)),\n\n commands: None,\n\n stdout: None,\n\n context: None,\n\n }\n\n };\n\n}\n\n\n\n#[test]\n", "file_path": "crates/brix_commands/src/command/search_replace/tests/run.rs", "rank": 66, "score": 23909.752695101022 }, { "content": "// Copyright (c) 2021 Ethan Lerner, Caleb Cushing, and the Brix contributors\n\n//\n\n// This software is released under the MIT License.\n\n// https://opensource.org/licenses/MIT\n\n\n\nuse std::path::PathBuf;\n\n\n\nuse crate::command::Command;\n\nuse crate::{ProcessedCommandParams, SearchReplaceCommand};\n\nuse brix_common::AppContext;\n\nuse brix_errors::BrixErrorKind;\n\nuse brix_processor::ProcessorCore;\n\n\n\nmacro_rules! run {\n\n ($args:expr) => {{\n\n let processor = ProcessorCore::new();\n\n let config = brix_cli::Config::default();\n\n let command = SearchReplaceCommand::new();\n\n let context = AppContext {\n\n processor,\n", "file_path": "crates/brix_commands/src/command/search_replace/tests/invalid.rs", "rank": 67, "score": 23908.366741097645 }, { "content": "#[test]\n\nfn unchanged() {\n\n do_test!(\n\n \"unchanged.hbs\",\n\n hashmap! {\n\n s!(\"foo\") => s!(\"bar\")\n\n },\n\n \"This file will remain unchanged\\n\"\n\n );\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/template/tests/run.rs", "rank": 68, "score": 23906.984834570318 }, { "content": "#[test]\n\nfn destination() {\n\n run!(ProcessedCommandParams {\n\n source: None,\n\n destination: Some(PathBuf::new()),\n\n overwrite: None,\n\n search: None,\n\n replace: None,\n\n commands: None,\n\n stdout: None,\n\n context: None,\n\n })\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/template/tests/invalid.rs", "rank": 69, "score": 23906.984834570318 }, { "content": "#[test]\n\nfn escaped() {\n\n do_test!(\n\n \"escaped.hbs\",\n\n hashmap! {\n\n s!(\"foo\") => s!(\"(SEE?)\")\n\n },\n\n \"This is not (SEE?) escaped {{this}} is\\n\"\n\n );\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/template/tests/run.rs", "rank": 70, "score": 23906.984834570318 }, { "content": "#[test]\n\nfn nested() {\n\n do_test!(\"nested/one/two\");\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/mkdir/tests/run.rs", "rank": 71, "score": 23906.984834570318 }, { "content": "#[test]\n\nfn nothing() {\n\n run!(ProcessedCommandParams {\n\n source: None,\n\n destination: None,\n\n overwrite: None,\n\n search: None,\n\n replace: None,\n\n commands: None,\n\n stdout: None,\n\n context: None,\n\n })\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/template/tests/invalid.rs", "rank": 72, "score": 23906.984834570318 }, { "content": "#[test]\n\nfn basic() {\n\n do_test!(\"basic\");\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/mkdir/tests/run.rs", "rank": 73, "score": 23906.984834570318 }, { "content": "#[test]\n\nfn source() {\n\n run!(ProcessedCommandParams {\n\n source: Some(PathBuf::new()),\n\n destination: None,\n\n overwrite: None,\n\n search: None,\n\n replace: None,\n\n commands: None,\n\n stdout: None,\n\n context: None,\n\n })\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/template/tests/invalid.rs", "rank": 74, "score": 23906.984834570318 }, { "content": "#[test]\n\n#[should_panic]\n\nfn valid() {\n\n run!(ProcessedCommandParams {\n\n source: Some(PathBuf::new()),\n\n destination: Some(PathBuf::new()),\n\n overwrite: None,\n\n search: None,\n\n replace: None,\n\n commands: None,\n\n stdout: None,\n\n context: None,\n\n })\n\n}\n", "file_path": "crates/brix_commands/src/command/template/tests/invalid.rs", "rank": 75, "score": 23906.909898884725 }, { "content": " config: &config,\n\n };\n\n // Ensure it is a validation error\n\n assert_eq!(\n\n command.run($args, &context).unwrap_err().kind.unwrap(),\n\n BrixErrorKind::Validation\n\n );\n\n }};\n\n}\n\n\n\n#[test]\n", "file_path": "crates/brix_commands/src/command/search_replace/tests/invalid.rs", "rank": 76, "score": 23905.95134230947 }, { "content": " config: &config,\n\n };\n\n\n\n let path = PathBuf::from(\"src/command/search_replace\").join($path);\n\n let contents = read_to_string(path.clone()).unwrap();\n\n let args = create_args!(path.clone(), $search, $replace);\n\n command.run(args, &context).unwrap();\n\n\n\n let result = read_to_string(path.clone()).unwrap();\n\n // Ensure file is reverted to its original state\n\n std::fs::write(path, contents).unwrap();\n\n assert_eq!(result, $assertion);\n\n }};\n\n}\n\n\n\nmacro_rules! create_args {\n\n ($dest:expr, $search:expr, $replace:expr) => {\n\n ProcessedCommandParams {\n\n source: None,\n\n destination: Some(PathBuf::from($dest)),\n", "file_path": "crates/brix_commands/src/command/search_replace/tests/run.rs", "rank": 77, "score": 23905.710218962748 }, { "content": "#[test]\n\nfn no_matches() {\n\n do_test!(\"no_matches.txt\", \"z\", \"foo\", \"Lorem ipsum dolor sit amet\\n\");\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/search_replace/tests/run.rs", "rank": 78, "score": 23179.858105485142 }, { "content": "#[test]\n\nfn replace() {\n\n run!(ProcessedCommandParams {\n\n source: None,\n\n destination: None,\n\n overwrite: None,\n\n search: None,\n\n replace: Some(String::new()),\n\n commands: None,\n\n stdout: None,\n\n context: None,\n\n })\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/search_replace/tests/invalid.rs", "rank": 79, "score": 23179.858105485142 }, { "content": "#[test]\n\nfn search() {\n\n run!(ProcessedCommandParams {\n\n source: None,\n\n destination: None,\n\n overwrite: None,\n\n search: Some(String::new()),\n\n replace: None,\n\n commands: None,\n\n stdout: None,\n\n context: None,\n\n })\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/search_replace/tests/invalid.rs", "rank": 80, "score": 23179.858105485142 }, { "content": "#[test]\n\nfn destination() {\n\n run!(ProcessedCommandParams {\n\n source: None,\n\n destination: Some(PathBuf::new()),\n\n overwrite: None,\n\n search: None,\n\n replace: None,\n\n commands: None,\n\n stdout: None,\n\n context: None,\n\n })\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/search_replace/tests/invalid.rs", "rank": 81, "score": 23179.858105485142 }, { "content": "#[test]\n\nfn nothing() {\n\n run!(ProcessedCommandParams {\n\n source: None,\n\n destination: None,\n\n overwrite: None,\n\n search: None,\n\n replace: None,\n\n commands: None,\n\n stdout: None,\n\n context: None,\n\n })\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/search_replace/tests/invalid.rs", "rank": 82, "score": 23179.858105485142 }, { "content": "#[test]\n\nfn empty() {\n\n do_test!(\"empty.txt\", \"foo\", \"bar\", \"\");\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/search_replace/tests/run.rs", "rank": 83, "score": 23179.858105485142 }, { "content": "#[test]\n\nfn simple_context() {\n\n do_test!(\n\n \"simple_context.hbs\",\n\n hashmap! {\n\n s!(\"word\") => s!(\"templated\")\n\n },\n\n \"this is templated text\\n\"\n\n );\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/template/tests/run.rs", "rank": 84, "score": 23179.858105485142 }, { "content": "#[test]\n\nfn sub_extension() {\n\n do_test!(\n\n \"sub_extension.rs.hbs\",\n\n hashmap! {\n\n s!(\"foo\") => s!(\"bar\"),\n\n s!(\"bar\") => s!(\"baz\")\n\n },\n\n \"bar baz\\n\"\n\n );\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/template/tests/run.rs", "rank": 85, "score": 23179.858105485142 }, { "content": "#[test]\n\nfn in_parent_dir() {\n\n do_test!(\"../in_parent\");\n\n}\n", "file_path": "crates/brix_commands/src/command/mkdir/tests/run.rs", "rank": 86, "score": 23179.858105485142 }, { "content": "#[test]\n\nfn empty_tags() {\n\n do_test!(\"empty_tags.hbs\", hashmap! {}, \"The here will be \\n\");\n\n}\n", "file_path": "crates/brix_commands/src/command/template/tests/run.rs", "rank": 87, "score": 23179.858105485142 }, { "content": "#[test]\n\nfn basic() {\n\n do_test!(\"basic.txt\", \"es\", \"ES\", \"ES\\n\");\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/search_replace/tests/run.rs", "rank": 88, "score": 23179.858105485142 }, { "content": "#[test]\n\nfn multiple_stages() {\n\n do_test!(\"multiple_stages\");\n\n do_test!(\"multiple_stages/next\");\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/mkdir/tests/run.rs", "rank": 89, "score": 23179.858105485142 }, { "content": "#[test]\n\n#[should_panic]\n\nfn valid() {\n\n run!(ProcessedCommandParams {\n\n source: None,\n\n destination: Some(PathBuf::new()),\n\n overwrite: None,\n\n search: Some(String::new()),\n\n replace: Some(String::new()),\n\n commands: None,\n\n stdout: None,\n\n context: None,\n\n })\n\n}\n", "file_path": "crates/brix_commands/src/command/search_replace/tests/invalid.rs", "rank": 90, "score": 23179.78316979955 }, { "content": "#[test]\n\nfn full_match() {\n\n do_test!(\"full_match_one.txt\", \"^[0-9]{10}\\\\n$\", \"!\", \"59023485\\n\");\n\n do_test!(\"full_match_two.txt\", \"^[0-9]{10}\\\\n$\", \"!\", \"!\");\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/search_replace/tests/run.rs", "rank": 91, "score": 22495.66289840246 }, { "content": "#[test]\n\nfn non_english() {\n\n do_test!(\n\n \"non_english.txt\",\n\n \"это не английские буквы\\n\",\n\n \"这些不是英文字母\",\n\n \"这些不是英文字母\"\n\n );\n\n}\n", "file_path": "crates/brix_commands/src/command/search_replace/tests/run.rs", "rank": 92, "score": 22495.66289840246 }, { "content": "#[test]\n\nfn diff_extension() {\n\n do_test!(\n\n \"diff_extension.peb\",\n\n \"foo\",\n\n \"bar\",\n\n \"bar bar bar bar bar bar\\n\"\n\n )\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/search_replace/tests/run.rs", "rank": 93, "score": 22495.66289840246 }, { "content": "#[test]\n\nfn search_replace() {\n\n run!(ProcessedCommandParams {\n\n source: None,\n\n destination: None,\n\n overwrite: None,\n\n search: Some(String::new()),\n\n replace: Some(String::new()),\n\n commands: None,\n\n stdout: None,\n\n context: None,\n\n })\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/search_replace/tests/invalid.rs", "rank": 94, "score": 22495.66289840246 }, { "content": "#[test]\n\nfn multi_value_context() {\n\n do_test!(\n\n \"multi_value_context.hbs\",\n\n hashmap! {\n\n s!(\"foo\") => s!(\"bar\"),\n\n s!(\"bar\") => s!(\"baz\")\n\n },\n\n \"a bar walks into a baz\\n\"\n\n );\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/template/tests/run.rs", "rank": 95, "score": 22495.66289840246 }, { "content": "#[test]\n\nfn basic_regex() {\n\n do_test!(\n\n \"basic_regex.txt\",\n\n \"[a-zA-Z]\",\n\n \"0\",\n\n \"00002390070029003929300000\\n\"\n\n );\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/search_replace/tests/run.rs", "rank": 96, "score": 22495.66289840246 }, { "content": "#[test]\n\nfn destination_search() {\n\n run!(ProcessedCommandParams {\n\n source: None,\n\n destination: Some(PathBuf::new()),\n\n overwrite: None,\n\n search: Some(String::new()),\n\n replace: None,\n\n commands: None,\n\n stdout: None,\n\n context: None,\n\n })\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/search_replace/tests/invalid.rs", "rank": 97, "score": 22495.66289840246 }, { "content": "#[test]\n\nfn destination_replace() {\n\n run!(ProcessedCommandParams {\n\n source: None,\n\n destination: Some(PathBuf::new()),\n\n overwrite: None,\n\n search: None,\n\n replace: Some(String::new()),\n\n commands: None,\n\n stdout: None,\n\n context: None,\n\n })\n\n}\n\n\n", "file_path": "crates/brix_commands/src/command/search_replace/tests/invalid.rs", "rank": 98, "score": 22495.66289840246 }, { "content": "use brix_common::AppContext;\n\nuse brix_errors::BrixError;\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n mod invalid;\n\n mod run;\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct TemplateParams {\n\n source: PathBuf,\n\n destination: PathBuf,\n\n overwrite: Option<bool>,\n\n context: Option<HashMap<String, String>>,\n\n}\n\n\n\nimpl PartialEq for TemplateParams {\n\n fn eq(&self, other: &Self) -> bool {\n\n return self.source == other.source\n", "file_path": "crates/brix_commands/src/command/template.rs", "rank": 99, "score": 14.876824886468704 } ]
Rust
src/lib.rs
mikelma/ostrich-server
685c29502e09a5e0d668ad85cc531ea68711f1f7
use ostrich_core::*; #[macro_use] extern crate log; use tokio::sync::mpsc; use tokio::net::{TcpStream}; use tokio::io::{AsyncReadExt, AsyncWriteExt, AsyncRead}; use tokio::stream::{Stream}; use std::collections::HashMap; use std::io::{self, BufReader, prelude::*}; use std::fs::File; use core::task::{Poll, Context}; use core::pin::Pin; pub mod config; pub type Tx = mpsc::UnboundedSender<Command>; pub type Rx = mpsc::UnboundedReceiver<Command>; pub struct SharedConn { shared_conn: HashMap<String, Tx>, groups: HashMap<String, Vec<String>>, } impl SharedConn { pub fn new() -> SharedConn{ SharedConn{ shared_conn: HashMap::new(), groups: HashMap::new() } } pub fn add(&mut self, name: String, tx: Tx) -> Result<(), io::Error> { if self.shared_conn.contains_key(&name) { return Err(io::Error::new(io::ErrorKind::AlreadyExists, "A user with the same credentials is already loged in")); } self.shared_conn.insert(name, tx); Ok(()) } pub fn remove(&mut self, name: &str) -> Result<(), io::Error> { match self.shared_conn.remove(name) { Some(_) => Ok(()), None => Err(io::Error::new(io::ErrorKind::NotFound, "User not found")), } } pub async fn join_group(&mut self, group_name: &str, username: &str) -> Result<(), io::Error> { if let Some(group) = self.groups.get_mut(group_name) { if group.iter().find(|&x| *x == group_name.to_string()).is_some() { trace!("User {} wanted to join {} when already joined", username, group_name); return Ok(()) } else { group.push(username.to_string()); let notification = Command::ListUsr( group_name.to_string(), ListUsrOperation::Add, format!("\n{}", username)); self.send2group(username, group_name, None, &notification).await?; } } else { self.groups.insert(group_name.to_string(), vec![username.to_string()]); } Ok(()) } pub async fn leave_group(&mut self, username: &str, group_name: &str) -> Result<(), io::Error> { if let Some(group) = self.groups.get_mut(group_name) { if let Some(index) = group.iter().position(|name| name == username) { group.remove(index); let notification = Command::ListUsr( group_name.to_string(), ListUsrOperation::Remove, format!("\n{}", username)); self.send2group(group_name, group_name, Some(vec![&username]), &notification).await?; } else { return Err(io::Error::new(io::ErrorKind::NotFound, format!("User {} not found in {}", username, group_name))) } } else { return Err(io::Error::new(io::ErrorKind::NotFound, format!("Group {} not found", group_name))) } Ok(()) } pub async fn send(&mut self, command: Command) -> Result<(), io::Error>{ let (sender, target) = match &command { Command::Msg(s,t,_) => (s, t), _ => return Err(io::Error::new( io::ErrorKind::InvalidInput, "Wrong command type. Only MSG commands can be sended")), }; if target.starts_with("#") { return self.send2group(sender, target, None, &command).await; } let target_tx = match self.shared_conn.get_mut(&target.to_string()) { Some(t) => t, None => return Err(io::Error::new( io::ErrorKind::NotFound, format!("Target {} not connected or does not exist", target))), }; if let Err(_) = target_tx.send(command) { return Err(io::Error::new(io::ErrorKind::BrokenPipe, "Cannot transmit data to target")); } Ok(()) } async fn send2group(&mut self, sender: &str, target: &str, ignore: Option<Vec<&str>>, command: &Command) -> Result<(), io::Error>{ let group_users = match self.groups.get(target) { Some(g) => g, None => return Err(io::Error::new(io::ErrorKind::InvalidInput, format!("Target group {} does not exist", target))), }; if target != sender && group_users.iter().find(|&x| x == sender).is_none() { return Err(io::Error::new(io::ErrorKind::PermissionDenied, format!("send2group error, sender {} is not a memeber of {}", sender, target))); } for name in group_users { let to_ignore = match &ignore { Some(vec) => { vec.iter().find(|&x| x == name).is_some() }, None => false, }; if name != sender && !to_ignore { let user_tx = match self.shared_conn.get_mut(name) { Some(u) => u, None => return Err(io::Error::new(io::ErrorKind::NotFound, format!("sender {} cannot find user {} in group {}", sender, name, target))), }; if let Err(err) = user_tx.send(command.clone()) { return Err(io::Error::new(io::ErrorKind::PermissionDenied, format!("Cannot send command to {} @ {}, unable to send over Tx: {}", sender, target, err))); } } } Ok(()) } pub fn list_group(&self, group_name: &str) -> Result<Vec<String>, io::Error> { let group = match self.groups.get(group_name) { Some(g) => g, None => return Err(io::Error::new(io::ErrorKind::InvalidInput, format!("Group {} does not exist", group_name))), }; trace!("List of group {} is: {:?}", group_name, group); let max = ostrich_core::TXT_BYTES.len(); let mut users = vec![String::new()]; let mut count = 0; for usr in group { let name = format!("{}\n", usr); count += name.len(); let index = if count / max > 0 { (count/max) } else {0}; if index >= users.len() { users.push(String::new()); } users[index].push_str(&name); } Ok(users) } } pub struct Peer { socket: TcpStream, rx: Rx, pub groups: Vec<String>, } impl Peer { pub fn new(socket: TcpStream, rx: Rx) -> Peer { Peer{ socket, rx , groups: Vec::new()} } pub async fn send_command(&mut self, command: &Command) -> Result<usize, io::Error> { self.socket.write(&RawMessage::to_raw(command)?).await } pub async fn read_command(&mut self) -> Result<Option<Command>, io::Error> { let mut buffer = [0u8;PCK_SIZE]; let n = self.socket.read(&mut buffer).await?; if n == 0 { println!("returning ok"); return Ok(None); } let command = RawMessage::from_raw(&buffer)?; Ok(Some(command)) } } pub enum Message { ToSend(Command), Received(Command), } impl Stream for Peer { type Item = Result<Message, io::Error>; fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { if let Poll::Ready(Some(v)) = Pin::new(&mut self.rx).poll_next(cx) { return Poll::Ready(Some(Ok(Message::Received(v)))); } let mut data = [0u8; PCK_SIZE]; let n = match Pin::new(&mut self.socket).poll_read(cx, &mut data) { Poll::Ready(Ok(n)) => n, Poll::Ready(Err(err)) => return Poll::Ready(Some(Err(err))), Poll::Pending => return Poll::Pending, }; if n > 0 { let command = RawMessage::from_raw(&data)?; return Poll::Ready(Some(Ok(Message::ToSend(command)))); } else { return Poll::Ready(None); } } } #[derive(Debug)] #[derive(PartialEq)] struct User { pub name: String, password : String, } pub struct DataBase { db: Vec<User>, } impl DataBase { pub fn new(db_path: &str) -> Result<DataBase, io::Error> { let f = File::open(db_path)?; let mut buff = BufReader::new(f); let mut contents = String::new(); buff.read_to_string(&mut contents)?; let parsed = match json::parse(&contents) { Ok(db) => db, Err(e) => return Err(io::Error::new(io::ErrorKind::Other, e)), }; let mut db = Vec::new(); for user in parsed["users"].members() { let name = match user["name"].as_str() { Some(n) => n.to_string(), None => continue, }; trace!("Init user: {}", name); let password = match user["password"].as_str() { Some(s) => s.to_string(), None => continue, }; db.push(User {name, password}); } Ok(DataBase {db}) } pub fn name_exists(&self, name: &str) -> bool { self.db.iter().find(|&x| x.name == name ).is_some() } pub fn check_log_in_credentials(&self, command: Command) -> Result<String, io::Error> { let (username, password) = match &command { Command::Usr(u, p) => (u, p), _ => return Err(io::Error::new(io::ErrorKind::InvalidInput, "Incorrect log in command")), }; let usr = User { name: username.clone().to_string(), password: password.to_string()}; match self.db.iter().position(|x| x.name == usr.name) { Some(index) => { if self.db[index] == usr { return Ok(username.to_string()); } return Err(io::Error::new(io::ErrorKind::PermissionDenied, "Wrong credentials")) }, None => return Ok(username.to_string()), } } }
use ostrich_core::*; #[macro_use] extern crate log; use tokio::sync::mpsc; use tokio::net::{TcpStream}; use tokio::io::{AsyncReadExt, AsyncWriteExt, AsyncRead}; use tokio::stream::{Stream}; use std::collections::HashMap; use std::io::{self, BufReader, prelude::*}; use std::fs::File; use core::task::{Poll, Context}; use core::pin::Pin; pub mod config; pub type Tx = mpsc::UnboundedSender<Command>; pub type Rx = mpsc::UnboundedReceiver<Command>; pub struct SharedConn { shared_conn: HashMap<String, Tx>, groups: HashMap<String, Vec<String>>, } impl SharedConn { pub fn new() -> SharedConn{ SharedConn{ shared_conn: HashMap::new(), groups: HashMap::new() } } pub fn add(&mut self, name: String, tx: Tx) -> Result<(), io::Error> { if self.shared_conn.contains_key(&name) { return Err(io::Error::new(io::ErrorKind::AlreadyExists, "A user with the same credentials is already loged in")); } self.shared_conn.insert(name, tx); Ok(()) } pub fn remove(&mut self, name: &str) -> Result<(), io::Error> { match self.shared_conn.remove(name) { Some(_) => Ok(()), None => Err(io::Error::new(io::ErrorKind::NotFound, "User not found")), } } pub async fn join_group(&mut self, group_name: &str, username: &str) -> Result<(), io::Error> { if let Some(group) = self.groups.get_mut(group_name) { if group.iter().find(|&x| *x == group_name.to_string()).is_some() { trace!("User {} wanted to join {} when already joined", username, group_name); return Ok(()) } else { group.push(username.to_string()); let notification = Command::ListUsr( group_name.to_string(), ListUsrOperation::Add, format!("\n{}", username)); self.send2group(username, group_name, None, &notification).await?; } } else { self.groups.insert(group_name.to_string(), vec![username.to_string()]); } Ok(()) } pub async fn leave_group(&mut self, username: &str, group_name: &str) -> Result<(), io::Error> { if let Some(group) = self.groups.get_mut(group_name) { if let Some(index) = group.iter().position(|name| name == username) { group.remove(index); let notification = Command::ListUsr( group_name.to_string(), ListUsrOperation::Remove, format!("\n{}", username)); self.send2group(group_name, group_name, Some(vec![&username]), &notification).await?; } else { return Err(io::Error::new(io::ErrorKind::NotFound, format!("User {} not found in {}", username, group_name))) } } else { return Err(io::Error::new(io::ErrorKind::NotFound, format!("Group {} not found", group_name))) } Ok(()) } pub async fn send(&mut self, command: Command) -> Result<(), io::Error>{ let (sender, target) = match &command { Command::Msg(s,t,_) => (s, t), _ => return Err(io::Error::new( io::ErrorKind::InvalidInput, "Wrong command type. Only MSG commands can be sended")), }; if target.starts_with("#") { return self.send2group(sender, target, None, &command).await; } let target_tx = match self.shared_conn.get_mut(&target.to_string()) { Some(t) => t, None => return Err(io::Error::new( io::ErrorKind::NotFound, format!("Target {} not connected or does not exist", target))), }; if let Err(_) = target_tx.send(command) { return Err(io::Error::new(io::ErrorKind::BrokenPipe, "Cannot transmit data to target")); } Ok(()) } async fn send2group(&mut self, sender: &str, target: &str, ignore: Option<Vec<&str>>, command: &Command) -> Result<(), io::Error>{ let group_users = match self.groups.get(target) { Some(g) => g, None => return Err(io::Error::new(io::ErrorKind::InvalidInput, format!("Target group {} does not exist", target))), }; if target != sender && group_users.iter().find(|&x| x == sender).is_none() { return Err(io::Error::new(io::ErrorKind::PermissionDenied, format!("send2group error, sender {} is not a memeber of {}", sender, target))); } for name in group_users { let to_ignore = match &ignore { Some(vec) => { vec.iter().find(|&x| x == name).is_some() }, None => false, }; if name != sender && !to_ignore { let user_tx = match self.shared_conn.get_mut(name) { Some(u) => u, None => return Err(io::Error::new(io::ErrorKind::NotFound, format!("sender {} cannot find user {} in group {}", sender, name, target))), }; if let Err(err) = user_tx.send(command.clone()) { return Err(io::Error::new(io::ErrorKind::PermissionDenied, format!("Cannot send command to {} @ {}, unable to send over Tx: {}", sender, target, err))); } } } Ok(()) } pub fn list_group(&self, group_name: &str) -> Result<Vec<String>, io::Error> { let group = match self.groups.get(group_name) { Some(g) => g, None => return Err(io::Error::new(io::ErrorKind::InvalidInput, format!("Group {} does not exist", group_name))), }; trace!("List of group {} is: {:?}", group_name, group); let max = ostrich_core::TXT_BYTES.len(); let mut users = vec![String::new()]; let mut count = 0; for usr in group { let name = format!("{}\n", usr); count += name.len(); let index = if count / max > 0 { (count/max) } else {0}; if index >= users.len() { users.push(String::new()); } users[index].push_str(&name); } Ok(users) } } pub struct Peer { socket: TcpStream, rx: Rx, pub groups: Vec<String>, } impl Peer { pub fn new(socket: TcpStream, rx: Rx) -> Peer { Peer{ socket, rx , groups: Vec::new()} } pub async fn send_command(&mut self, command: &Command) -> Result<usize, io::Error> { self.socket.write(&RawMessage::to_raw(command)?).await } pub async fn read_command(&mut self) -> Result<Option<Command>, io::Error> { let mut buffer = [0u8;PCK_SIZE]; let n = self.socket.read(&mut buffer).await?; if n == 0 { println!("returning ok"); return Ok(None); } let command = RawMessage::from_raw(&buffer)?; Ok(Some(command)) } } pub enum Message { ToSend(Command), Received(Command), } impl Stream for Peer { type Item = Result<Message, io::Error>; fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { if let Poll::Ready(Some(v)) = Pin::new(&mut self.rx).poll_next(cx) { return Poll::Ready(Some(Ok(Message::Received(v)))); } let mut data = [0u8; PCK_SIZE]; let n = match Pin::new(&mut self.socket).poll_read(cx, &mut data) { Poll::Ready(Ok(n)) => n, Poll::Ready(Err(err)) => return Poll::Ready(Some(Err(err))), Poll::Pending => return Poll::Pending, }; if n > 0 { let command = RawMessage::from_raw(&data)?; return Poll::Ready(Some(Ok(Message::ToSend(command)))); } else { return Poll::Ready(None); } } } #[derive(Debug)] #[derive(PartialEq)] struct User { pub name: String, password : String, } pub struct DataBase { db: Vec<User>, } impl DataBase { pub fn new(db_path: &str) -> Result<DataBase, io::Error> { let f = File::open(db_path)?; let mut buff = BufReader::new(f); let mut contents = String::new(); buff.read_to_string(&mut contents)?; let parsed = match json::parse(&contents) { Ok(db) => db, Err(e) => return Err(io::Error::new(io::ErrorKind::Other, e)), }; let mut db = Vec::new(); for user in parsed["users"].members() {
trace!("Init user: {}", name); let password = match user["password"].as_str() { Some(s) => s.to_string(), None => continue, }; db.push(User {name, password}); } Ok(DataBase {db}) } pub fn name_exists(&self, name: &str) -> bool { self.db.iter().find(|&x| x.name == name ).is_some() } pub fn check_log_in_credentials(&self, command: Command) -> Result<String, io::Error> { let (username, password) = match &command { Command::Usr(u, p) => (u, p), _ => return Err(io::Error::new(io::ErrorKind::InvalidInput, "Incorrect log in command")), }; let usr = User { name: username.clone().to_string(), password: password.to_string()}; match self.db.iter().position(|x| x.name == usr.name) { Some(index) => { if self.db[index] == usr { return Ok(username.to_string()); } return Err(io::Error::new(io::ErrorKind::PermissionDenied, "Wrong credentials")) }, None => return Ok(username.to_string()), } } }
let name = match user["name"].as_str() { Some(n) => n.to_string(), None => continue, };
assignment_statement
[ { "content": "use serde::Deserialize;\n\nuse toml;\n\n\n\nuse std::fs::File;\n\nuse std::io::Read;\n\n\n\n#[derive(Deserialize)]\n\npub struct Config {\n\n pub ip_address: String,\n\n pub port: usize,\n\n\n\n pub logger_file: String,\n\n pub database_file: String,\n\n}\n\n\n\nimpl Config {\n\n\n\n pub fn new(config_path: &str) -> Result<Config, Box<dyn std::error::Error>> {\n\n // Read configuration file\n\n let mut file = File::open(config_path)?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n // Deserialize the config string\n\n Ok(toml::from_str(&contents)?)\n\n }\n\n}\n", "file_path": "src/config.rs", "rank": 1, "score": 17775.43070705654 }, { "content": " while let Some(request) = user.next().await {\n\n match request {\n\n Ok(Message::Received(mesg)) => {\n\n // Send the received message to the target user \n\n if let Err(err) = user.send_command(&mesg).await {\n\n debug!(\"User {} error sending message: {}\", name, err);\n\n }\n\n },\n\n Ok(Message::ToSend(mesg)) => {\n\n // The server has received a message from the user,\n\n // normally its a message to forward to another user or group (MSG commad).\n\n // If the message is not a MSG command, process the command.\n\n match mesg {\n\n Command::Msg(_,_,_) => {\n\n // Send the message to the target \n\n if let Err(err) = shared_conn.lock().await.send(mesg).await {\n\n trace!(\"Error user {} when trying to send data: {}\", name, err);\n\n // Crate an error command\n\n let command = Command::Err(\n\n format!(\"unable to send message: {}\", err));\n", "file_path": "src/main.rs", "rank": 15, "score": 23.45740204556984 }, { "content": " if let Err(err) = shared_conn.lock().await.add(name.clone(), tx) {\n\n // The user is already loged in... so suspicious\n\n debug!(\"User {}, error: {}\", name, err.to_string()); \n\n let _ = user.send_command(&Command::Err(err.to_string())).await?;\n\n return Err(io::Error::new(io::ErrorKind::AlreadyExists,\n\n \"A user with the same credentials is already loged in\"));\n\n }\n\n // Notify the user for successful log in\n\n user.send_command(&Command::Ok).await?;\n\n name\n\n },\n\n Err(err) => {\n\n let _ = user.send_command(&Command::Err(err.to_string())).await;\n\n return Err(io::Error::new(io::ErrorKind::PermissionDenied, \n\n format!(\"Login error: {}\", err)));\n\n },\n\n };\n\n\n\n debug!(\"User {} loged in\", name);\n\n\n", "file_path": "src/main.rs", "rank": 16, "score": 23.337026003716332 }, { "content": " // Read the log in command from the user and parse to Command\n\n let login_command = match user.read_command().await {\n\n Ok(Some(login)) => login,\n\n Ok(None) => {\n\n debug!(\"Connection losed!\");\n\n return Ok(());\n\n },\n\n Err(e) => {\n\n debug!(\"User login error: {}\", e);\n\n return Ok(());\n\n },\n\n };\n\n // Check if the log in command is correct.\n\n // If the username is registered, check password.\n\n // Else, log in the user as anonymous user.\n\n let name = match db.lock().await.check_log_in_credentials(login_command) {\n\n Ok(name) => {\n\n // The crediantials where ok.\n\n // Check if a client with the same user is \n\n // already loged in or register the user\n", "file_path": "src/main.rs", "rank": 18, "score": 22.99107582638875 }, { "content": " tokio::spawn(async move {\n\n if let Err(e) = process(world, data, stream, addr).await {\n\n error!(\"User dropped with error, ERROR: {:?}\", e);\n\n }\n\n });\n\n }\n\n}\n\n\n\nasync fn process(shared_conn: Arc<Mutex<SharedConn>>,\n\n db: Arc<Mutex<DataBase>>,\n\n stream: TcpStream,\n\n addr: SocketAddr) -> Result<(), io::Error> {\n\n \n\n debug!(\"New connection from : {}\", addr);\n\n\n\n // Create a channel\n\n let (tx, rx) = mpsc::unbounded_channel(); \n\n\n\n let mut user = Peer::new(stream, rx);\n\n\n", "file_path": "src/main.rs", "rank": 19, "score": 22.185506192665688 }, { "content": " // Send error message to the user\n\n if let Err(err) = user.send_command(&command).await {\n\n debug!(\"Cannot send Err command to user {}: {}\",\n\n name, err);\n\n }\n\n }\n\n },\n\n Command::Join(join_name) => {\n\n // Determine if the user wants to join another user or a group\n\n if join_name.starts_with('#') {\n\n trace!(\"User {} wants to join group: {}\", name, join_name);\n\n \n\n // If the group exists, join the group, else, create it\n\n if let Err(err) = shared_conn.lock().await.join_group(&join_name, &name).await {\n\n debug!(\"User {} cannot join {}: {}\", name, join_name, err);\n\n\n\n // Send error to the user\n\n let command = Command::Err(\n\n format!(\"unable to send message: {}\", err));\n\n if let Err(err) = user.send_command(&command).await {\n", "file_path": "src/main.rs", "rank": 21, "score": 21.71298341695592 }, { "content": " debug!(\"Cannot send Err command to user {}: {}\",\n\n name, err);\n\n }\n\n } else {\n\n // The user successully joined the group, add the group name to the\n\n // list of groups of the user\n\n user.groups.push(join_name.clone());\n\n }\n\n } else {\n\n trace!(\"User {} wants to join user {}\", name, join_name);\n\n }\n\n },\n\n Command::Leave(target) => {\n\n // The user wants to leave a chat \n\n if target.starts_with('#') {\n\n if let Err(err) = shared_conn.lock().await.leave_group(&name, &target).await {\n\n warn!(\"Could not remove user {} from group {}: {}\", name, target, err);\n\n } else {\n\n trace!(\"User {} left group {}\", name, target);\n\n }\n", "file_path": "src/main.rs", "rank": 23, "score": 17.496150074341756 }, { "content": "use ostrich_core::*;\n\n\n\nuse tokio::net::{TcpListener, TcpStream};\n\nuse tokio::sync::{Mutex, mpsc};\n\n\n\nuse std::io;\n\nuse std::sync::Arc;\n\nuse std::net::SocketAddr;\n\nuse std::process;\n\n\n\nuse tokio::stream::{StreamExt};\n\nuse ostrich_server::{\n\n SharedConn, Message, Peer, \n\n // NOTE: Renamed to avoid conflic with simplelog::Config\n\n DataBase, config::Config as ServerConfig \n\n};\n\n\n\n#[macro_use] extern crate log;\n\nextern crate simplelog;\n\n\n", "file_path": "src/main.rs", "rank": 24, "score": 17.153123956746388 }, { "content": " debug!(\"Cannot send MSG command to user {}: {}\",\n\n name, err);\n\n }\n\n }\n\n\n\n } else {\n\n debug!(\"cannot list group {}\", gname);\n\n }\n\n },\n\n // Notify that a non valid command is sent\n\n _ => {\n\n trace!(\"User {} invaid command received\", name);\n\n user.send_command(\n\n &Command::Err(\n\n \"Unable to send non MSG command\".to_string()\n\n )\n\n ).await?;\n\n },\n\n }\n\n },\n", "file_path": "src/main.rs", "rank": 25, "score": 16.326106464105877 }, { "content": " }\n\n },\n\n Command::ListUsr(gname, _, _) => {\n\n // Check if the gname is really a group name (groups starts with #)\n\n if !gname.starts_with('#') {\n\n debug!(\"User {} trying to list a non group chat: '{}'\", name, gname);\n\n // Send error to the user\n\n let cmd = Command::Err(\n\n format!(\"Trying to list a non group chat: '{}'\", gname));\n\n if let Err(err) = user.send_command(&cmd).await {\n\n debug!(\"Cannot send Err command to user {}: {}\",\n\n name, err);\n\n }\n\n }\n\n \n\n trace!(\"User {} requests listing group: {}\", name, gname);\n\n if let Ok(usrs_list) = shared_conn.lock().await.list_group(&gname) {\n\n for set in usrs_list {\n\n let cmd = Command::ListUsr(gname.clone(), ListUsrOperation::Add, set);\n\n if let Err(err) = user.send_command(&cmd).await {\n", "file_path": "src/main.rs", "rank": 26, "score": 15.010151251667974 }, { "content": "\n\n eprintln!(\"Fatal: Could not initialize the logger: {}\", err);\n\n process::exit(-1);\n\n }\n\n\n\n info!(\"Ostrich server initialized!\");\n\n info!(\"logger's output file path: {}\", server_config.logger_file);\n\n\n\n // Load the DataBase \n\n let db = match DataBase::new(&server_config.database_file) {\n\n Ok(db) => {\n\n info!(\"Database loaded\");\n\n db\n\n },\n\n Err(err) => {\n\n error!(\"Database loading error: {}\", err);\n\n process::exit(1);\n\n },\n\n };\n\n let db = Arc::new(Mutex::new(db));\n", "file_path": "src/main.rs", "rank": 27, "score": 13.316785121853039 }, { "content": "\n\n let shared_conn = Arc::new(Mutex::new(SharedConn::new()));\n\n\n\n let addr = format!(\"{}:{}\", \n\n server_config.ip_address,\n\n server_config.port);\n\n\n\n // Bind a TCP listener to the socket address\n\n let mut listener = TcpListener::bind(&addr).await?;\n\n info!(\"server running on {}\", addr);\n\n\n\n loop {\n\n // Asynchronously wait for an inbound TcpStream.\n\n let (stream, addr) = listener.accept().await?;\n\n\n\n // Clone a handle to the `ConnectedUsers` state for the new connection.\n\n let world = Arc::clone(&shared_conn);\n\n let data = Arc::clone(&db);\n\n\n\n // Spawn our handler to be run asynchronously.\n", "file_path": "src/main.rs", "rank": 28, "score": 12.619977971041015 }, { "content": "\n\n Err(err) => {\n\n debug!(\"Error, user {}: {}\", name, err);\n\n },\n\n }\n\n }\n\n\n\n // Delete the user from Shared and for every group it's member of\n\n debug!(\"User {} loged out\", name);\n\n\n\n // Delete user for all the groups is in\n\n for group in user.groups {\n\n if let Err(err) = shared_conn.lock().await.leave_group(&name, &group).await {\n\n warn!(\"Could not remove user {} from group {}: {}\", name, group, err);\n\n } else {\n\n trace!(\"User {} left group {}\", name, group);\n\n }\n\n }\n\n // Delete user from shared \n\n if let Err(err) = shared_conn.lock().await.remove(&name) {\n\n debug!(\"Error, user {}: {}\", name, err); \n\n }\n\n\n\n \n\n Ok(())\n\n}\n", "file_path": "src/main.rs", "rank": 29, "score": 11.986630505467149 }, { "content": "use simplelog::*;\n\n\n\nuse std::fs::File;\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<(), io::Error> {\n\n\n\n let server_config = ServerConfig::new(\"config.toml\")\n\n .unwrap_or_else(|err| {\n\n eprintln!(\"Fatal error reading ostrich-server config file: {}\",\n\n err);\n\n process::exit(-1);\n\n });\n\n \n\n // Initialize server logger\n\n if let Err(err) = CombinedLogger::init(vec![\n\n TermLogger::new(LevelFilter::Trace, Config::default(), \n\n TerminalMode::Mixed).unwrap(),\n\n WriteLogger::new(LevelFilter::Info, Config::default(), \n\n File::create(server_config.logger_file.clone()).unwrap())]) {\n", "file_path": "src/main.rs", "rank": 30, "score": 10.862270265008338 } ]
Rust
victor/src/dom/mod.rs
servo/victor
dda60efe07dd4cae0e9d93780ed661d41efb16b9
mod html; use crate::style::StyleSetBuilder; use html5ever::tendril::StrTendril; use html5ever::{Attribute, ExpandedName, LocalName, QualName}; use std::borrow::Cow; use std::fmt; pub struct Document { nodes: Vec<Node>, style_elements: Vec<NodeId>, } pub struct Node { pub(crate) parent: Option<NodeId>, pub(crate) next_sibling: Option<NodeId>, pub(crate) previous_sibling: Option<NodeId>, pub(crate) first_child: Option<NodeId>, pub(crate) last_child: Option<NodeId>, pub(crate) data: NodeData, } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub(crate) struct NodeId(std::num::NonZeroUsize); impl Document { fn new() -> Self { let dummy = Node::new(NodeData::Document); let document_node = Node::new(NodeData::Document); Document { nodes: vec![dummy, document_node], style_elements: Vec::new(), } } fn document_node_id() -> NodeId { NodeId(std::num::NonZeroUsize::new(1).unwrap()) } pub fn parse_stylesheets(&self, style_set: &mut StyleSetBuilder) { for &id in &self.style_elements { let element = &self[id]; if let Some(type_attr) = element.as_element().unwrap().get_attr(&local_name!("type")) { if !type_attr.eq_ignore_ascii_case("text/css") { continue } } style_set.add_stylesheet(&self.child_text_content(id)) } } fn push_node(&mut self, node: Node) -> NodeId { let next_index = self.nodes.len(); self.nodes.push(node); NodeId(std::num::NonZeroUsize::new(next_index).unwrap()) } fn detach(&mut self, node: NodeId) { let (parent, previous_sibling, next_sibling) = { let node = &mut self[node]; ( node.parent.take(), node.previous_sibling.take(), node.next_sibling.take(), ) }; if let Some(next_sibling) = next_sibling { self[next_sibling].previous_sibling = previous_sibling } else if let Some(parent) = parent { self[parent].last_child = previous_sibling; } if let Some(previous_sibling) = previous_sibling { self[previous_sibling].next_sibling = next_sibling; } else if let Some(parent) = parent { self[parent].first_child = next_sibling; } } fn append(&mut self, parent: NodeId, new_child: NodeId) { self.detach(new_child); self[new_child].parent = Some(parent); if let Some(last_child) = self[parent].last_child.take() { self[new_child].previous_sibling = Some(last_child); debug_assert!(self[last_child].next_sibling.is_none()); self[last_child].next_sibling = Some(new_child); } else { debug_assert!(self[parent].first_child.is_none()); self[parent].first_child = Some(new_child); } self[parent].last_child = Some(new_child); } fn insert_before(&mut self, sibling: NodeId, new_sibling: NodeId) { self.detach(new_sibling); self[new_sibling].parent = self[sibling].parent; self[new_sibling].next_sibling = Some(sibling); if let Some(previous_sibling) = self[sibling].previous_sibling.take() { self[new_sibling].previous_sibling = Some(previous_sibling); debug_assert_eq!(self[previous_sibling].next_sibling, Some(sibling)); self[previous_sibling].next_sibling = Some(new_sibling); } else if let Some(parent) = self[sibling].parent { debug_assert_eq!(self[parent].first_child, Some(sibling)); self[parent].first_child = Some(new_sibling); } self[sibling].previous_sibling = Some(new_sibling); } fn child_text_content(&self, node: NodeId) -> Cow<StrTendril> { let mut link = self[node].first_child; let mut text = None; while let Some(child) = link { if let NodeData::Text { contents } = &self[child].data { match &mut text { None => text = Some(Cow::Borrowed(contents)), Some(text) => text.to_mut().push_tendril(&contents), } } link = self[child].next_sibling; } text.unwrap_or_else(|| Cow::Owned(StrTendril::new())) } pub(crate) fn root_element(&self) -> NodeId { let first_child; { let document_node = &self[Document::document_node_id()]; assert!(matches!(document_node.data, NodeData::Document)); assert!(document_node.parent.is_none()); assert!(document_node.next_sibling.is_none()); assert!(document_node.previous_sibling.is_none()); first_child = document_node.first_child } let mut root = None; for child in self.node_and_next_siblings(first_child.unwrap()) { match &self[child].data { NodeData::Doctype { .. } | NodeData::Comment { .. } | NodeData::ProcessingInstruction { .. } => {} NodeData::Document | NodeData::Text { .. } => { panic!("Unexpected node type under document node") } NodeData::Element(_) => { assert!(root.is_none(), "Found two root elements"); root = Some(child) } } } root.unwrap() } pub(crate) fn node_and_next_siblings<'a>( &'a self, node: NodeId, ) -> impl Iterator<Item = NodeId> + 'a { successors(Some(node), move |&node| self[node].next_sibling) } } impl std::ops::Index<NodeId> for Document { type Output = Node; #[inline] fn index(&self, id: NodeId) -> &Node { &self.nodes[id.0.get()] } } impl std::ops::IndexMut<NodeId> for Document { #[inline] fn index_mut(&mut self, id: NodeId) -> &mut Node { &mut self.nodes[id.0.get()] } } pub(crate) enum NodeData { Document, Doctype { _name: StrTendril, _public_id: StrTendril, _system_id: StrTendril, }, Text { contents: StrTendril, }, Comment { _contents: StrTendril, }, Element(ElementData), ProcessingInstruction { _target: StrTendril, _contents: StrTendril, }, } pub(crate) struct ElementData { pub(crate) name: QualName, pub(crate) attrs: Vec<Attribute>, pub(crate) mathml_annotation_xml_integration_point: bool, } impl ElementData { pub(crate) fn get_attr(&self, name: &LocalName) -> Option<&StrTendril> { let name = ExpandedName { ns: &ns!(), local: name, }; self.attrs.iter().find_map(|attr| { if attr.name.expanded() == name { Some(&attr.value) } else { None } }) } } #[test] #[cfg(target_pointer_width = "64")] fn size_of() { use std::mem::size_of; assert_eq!(size_of::<Node>(), 112); assert_eq!(size_of::<NodeData>(), 72); assert_eq!(size_of::<ElementData>(), 64); } impl Node { pub(crate) fn in_html_document(&self) -> bool { true } pub(crate) fn as_element(&self) -> Option<&ElementData> { match self.data { NodeData::Element(ref data) => Some(data), _ => None, } } pub(crate) fn as_text(&self) -> Option<&StrTendril> { match self.data { NodeData::Text { ref contents } => Some(contents), _ => None, } } fn new(data: NodeData) -> Self { Node { parent: None, previous_sibling: None, next_sibling: None, first_child: None, last_child: None, data: data, } } } impl fmt::Debug for Node { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let ptr: *const Node = self; f.debug_tuple("Node").field(&ptr).finish() } } fn successors<T, F>(first: Option<T>, mut succ: F) -> impl Iterator<Item = T> where F: FnMut(&T) -> Option<T>, { unfold(first, move |next| { next.take().map(|item| { *next = succ(&item); item }) }) } fn unfold<T, St, F>(initial_state: St, f: F) -> Unfold<St, F> where F: FnMut(&mut St) -> Option<T>, { Unfold { state: initial_state, f, } } struct Unfold<St, F> { state: St, f: F, } impl<T, St, F> Iterator for Unfold<St, F> where F: FnMut(&mut St) -> Option<T>, { type Item = T; fn next(&mut self) -> Option<Self::Item> { (self.f)(&mut self.state) } }
mod html; use crate::style::StyleSetBuilder; use html5ever::tendril::StrTendril; use html5ever::{Attribute, ExpandedName, LocalName, QualName}; use std::borrow::Cow; use std::fmt; pub struct Document { nodes: Vec<Node>, style_elements: Vec<NodeId>, } pub struct Node { pub(crate) parent: Option<NodeId>, pub(crate) next_sibling: Option<NodeId>, pub(crate) previous_sibling: Option<NodeId>, pub(crate) first_child: Option<NodeId>, pub(crate) last_child: Option<NodeId>, pub(crate) data: NodeData, } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub(crate) struct NodeId(std::num::NonZeroUsize); impl Document { fn new() -> Self { let dummy = Node::new(NodeData::Document); let document_node = Node::new(NodeData::Document); Document { nodes: vec![dummy, document_node], style_elements: Vec::new(), } } fn document_node_id() -> NodeId { NodeId(std::num::NonZeroUsize::new(1).unwrap()) } pub fn parse_stylesheets(&self, style_set: &mut StyleSetBuilder) { for &id in &self.style_elements { let element = &self[id]; if let Some(type_attr) = element.as_element().unwrap().get_attr(&local_name!("type")) { if !type_attr.eq_ignore_ascii_case("text/css") { continue } } style_set.add_stylesheet(&self.child_text_content(id)) } } fn push_node(&mut self, node: Node) -> NodeId { let next_index = self.nodes.len(); self.nodes.push(node); NodeId(std::num::NonZeroUsize::new(next_index).unwrap()) } fn detach(&mut self, node: NodeId) { let (parent, previous_sibling, next_sibling) = { let node = &mut self[node]; ( node.parent.take(), node.previous_sibling.take(), node.next_sibling.take(), ) }; if let Some(next_sibling) = next_sibling { self[next_sibling].previous_sibling = previous_sibling } else if let Some(parent) = parent { self[parent].last_child = previous_sibling; } if let Some(previous_sibling) = previous_sibling { self[previous_sibling].next_sibling = next_sibling; } else if let Some(parent) = parent { self[parent].first_child = next_sibling; } } fn append(&mut self, parent: NodeId, new_child: NodeId) { self.detach(new_child); self[new_child].parent = Some(parent); if let Some(last_child) = self[parent].last_child.take() { self[new_child].previous_sibling = Some(last_child); debug_assert!(self[last_child].next_sibling.is_none()); self[last_child].next_sibling = Some(new_child); } else { debug_assert!(self[parent].first_child.is_none()); self[parent].first_child = Some(new_child); } self[parent].last_child = Some(new_child); } fn insert_before(&mut self, sibling: NodeId, new_sibling: NodeId) { self.detach(new_sibling); self[new_sibling].parent = self[sibling].parent; self[new_sibling].next_sibling = Some(sibling); if let Some(previous_sibling) = self[sibling].previous_sibling.take() { self[new_sibling].previous_sibling = Some(previous_sibling); debug_assert_eq!(self[previous_sibling].next_sibling, Some(sibling)); self[previous_sibling].next_sibling = Some(new_sibling); } else if let Some(parent) = self[sibling].parent { debug_assert_eq!(self[parent].first_child, Some(sibling)); self[parent].first_child = Some(new_sibling); } self[sibling].previous_sibling = Some(new_sibling); } fn child_text_content(&self, node: NodeId) -> Cow<StrTendril> { let mut link = self[node].first_child; let mut text = None; while let Some(child) = link { if let NodeData::Text { contents } = &self[child].data { match &mut text { None => text = Some(Cow::Borrowed(contents)), Some(text) => text.to_mut().push_tendril(&contents), } } link = self[child].next_sibling; } text.unwrap_or_else(|| Cow::Owned(StrTendril::new())) } pub(crate) fn root_element(&self) -> NodeId { let first_child; { let document_node = &self[Document::document_node_id()]; assert!(matches!(document_node.data, NodeData::Document)); assert!(document_node.parent.is_none()); assert!(document_node.next_sibling.is_none()); assert!(document_node.previous_sibling.is_none()); first_child = document_node.first_child } let mut root = None; for child in self.node_and_next_siblings(first_child.unwrap()) { match &self[child].data { NodeData::Doctype { .. } | NodeData::Comment { .. } | NodeData::ProcessingInstruction { .. } => {} NodeData::Document | NodeData::Text { .. } => { panic!("Unexpected node type under document node") } NodeData::Element(_) => { assert!(root.is_none(), "Found two root elements"); root = Some(child) } } } root.unwrap() } pub(crate) fn node_and_next_siblings<'a>( &'a self, node: NodeId, ) -> impl Iterator<Item = NodeId> + 'a { successors(Some(node), move |&node| self[node].next_sibling) } } impl std::ops::Index<NodeId> for Document { type Output = Node; #[inline] fn index(&self, id: NodeId) -> &Node { &self.nodes[id.0.get()] } } impl std::ops::IndexMut<NodeId> for Document { #[inline] fn index_mut(&mut self, id: NodeId) -> &mut Node { &mut self.nodes[id.0.get()] } } pub(crate) enum NodeData { Document, Doctype { _name: StrTendril, _public_id: StrTendril, _system_id: StrTendril, }, Text { contents: StrTendril, }, Comment { _contents: StrTendril, }, Element(ElementData), ProcessingInstruction { _target: StrTendril, _contents: StrTendril, }, } pub(crate) struct ElementData { pub(crate) name: QualName, pub(crate) attrs: Vec<Attribute>, pub(crate) mathml_annotation_xml_integration_point: bool, } impl ElementData { pub(crate) fn get_attr(&self, name: &LocalName) -> Option<&StrTendril> { let name = ExpandedName { ns: &ns!(), local: name, }; self.attrs.iter().find_map(|attr| { if attr.name.expanded() == name { Some(&attr.value) } else { None } }) } } #[test] #[cfg(target_pointer_width = "64")] fn size_of() { use std::mem::size_of; assert_eq!(size_of::<Node>(), 112); assert_eq!(size_of::<NodeData>(), 72); assert_eq!(size_of::<ElementData>(), 64); } impl Node { pub(crate) fn in_html_document(&self) -> bool { true } pub(crate) fn as_element(&self) -> Option<&El
pub(crate) fn as_text(&self) -> Option<&StrTendril> { match self.data { NodeData::Text { ref contents } => Some(contents), _ => None, } } fn new(data: NodeData) -> Self { Node { parent: None, previous_sibling: None, next_sibling: None, first_child: None, last_child: None, data: data, } } } impl fmt::Debug for Node { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let ptr: *const Node = self; f.debug_tuple("Node").field(&ptr).finish() } } fn successors<T, F>(first: Option<T>, mut succ: F) -> impl Iterator<Item = T> where F: FnMut(&T) -> Option<T>, { unfold(first, move |next| { next.take().map(|item| { *next = succ(&item); item }) }) } fn unfold<T, St, F>(initial_state: St, f: F) -> Unfold<St, F> where F: FnMut(&mut St) -> Option<T>, { Unfold { state: initial_state, f, } } struct Unfold<St, F> { state: St, f: F, } impl<T, St, F> Iterator for Unfold<St, F> where F: FnMut(&mut St) -> Option<T>, { type Item = T; fn next(&mut self) -> Option<Self::Item> { (self.f)(&mut self.state) } }
ementData> { match self.data { NodeData::Element(ref data) => Some(data), _ => None, } }
function_block-function_prefixed
[ { "content": "pub fn layout(text: &str, style: &Style) -> Result<Document, FontError> {\n\n let page_size = style.page_size * Px::per_mm();\n\n let page_margin = SideOffsets::from_length_all_same(style.page_margin * Px::per_mm());\n\n let page = Rect::new(Point::origin(), page_size);\n\n let content_area = page.inner_rect(page_margin);\n\n let min_x = content_area.min_x_typed();\n\n let min_y = content_area.min_y_typed();\n\n let max_y = content_area.max_y_typed();\n\n let available_width = content_area.size.width_typed();\n\n\n\n let one_em = Length::<Em>::new(1.);\n\n let line_height = one_em * style.line_height;\n\n let half_leading = (line_height - one_em) / 2.;\n\n let baseline_y = half_leading + style.font.ascender();\n\n\n\n let font_size = style.font_size;\n\n let px_per_em = font_size / one_em;\n\n let line_height = line_height * px_per_em;\n\n let baseline_y = baseline_y * px_per_em;\n\n\n", "file_path": "victor/src/text_plain.rs", "rank": 0, "score": 166364.23352870086 }, { "content": "fn write_name<W: Write>(name: &[u8], w: &mut W) -> io::Result<()> {\n\n w.write_all(b\"/\")?;\n\n for &byte in name {\n\n match KIND[byte as usize] {\n\n CharKind::Regular => w.write_all(&[byte])?,\n\n CharKind::Whitespace | CharKind::Delimiter => {\n\n w.write_all(b\"#\")?;\n\n write_hex(byte, w)?\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\n// https://www.adobe.com/content/dam/acom/en/devnet/pdf/PDF32000_2008.pdf#G6.1839343\n", "file_path": "victor/src/pdf/object.rs", "rank": 2, "score": 124505.8622282611 }, { "content": "enum InlineLevel {\n\n Text(StrTendril),\n\n #[allow(unused)]\n\n Inline {\n\n style: Rc<ComputedValues>,\n\n first_fragment: bool,\n\n last_fragment: bool,\n\n children: Vec<InlineLevel>,\n\n },\n\n // Atomic {\n\n // style: Rc<ComputedValues>,\n\n // contents: FormattingContext,\n\n // },\n\n}\n", "file_path": "victor/src/layout/mod.rs", "rank": 3, "score": 121619.66359248011 }, { "content": "fn read_postscript_name(\n\n bytes: &[u8],\n\n table_directory: Slice<TableDirectoryEntry>,\n\n) -> Result<String, FontError> {\n\n /// Macintosh encodings seem to be ASCII-compatible, and a PostScript name is within ASCII\n\n fn decode_macintosh(string_bytes: &[u8]) -> String {\n\n String::from_utf8_lossy(string_bytes).into_owned()\n\n }\n\n\n\n /// Latin-1 range only\n\n fn decode_ucs2(string_bytes: &[u8]) -> String {\n\n string_bytes\n\n .chunks(2)\n\n .map(|chunk| {\n\n if chunk.len() < 2 || chunk[0] != 0 {\n\n '\\u{FFFD}'\n\n } else {\n\n chunk[1] as char\n\n }\n\n })\n", "file_path": "victor/src/fonts/mod.rs", "rank": 4, "score": 113106.11004875263 }, { "content": "type BoxTreeRoot = BlockFormattingContext;\n\n\n", "file_path": "victor/src/layout/mod.rs", "rank": 6, "score": 108222.02688860547 }, { "content": "#[inline]\n\nfn slice_to_10(buffer: &mut [u8; 20]) -> &mut [u8; 10] {\n\n let ptr = buffer as *mut [u8; 20] as *mut [u8; 10];\n\n unsafe { &mut *ptr }\n\n}\n\n\n", "file_path": "victor/src/pdf/syntax.rs", "rank": 7, "score": 100813.23658939176 }, { "content": "fn itoa_zero_padded(mut value: u32, buffer: &mut [u8; 10]) {\n\n for byte in buffer.iter_mut().rev() {\n\n *byte = b\"0123456789\"[(value % 10) as usize];\n\n value /= 10;\n\n }\n\n}\n\n\n\npub struct CountingWrite<'a, W: Write> {\n\n inner: &'a mut W,\n\n bytes_written: usize,\n\n}\n\n\n\nimpl<'a, W: Write> Write for CountingWrite<'a, W> {\n\n #[inline]\n\n fn write(&mut self, buffer: &[u8]) -> io::Result<usize> {\n\n let result = self.inner.write(buffer);\n\n if let Ok(bytes) = result {\n\n self.bytes_written += bytes;\n\n }\n\n result\n", "file_path": "victor/src/pdf/syntax.rs", "rank": 8, "score": 96303.66348106098 }, { "content": "#[test]\n\nfn render() {\n\n let style = text_plain::Style {\n\n page_size: Size::new(210., 297.),\n\n page_margin: Length::new(20.),\n\n font: BITSTREAM_VERA_SANS.get().unwrap(),\n\n font_size: Length::new(16.),\n\n line_height: 1.5,\n\n justify: true,\n\n };\n\n let pdf_bytes = text_plain::layout(ALICE, &style)\n\n .unwrap()\n\n .write_to_pdf_bytes();\n\n\n\n if env::var(\"VICTOR_WRITE_TO_TMP\").is_ok() {\n\n File::create(\"/tmp/alice.pdf\")\n\n .unwrap()\n\n .write_all(&pdf_bytes)\n\n .unwrap();\n\n }\n\n assert!(pdf_bytes == include_bytes!(\"alice.pdf\").as_ref());\n\n}\n", "file_path": "tests/alice.rs", "rank": 9, "score": 91659.15631776955 }, { "content": "#[test]\n\nfn pdf() {\n\n let pdf_bytes = doc().unwrap();\n\n\n\n if env::var(\"VICTOR_WRITE_TO_TMP\").is_ok() {\n\n File::create(\"/tmp/victor.pdf\")\n\n .unwrap()\n\n .write_all(&pdf_bytes)\n\n .unwrap();\n\n }\n\n if env::var(\"VICTOR_PRINT\").is_ok() {\n\n println!(\"{}\", String::from_utf8_lossy(&pdf_bytes));\n\n }\n\n let doc = PdfDocument::from_bytes(&pdf_bytes).unwrap();\n\n assert_eq!(\n\n doc.producer().unwrap().to_str().unwrap(),\n\n \"Victor <https://github.com/SimonSapin/victor>\"\n\n );\n\n\n\n let pages: Vec<_> = doc.pages().collect();\n\n assert_eq!(pages[0].size_in_css_px(), (140., 50.));\n", "file_path": "tests/pdf.rs", "rank": 10, "score": 91659.15631776955 }, { "content": "struct Sink {\n\n document: Document,\n\n quirks_mode: QuirksMode,\n\n}\n\n\n\nimpl Sink {\n\n fn new_node(&mut self, data: NodeData) -> NodeId {\n\n self.document.push_node(Node::new(data))\n\n }\n\n\n\n fn append_common<P, A>(&mut self, child: NodeOrText<NodeId>, previous: P, append: A)\n\n where\n\n P: FnOnce(&mut Document) -> Option<NodeId>,\n\n A: FnOnce(&mut Document, NodeId),\n\n {\n\n let new_node = match child {\n\n NodeOrText::AppendText(text) => {\n\n // Append to an existing Text node if we have one.\n\n if let Some(id) = previous(&mut self.document) {\n\n if let Node {\n", "file_path": "victor/src/dom/html.rs", "rank": 11, "score": 91405.86406053754 }, { "content": "#[doc(hidden)]\n\npub fn pixels_to_hex(pixels: &[u32]) -> String {\n\n pixels\n\n .iter()\n\n .map(|p| format!(\"{:08X}\", p))\n\n .collect::<Vec<_>>()\n\n .join(\", \")\n\n}\n", "file_path": "lester/src/lib.rs", "rank": 12, "score": 91326.63367112682 }, { "content": "#[allow(unused)]\n\nenum FormattingContext {\n\n // Not included: inline formatting context, which is always part of a block container\n\n Flow(BlockFormattingContext),\n\n // Replaced(ReplacedElement), // Not called FC in specs, but behaves close enough\n\n // Table(Table),\n\n // Other layout modes go here\n\n}\n\n\n", "file_path": "victor/src/layout/mod.rs", "rank": 13, "score": 89291.89307587745 }, { "content": "enum BlockContainer {\n\n BlockLevels(Vec<BlockLevel>),\n\n InlineFormattingContext(Vec<InlineLevel>),\n\n}\n\n\n", "file_path": "victor/src/layout/mod.rs", "rank": 14, "score": 89291.89307587745 }, { "content": "enum BlockLevel {\n\n #[allow(unused)]\n\n SameFormattingContextBlock {\n\n style: Rc<ComputedValues>,\n\n contents: BlockContainer,\n\n },\n\n // Other {\n\n // style: Rc<ComputedValues>,\n\n // contents: FormattingContext,\n\n // },\n\n}\n\n\n", "file_path": "victor/src/layout/mod.rs", "rank": 15, "score": 89291.89307587745 }, { "content": "struct Rewind<I>\n\nwhere\n\n I: Iterator,\n\n{\n\n inner: I,\n\n buffer: Option<I::Item>,\n\n}\n\n\n\nimpl<I> Rewind<I>\n\nwhere\n\n I: Iterator,\n\n{\n\n fn new(inner: I) -> Self {\n\n Rewind {\n\n inner,\n\n buffer: None,\n\n }\n\n }\n\n\n\n fn rewind(&mut self, item: I::Item) {\n", "file_path": "victor/src/text_plain.rs", "rank": 16, "score": 89248.12344580489 }, { "content": "#[derive(Copy, Clone)]\n\nstruct NodeRef<'a> {\n\n document: &'a Document,\n\n node: NodeId,\n\n}\n\n\n\nimpl<'a> std::fmt::Debug for NodeRef<'a> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n self.node.fmt(f)\n\n }\n\n}\n\n\n\nimpl<'a> NodeRef<'a> {\n\n fn node(self) -> &'a Node {\n\n &self.document[self.node]\n\n }\n\n}\n\n\n", "file_path": "victor/src/style/selectors.rs", "rank": 17, "score": 87134.9786209811 }, { "content": "#[test]\n\nfn blank_pdf() {\n\n static PDF_BYTES: &[u8] = include_bytes!(\"A4_one_empty_page.pdf\");\n\n let doc = PdfDocument::from_bytes(PDF_BYTES).unwrap();\n\n assert_eq!(doc.pages().len(), 1);\n\n assert!(doc.pages().nth(1).is_none());\n\n assert!(doc.pages().nth(2).is_none());\n\n let page = doc.pages().nth(0).unwrap();\n\n let (width, height) = page.size_in_ps_points();\n\n assert_approx_eq!(width, millimeters_to_poscript_points(210.));\n\n assert_approx_eq!(height, millimeters_to_poscript_points(297.));\n\n}\n\n\n", "file_path": "lester/tests/pdf.rs", "rank": 18, "score": 86606.5185260617 }, { "content": "#[test]\n\nfn invalid_png() {\n\n let bytes: &[u8] = b\"\\x89PNG\\rnot\";\n\n match ImageSurface::read_from_png(bytes) {\n\n Err(lester::LesterError::Cairo(ref err)) if err.description() == \"out of memory\" => {}\n\n Err(err) => panic!(\"expected 'out of memory' error, got {:?}\", err),\n\n Ok(_) => panic!(\"expected error\"),\n\n }\n\n}\n\n\n", "file_path": "lester/tests/png.rs", "rank": 19, "score": 86606.5185260617 }, { "content": "/// The unit of FWord and UFWord\n\nstruct FontDesignUnit;\n\n\n\n#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq)]\n\npub(crate) struct GlyphId(pub(crate) u16);\n\n\n\n#[derive(Debug)]\n\npub enum FontError {\n\n /// Victor only supports TrueType fonts at the moment.\n\n UnsupportedFormat,\n\n\n\n /// The font file contains an offset to beyond the end of the file.\n\n OffsetBeyondEof,\n\n\n\n /// The font file contains an offset that puts the end of the pointed object\n\n /// beyond the end of the file.\n\n OffsetPlusLengthBeyondEof,\n\n\n\n /// One of the required TrueType tables is missing in this font.\n\n MissingTable,\n\n\n", "file_path": "victor/src/fonts/mod.rs", "rank": 20, "score": 86274.78292950046 }, { "content": "#[derive(Default)]\n\nstruct InlineBuilderExtra {\n\n self_fragments_split_by_block_levels: Vec<(Vec<InlineLevel>, BlockLevel)>,\n\n}\n\n\n\nimpl PushBlock for InlineBuilderExtra {\n\n fn push_block(builder: &mut Builder<Self>, block: BlockLevel) {\n\n builder\n\n .extra\n\n .self_fragments_split_by_block_levels\n\n .push((builder.consecutive_inline_levels.take(), block))\n\n }\n\n}\n\n\n", "file_path": "victor/src/layout/box_generation.rs", "rank": 21, "score": 85098.28374515192 }, { "content": "#[test]\n\nfn round_trip_png() {\n\n static PNG_BYTES: &[u8] = include_bytes!(\"pattern_4x4.png\");\n\n let mut surface = ImageSurface::read_from_png(PNG_BYTES).unwrap();\n\n\n\n fn assert_expected_pixels(pixels: lester::Argb32Pixels) {\n\n assert_eq!(pixels.width, 4);\n\n assert_eq!(pixels.height, 4);\n\n // ARGB32\n\n const RED: u32 = 0xFFFF_0000;\n\n const BLUE: u32 = 0xFF00_00FF;\n\n #[rustfmt::skip]\n\n assert_eq!(\n\n pixels.buffer,\n\n &[\n\n RED, BLUE, BLUE, BLUE,\n\n BLUE, BLUE, BLUE, BLUE,\n\n BLUE, BLUE, BLUE, BLUE,\n\n BLUE, BLUE, BLUE, BLUE,\n\n ]\n\n );\n", "file_path": "lester/tests/png.rs", "rank": 22, "score": 84341.08093881668 }, { "content": "#[test]\n\nfn zero_bytes_png() {\n\n expect_io_error_kind(\n\n ImageSurface::read_from_png(\"\".as_bytes()),\n\n io::ErrorKind::UnexpectedEof,\n\n )\n\n}\n\n\n", "file_path": "lester/tests/png.rs", "rank": 23, "score": 84341.08093881668 }, { "content": "#[test]\n\nfn pattern_4x4_pdf() {\n\n static PDF_BYTES: &[u8] = include_bytes!(\"pattern_4x4.pdf\");\n\n let doc = PdfDocument::from_bytes(PDF_BYTES).unwrap();\n\n let page = doc.pages().next().unwrap();\n\n assert_eq!(page.size_in_ps_points(), (3., 3.));\n\n assert_eq!(page.size_in_css_px(), (4., 4.));\n\n\n\n let options = RenderOptions {\n\n // Apparently this (in addition to having `/Interpolate false` in the PDF file)\n\n // is required to convince Poppler to use use nearest-neighbor instead of bilinear\n\n // when \"interpolating\" a 4x4 image source into a 4x4 surface.\n\n for_printing: true,\n\n\n\n ..RenderOptions::default()\n\n };\n\n let mut surface = page.render_with_options(options).unwrap();\n\n const RED: u32 = 0xFFFF_0000;\n\n const BLUE: u32 = 0xFF00_00FF;\n\n #[rustfmt::skip]\n\n assert_pixels_eq!(\n", "file_path": "lester/tests/pdf.rs", "rank": 24, "score": 84341.08093881668 }, { "content": "#[test]\n\nfn forward_read_error() {\n\n struct InvalidDataRead;\n\n\n\n impl io::Read for InvalidDataRead {\n\n fn read(&mut self, _: &mut [u8]) -> io::Result<usize> {\n\n Err(io::ErrorKind::InvalidData.into())\n\n }\n\n }\n\n\n\n expect_io_error_kind(\n\n ImageSurface::read_from_png(InvalidDataRead),\n\n io::ErrorKind::InvalidData,\n\n )\n\n}\n\n\n", "file_path": "lester/tests/png.rs", "rank": 25, "score": 84341.08093881668 }, { "content": "#[test]\n\nfn zero_bytes_pdf() {\n\n match PdfDocument::from_bytes(b\"\") {\n\n Err(ref err)\n\n if err.description() == \"PDF document is damaged\"\n\n || err.description() == \"Failed to load document\" => {}\n\n Err(err) => panic!(\"expected 'damaged document' error, got {:?}\", err),\n\n Ok(_) => panic!(\"expected error\"),\n\n }\n\n}\n\n\n\nmacro_rules! assert_approx_eq {\n\n ($a: expr, $b: expr) => {{\n\n let a = ($a * 1000.).round() / 1000.;\n\n let b = ($b * 1000.).round() / 1000.;\n\n assert_eq!(a, b)\n\n }};\n\n}\n\n\n", "file_path": "lester/tests/pdf.rs", "rank": 26, "score": 84341.08093881668 }, { "content": "#[test]\n\nfn forward_write_error() {\n\n struct InvalidDataWrite;\n\n\n\n impl io::Write for InvalidDataWrite {\n\n fn write(&mut self, _: &[u8]) -> io::Result<usize> {\n\n Err(io::ErrorKind::InvalidData.into())\n\n }\n\n fn flush(&mut self) -> io::Result<()> {\n\n Ok(())\n\n }\n\n }\n\n\n\n let surface = ImageSurface::new_rgb24(4, 4).unwrap();\n\n expect_io_error_kind(\n\n surface.write_to_png(InvalidDataWrite),\n\n io::ErrorKind::InvalidData,\n\n )\n\n}\n\n\n", "file_path": "lester/tests/png.rs", "rank": 27, "score": 84341.08093881668 }, { "content": "#[test]\n\n#[should_panic(expected = \"panicking during read callback\")]\n\nfn forward_read_panic() {\n\n struct PanickingRead;\n\n\n\n impl io::Read for PanickingRead {\n\n fn read(&mut self, _: &mut [u8]) -> io::Result<usize> {\n\n panic!(\"panicking during read callback\")\n\n }\n\n }\n\n\n\n unreachable!(ImageSurface::read_from_png(PanickingRead).is_ok())\n\n}\n\n\n", "file_path": "lester/tests/png.rs", "rank": 28, "score": 84340.81740237722 }, { "content": "#[test]\n\n#[should_panic(expected = \"panicking during write callback\")]\n\nfn forward_write_panic() {\n\n struct PanickingWrite;\n\n\n\n impl io::Write for PanickingWrite {\n\n fn write(&mut self, _: &[u8]) -> io::Result<usize> {\n\n panic!(\"panicking during write callback\")\n\n }\n\n fn flush(&mut self) -> io::Result<()> {\n\n Ok(())\n\n }\n\n }\n\n\n\n let surface = ImageSurface::new_rgb24(4, 4).unwrap();\n\n unreachable!(surface.write_to_png(PanickingWrite).is_ok())\n\n}\n", "file_path": "lester/tests/png.rs", "rank": 29, "score": 84340.81740237722 }, { "content": "#[cfg(target_pointer_width = \"64\")]\n\nfn _assert_size_of() {\n\n let _ = std::mem::transmute::<Cmap, [u8; 24]>;\n\n let _ = std::mem::transmute::<Font, [u8; 112]>;\n\n}\n\n\n\nimpl Font {\n\n pub fn parse<B: Into<Cow<'static, [u8]>>>(bytes: B) -> Result<Arc<Self>, FontError> {\n\n Self::parse_cow(bytes.into())\n\n }\n\n\n\n fn parse_cow(bytes: Cow<'static, [u8]>) -> Result<Arc<Self>, FontError> {\n\n let mut font = Self::parse_without_cow_bytes_field(&bytes)?;\n\n font.bytes = bytes;\n\n Ok(Arc::new(font))\n\n }\n\n\n\n #[inline]\n\n fn parse_without_cow_bytes_field(bytes: &[u8]) -> Result<Self, FontError> {\n\n let bytes: &[u8] = &*bytes;\n\n let offset_table = Position::<OffsetSubtable>::initial();\n", "file_path": "victor/src/fonts/mod.rs", "rank": 30, "score": 83956.08705598957 }, { "content": "pub trait ToComputedValue {\n\n type Computed;\n\n fn to_computed(&self) -> Self::Computed;\n\n}\n\n\n\n#[derive(Copy, Clone, Parse)]\n\npub enum CssWideKeyword {\n\n Inherit,\n\n Initial,\n\n Unset,\n\n}\n\n\n\n/// https://drafts.csswg.org/css-display-3/#the-display-properties\n\n#[derive(Copy, Clone, ComputedAsSpecified)]\n\npub enum Display {\n\n None,\n\n Other {\n\n outside: DisplayOutside,\n\n inside: DisplayInside,\n\n },\n", "file_path": "victor/src/style/values/mod.rs", "rank": 31, "score": 82635.63377136024 }, { "content": "pub trait Parse: Sized {\n\n fn parse<'i, 't>(parser: &mut Parser<'i, 't>) -> Result<Self, PropertyParseError<'i>>;\n\n}\n\n\n", "file_path": "victor/src/style/values/mod.rs", "rank": 33, "score": 80803.09961409244 }, { "content": "struct BlockFormattingContext(BlockContainer);\n\n\n", "file_path": "victor/src/layout/mod.rs", "rank": 34, "score": 80297.1900642364 }, { "content": "fn find_element<'a, F>(\n\n document: &'a Document,\n\n first: Option<NodeId>,\n\n next: F,\n\n) -> Option<NodeRef<'a>>\n\nwhere\n\n F: Fn(&Node) -> Option<NodeId>,\n\n{\n\n let mut node = first?;\n\n loop {\n\n if document[node].as_element().is_some() {\n\n return Some(NodeRef { document, node })\n\n }\n\n node = next(&document[node])?\n\n }\n\n}\n\n\n\nimpl<'a> selectors::Element for NodeRef<'a> {\n\n type Impl = Impl;\n\n\n", "file_path": "victor/src/style/selectors.rs", "rank": 35, "score": 79397.64217692037 }, { "content": "#[proc_macro_derive(ReadFromBytes)]\n\npub fn derive_read_from_bytes(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let input: syn::DeriveInput = syn::parse(input).unwrap();\n\n let name = &input.ident;\n\n\n\n let tokens = quote! {\n\n impl crate::fonts::parsing::ReadFromBytes for #name {\n\n fn read_from(bytes: &[u8]) -> Result<Self, crate::fonts::FontError> {\n\n use crate::fonts::parsing::ReadFromBytes;\n\n ReadFromBytes::read_from(bytes).map(#name) // Assume single unnamed field\n\n }\n\n }\n\n };\n\n\n\n tokens.into()\n\n}\n", "file_path": "proc-macros/src/lib.rs", "rank": 36, "score": 75725.30455720332 }, { "content": "#[proc_macro_derive(ComputedAsSpecified)]\n\npub fn derive_computed_as_specified(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let input: syn::DeriveInput = syn::parse(input).unwrap();\n\n let name = &input.ident;\n\n\n\n let tokens = quote! {\n\n impl crate::style::values::ToComputedValue for #name {\n\n type Computed = Self;\n\n fn to_computed(&self) -> Self::Computed {\n\n std::clone::Clone::clone(self)\n\n }\n\n }\n\n };\n\n\n\n tokens.into()\n\n}\n\n\n", "file_path": "proc-macros/src/style.rs", "rank": 37, "score": 75725.30455720332 }, { "content": "#[proc_macro_derive(SfntTable, attributes(tag))]\n\npub fn derive_sfnt_table(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let input: syn::DeriveInput = syn::parse(input).unwrap();\n\n let name = &input.ident;\n\n\n\n let mut table_impl = quote!();\n\n for attr in &input.attrs {\n\n if let Some(syn::Meta::NameValue(ref meta)) = attr.interpret_meta() {\n\n if meta.ident == \"tag\" {\n\n if let syn::Lit::Str(ref tag) = meta.lit {\n\n let value = tag.value();\n\n assert_eq!(value.len(), 4);\n\n let tag = syn::LitByteStr::new(value.as_bytes(), tag.span());\n\n table_impl = quote! {\n\n #[warn(dead_code)]\n\n impl crate::fonts::SfntTable for #name {\n\n const TAG: Tag = Tag(*#tag);\n\n }\n\n };\n\n break\n\n }\n", "file_path": "proc-macros/src/lib.rs", "rank": 38, "score": 75725.30455720332 }, { "content": "fn write_hex<W: Write>(byte: u8, w: &mut W) -> io::Result<()> {\n\n const HEX_DIGITS: [u8; 16] = *b\"0123456789ABCDEF\";\n\n w.write_all(&[\n\n HEX_DIGITS[(byte >> 4) as usize],\n\n HEX_DIGITS[(byte & 0x0F) as usize],\n\n ])\n\n}\n\n\n", "file_path": "victor/src/pdf/object.rs", "rank": 39, "score": 75195.63296036166 }, { "content": "#[proc_macro_derive(Parse)]\n\npub fn derive_parse_single_keyword(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let input: syn::DeriveInput = syn::parse(input).unwrap();\n\n let name = &input.ident;\n\n\n\n let variants: Vec<_> = match input.data {\n\n syn::Data::Enum(data) => data\n\n .variants\n\n .into_iter()\n\n .map(|variant| variant.ident)\n\n .collect(),\n\n _ => panic!(\"derive(Parse) only supports enums\"),\n\n };\n\n\n\n let names: Vec<_> = variants\n\n .iter()\n\n .map(|ident| {\n\n let mut name = String::new();\n\n for c in ident.to_string().chars() {\n\n if c.is_ascii_lowercase() {\n\n name.push(c)\n", "file_path": "proc-macros/src/style.rs", "rank": 40, "score": 74595.82698052547 }, { "content": "fn doc() -> Result<Vec<u8>, FontError> {\n\n let vera = BITSTREAM_VERA_SANS.get()?;\n\n let noto = NOTO.get()?;\n\n let ahem = AHEM.get()?;\n\n let mut doc = Document::new();\n\n doc.add_page(Size::new(140., 50.))\n\n .show_text(&TextRun {\n\n segment: ShapedSegment::naive_shape(\"Têst→iimm\", vera)?,\n\n font_size: Length::new(15.),\n\n origin: point(10., 20.),\n\n })?\n\n .show_text(&TextRun {\n\n segment: ShapedSegment::naive_shape(\"pÉX\", ahem)?,\n\n font_size: Length::new(15.),\n\n origin: point(10., 40.),\n\n })?\n\n .show_text(&TextRun {\n\n segment: ShapedSegment::naive_shape(\"𐁉 𐁁𐀓𐀠𐀴𐀍\", noto)?,\n\n font_size: Length::new(15.),\n\n origin: point(65., 40.),\n\n })?;\n\n doc.add_page(Size::new(4., 4.))\n\n .set_color(&RGBA(0., 0., 1., 1.))\n\n .paint_rectangle(&rect(0., 1., 4., 3.))\n\n .set_color(&RGBA(1., 0., 0., 0.5))\n\n .paint_rectangle(&rect(0., 0., 1., 2.));\n\n\n\n Ok(doc.write_to_pdf_bytes())\n\n}\n\n\n", "file_path": "tests/pdf.rs", "rank": 41, "score": 74539.07044338726 }, { "content": "fn millimeters_to_poscript_points(mm: f64) -> f64 {\n\n let inches = mm / 25.4;\n\n inches * 72.\n\n}\n\n\n", "file_path": "lester/tests/pdf.rs", "rank": 42, "score": 73795.08079280116 }, { "content": "#[repr(u8)]\n\nenum CharKind {\n\n Whitespace,\n\n Delimiter,\n\n Regular,\n\n}\n\n\n\n/*\n\nkind = ['r'] * 256\n\nfor byte in [0, 0x09, 0x0A, 0x0C, 0x0D, 0x20]:\n\n kind[byte] = 'W'\n\nfor char in \"()<>[]{}/%\":\n\n kind[ord(char)] = 'D'\n\n\n\nfor line in range(32):\n\n print ' ',\n\n for column in range(8):\n\n byte = column + 8 * line\n\n print kind[byte] + ',',\n\n print '//',\n\n for column in range(8):\n", "file_path": "victor/src/pdf/object.rs", "rank": 44, "score": 57823.72889079938 }, { "content": "struct GraphicsState {\n\n non_stroking_color_rgb: (f32, f32, f32),\n\n alpha: f32,\n\n}\n\n\n\nmacro_rules! op {\n\n ( $self_: expr, $operator: expr ) => {\n\n op!($self_, $operator,)\n\n };\n\n ( $self_: expr, $operator: expr, $( $operands: expr ),*) => {\n\n {\n\n $(\n\n Object::from($operands).write(&mut $self_.operations).unwrap();\n\n $self_.operations.push(b' ');\n\n )*\n\n $self_.operations.extend(str::as_bytes($operator));\n\n $self_.operations.push(b'\\n');\n\n }\n\n }\n\n}\n", "file_path": "victor/src/pdf/convert.rs", "rank": 45, "score": 56923.718692495844 }, { "content": "fn main() {\n\n // We rely on poppler-glib’s dependency on cairo to link cairo.\n\n // If we make a second pkg-config call, rustc complains with\n\n // \"warning: redundant linker flag specified for library `cairo`\"\n\n\n\n pkg_config::Config::new()\n\n // FIXME: Do we actually require a more recent version than this?\n\n .atleast_version(\"0.16.0\")\n\n .probe(\"poppler-glib\")\n\n .unwrap();\n\n}\n", "file_path": "lester/build.rs", "rank": 46, "score": 56138.36943244784 }, { "content": "fn expect_io_error_kind<T>(result: Result<T, lester::LesterError>, expected_kind: io::ErrorKind) {\n\n match result {\n\n Err(lester::LesterError::Io(err)) => assert_eq!(\n\n err.kind(),\n\n expected_kind,\n\n \"Expected {:?} error, got {:?}\",\n\n expected_kind,\n\n err\n\n ),\n\n Err(err) => panic!(\"Expected an IO error, got {:?}\", err),\n\n Ok(_) => panic!(\"Expected an error\"),\n\n }\n\n}\n\n\n", "file_path": "lester/tests/png.rs", "rank": 47, "score": 55133.920025192754 }, { "content": "struct Context<'a> {\n\n document: &'a dom::Document,\n\n author_styles: &'a StyleSet,\n\n}\n\n\n", "file_path": "victor/src/layout/box_generation.rs", "rank": 48, "score": 54784.00557097285 }, { "content": "#[derive(Default)]\n\nstruct BlockContainerBuilderExtra {\n\n block_levels: Vec<BlockLevel>,\n\n}\n\n\n\nimpl PushBlock for BlockContainerBuilderExtra {\n\n fn push_block(builder: &mut Builder<Self>, block: BlockLevel) {\n\n if !builder.consecutive_inline_levels.is_empty() {\n\n builder.wrap_inlines_in_anonymous_block();\n\n }\n\n builder.extra.block_levels.push(block)\n\n }\n\n}\n\nimpl Builder<BlockContainerBuilderExtra> {\n\n fn wrap_inlines_in_anonymous_block(&mut self) {\n\n self.extra\n\n .block_levels\n\n .push(BlockLevel::SameFormattingContextBlock {\n\n style: ComputedValues::anonymous_inheriting_from(&self.style),\n\n contents: BlockContainer::InlineFormattingContext(\n\n self.consecutive_inline_levels.take(),\n", "file_path": "victor/src/layout/box_generation.rs", "rank": 49, "score": 53914.152049581564 }, { "content": "struct Builder<Extra> {\n\n style: Rc<ComputedValues>,\n\n consecutive_inline_levels: Vec<InlineLevel>,\n\n extra: Extra,\n\n}\n\n\n\nimpl<Extra: Default + PushBlock> Builder<Extra> {\n\n fn new(style: Rc<ComputedValues>) -> Self {\n\n Self {\n\n style,\n\n consecutive_inline_levels: Vec::new(),\n\n extra: Extra::default(),\n\n }\n\n }\n\n\n\n fn push_child_elements(&mut self, context: &Context, parent_element: dom::NodeId) {\n\n if let Some(first_child) = context.document[parent_element].first_child {\n\n for child in context.document.node_and_next_siblings(first_child) {\n\n match &context.document[child].data {\n\n dom::NodeData::Document\n", "file_path": "victor/src/layout/box_generation.rs", "rank": 50, "score": 53714.051731802654 }, { "content": "struct ByAddress<T>(T);\n\n\n\nimpl<T> hash::Hash for ByAddress<T>\n\nwhere\n\n T: Deref,\n\n T::Target: Sized,\n\n{\n\n fn hash<H>(&self, state: &mut H)\n\n where\n\n H: hash::Hasher,\n\n {\n\n (self.0.deref() as *const T::Target as usize).hash(state)\n\n }\n\n}\n\n\n\nimpl<T> PartialEq for ByAddress<T>\n\nwhere\n\n T: Deref,\n\n T::Target: Sized,\n\n{\n", "file_path": "victor/src/pdf/convert.rs", "rank": 51, "score": 51914.6132535699 }, { "content": "#[test]\n\nfn it_works() {\n\n tagged_union_with_jump_tables! {\n\n #[repr(u16)]\n\n enum Foo {\n\n V1(u8, String),\n\n V2(&'static str),\n\n V3,\n\n }\n\n\n\n fn get(&self, x: u8) -> (&str, u8) {\n\n match *self {\n\n Foo::V1(ref u, ref s) => { (&**s, *u) }\n\n Foo::V2(ref s) => { (s, x) }\n\n Foo::V3 => { (\"3\", x) }\n\n }\n\n }\n\n }\n\n assert_eq!(Foo::V1(1, \"\".into()).get(0), (\"\", 1));\n\n assert_eq!(Foo::V2(\"bar\").get(5), (\"bar\", 5));\n\n assert_eq!(Foo::V3.get(10), (\"3\", 10));\n\n}\n", "file_path": "victor/src/tagged_union_with_jump_tables.rs", "rank": 52, "score": 51437.32457459174 }, { "content": "fn _static_assert_size() {\n\n let _ = std::mem::transmute::<Object<'static>, [u8; 32]>;\n\n}\n\n\n\npub(crate) type KeyValuePairs<'a> = &'a [(&'a [u8], Object<'a>)];\n\n\n\n#[derive(Debug)]\n\npub(crate) struct Dictionary<'a> {\n\n pub prev: Option<&'a Dictionary<'a>>,\n\n pub pairs: KeyValuePairs<'a>,\n\n}\n\n\n\nmacro_rules! array {\n\n ($( $value: expr ),* ,) => {\n\n array![ $( $value ),* ]\n\n };\n\n ($( $value: expr ),*) => {\n\n &[ $( crate::pdf::object::Object::from($value) ),* ][..]\n\n }\n\n}\n", "file_path": "victor/src/pdf/object.rs", "rank": 53, "score": 51433.07213869314 }, { "content": "fn i16_from_bytes(bytes: [u8; 2]) -> i16 {\n\n unsafe { mem::transmute(bytes) }\n\n}\n\n\n", "file_path": "victor/src/fonts/parsing.rs", "rank": 54, "score": 43816.20881830698 }, { "content": "fn u16_from_bytes(bytes: [u8; 2]) -> u16 {\n\n unsafe { mem::transmute(bytes) }\n\n}\n\n\n", "file_path": "victor/src/fonts/parsing.rs", "rank": 55, "score": 43816.20881830698 }, { "content": "fn u32_from_bytes(bytes: [u8; 4]) -> u32 {\n\n unsafe { mem::transmute(bytes) }\n\n}\n\n\n\nimpl ReadFromBytes for i16 {\n\n fn read_from(bytes: &[u8]) -> Result<Self, FontError> {\n\n Ok(i16::from_be(i16_from_bytes(ReadFromBytes::read_from(\n\n bytes,\n\n )?)))\n\n }\n\n}\n\n\n\nimpl ReadFromBytes for u16 {\n\n fn read_from(bytes: &[u8]) -> Result<Self, FontError> {\n\n Ok(u16::from_be(u16_from_bytes(ReadFromBytes::read_from(\n\n bytes,\n\n )?)))\n\n }\n\n}\n\n\n", "file_path": "victor/src/fonts/parsing.rs", "rank": 56, "score": 43816.20881830698 }, { "content": "#[cfg(test)]\n\n#[macro_use]\n\nextern crate lester;\n\n#[cfg(test)]\n\n#[macro_use]\n\nextern crate victor;\n\n\n\n#[cfg(test)]\n\nmod alice;\n\n#[cfg(test)]\n\nmod pdf;\n", "file_path": "tests/lib.rs", "rank": 57, "score": 36947.32801709429 }, { "content": "use lester::{Backdrop, PdfDocument, RenderOptions};\n\nuse std::env;\n\nuse std::fs::File;\n\nuse std::io::Write;\n\nuse victor::fonts::{FontError, LazyStaticFont, BITSTREAM_VERA_SANS};\n\nuse victor::pdf::Document;\n\nuse victor::primitives::{point, rect, Length, Size, TextRun, RGBA};\n\nuse victor::text::ShapedSegment;\n\n\n\nstatic AHEM: LazyStaticFont = include_font!(\"fonts/ahem/ahem.ttf\");\n\nstatic NOTO: LazyStaticFont = include_font!(\"fonts/noto/NotoSansLinearB-Regular.ttf\");\n\n\n", "file_path": "tests/pdf.rs", "rank": 58, "score": 36945.028084621394 }, { "content": "use std::env;\n\nuse std::fs::File;\n\nuse std::io::Write;\n\nuse victor::fonts::BITSTREAM_VERA_SANS;\n\nuse victor::primitives::*;\n\nuse victor::text_plain;\n\n\n\nstatic ALICE: &'static str = include_str!(\"alice.txt\");\n\n\n\n#[test]\n", "file_path": "tests/alice.rs", "rank": 59, "score": 36944.98491011877 }, { "content": " assert_eq!(pages[1].size_in_css_px(), (4., 4.));\n\n\n\n // FIXME: find a way to round-trip code points without a glyph like '→'\n\n assert_eq!(\n\n pages[0].text().to_str().unwrap(),\n\n \"Têst iimm\\npÉX 𐁉 𐁁𐀓𐀠𐀴𐀍\"\n\n );\n\n assert_eq!(pages[1].text().to_str().unwrap(), \"\");\n\n\n\n if env::var(\"VICTOR_WRITE_TO_TMP\").is_ok() {\n\n pages[0]\n\n .render_with_dppx(3.)\n\n .unwrap()\n\n .write_to_png_file(\"/tmp/victor.png\")\n\n .unwrap()\n\n }\n\n let mut surface = pages[1].render().unwrap();\n\n const RED_: u32 = 0x8080_0000;\n\n const BLUE: u32 = 0xFF00_00FF;\n\n const BOTH: u32 = 0xFF80_007F;\n", "file_path": "tests/pdf.rs", "rank": 60, "score": 36943.53530866286 }, { "content": " const ____: u32 = 0x0000_0000;\n\n #[rustfmt::skip]\n\n assert_pixels_eq!(\n\n surface.pixels().buffer,\n\n &[\n\n RED_, ____, ____, ____,\n\n BOTH, BLUE, BLUE, BLUE,\n\n BLUE, BLUE, BLUE, BLUE,\n\n BLUE, BLUE, BLUE, BLUE,\n\n ]\n\n );\n\n\n\n let mut surface = pages[1]\n\n .render_with_options(RenderOptions {\n\n dppx_x: 2.0,\n\n dppx_y: 3.0,\n\n backdrop: Backdrop::White,\n\n ..RenderOptions::default()\n\n })\n\n .unwrap();\n", "file_path": "tests/pdf.rs", "rank": 61, "score": 36939.76923643046 }, { "content": " let pixels = surface.pixels();\n\n assert_eq!((pixels.width, pixels.height), (8, 12));\n\n {\n\n const RED_: u32 = 0xFFFF_7F7F;\n\n const ____: u32 = 0xFFFF_FFFF;\n\n #[rustfmt::skip]\n\n assert_pixels_eq!(\n\n pixels.buffer,\n\n &[\n\n RED_, RED_, ____, ____, ____, ____, ____, ____,\n\n RED_, RED_, ____, ____, ____, ____, ____, ____,\n\n RED_, RED_, ____, ____, ____, ____, ____, ____,\n\n BOTH, BOTH, BLUE, BLUE, BLUE, BLUE, BLUE, BLUE,\n\n BOTH, BOTH, BLUE, BLUE, BLUE, BLUE, BLUE, BLUE,\n\n BOTH, BOTH, BLUE, BLUE, BLUE, BLUE, BLUE, BLUE,\n\n BLUE, BLUE, BLUE, BLUE, BLUE, BLUE, BLUE, BLUE,\n\n BLUE, BLUE, BLUE, BLUE, BLUE, BLUE, BLUE, BLUE,\n\n BLUE, BLUE, BLUE, BLUE, BLUE, BLUE, BLUE, BLUE,\n\n BLUE, BLUE, BLUE, BLUE, BLUE, BLUE, BLUE, BLUE,\n\n BLUE, BLUE, BLUE, BLUE, BLUE, BLUE, BLUE, BLUE,\n\n BLUE, BLUE, BLUE, BLUE, BLUE, BLUE, BLUE, BLUE,\n\n ][..]\n\n );\n\n }\n\n\n\n assert!(pdf_bytes == include_bytes!(\"expected.pdf\").as_ref());\n\n}\n", "file_path": "tests/pdf.rs", "rank": 62, "score": 36937.01446418605 }, { "content": "use crate::fonts::{Em, Font, FontError, GlyphId};\n\nuse crate::primitives::Length;\n\nuse std::sync::Arc;\n\n\n\npub struct ShapedSegment {\n\n pub(crate) font: Arc<Font>,\n\n pub(crate) glyphs: Vec<GlyphId>,\n\n pub(crate) advance_width: Length<Em>,\n\n}\n\n\n\nimpl ShapedSegment {\n\n /// Simplistic text shaping:\n\n ///\n\n /// * No font fallback\n\n /// * No support for complex scripts\n\n /// * No ligatures\n\n /// * No kerning\n\n pub fn naive_shape(text: &str, font: Arc<Font>) -> Result<Self, FontError> {\n\n let mut glyphs = Vec::new();\n\n let mut advance_width = Length::new(0.);\n", "file_path": "victor/src/text.rs", "rank": 63, "score": 35899.73659909846 }, { "content": " for ch in text.chars() {\n\n let id = font.glyph_id(ch)?;\n\n advance_width += font.glyph_width(id)?;\n\n glyphs.push(id);\n\n }\n\n Ok(ShapedSegment {\n\n font,\n\n glyphs,\n\n advance_width,\n\n })\n\n }\n\n}\n", "file_path": "victor/src/text.rs", "rank": 64, "score": 35883.55760267913 }, { "content": "#[macro_use]\n\nextern crate lester;\n\n\n\nuse lester::{PdfDocument, RenderOptions};\n\nuse std::error::Error;\n\n\n\n#[test]\n", "file_path": "lester/tests/pdf.rs", "rank": 65, "score": 35528.17484879081 }, { "content": "use lester::ImageSurface;\n\nuse std::error::Error;\n\nuse std::io;\n\n\n\n#[test]\n", "file_path": "lester/tests/png.rs", "rank": 66, "score": 35524.89666596419 }, { "content": " }\n\n\n\n assert_expected_pixels(surface.pixels());\n\n\n\n let mut bytes = Vec::new();\n\n surface.write_to_png(&mut bytes).unwrap();\n\n\n\n let mut surface2 = ImageSurface::read_from_png(&*bytes).unwrap();\n\n assert_expected_pixels(surface2.pixels());\n\n}\n\n\n", "file_path": "lester/tests/png.rs", "rank": 67, "score": 35523.649351030945 }, { "content": " surface.pixels().buffer,\n\n &[\n\n RED, BLUE, BLUE, BLUE,\n\n BLUE, BLUE, BLUE, BLUE,\n\n BLUE, BLUE, BLUE, BLUE,\n\n BLUE, BLUE, BLUE, BLUE,\n\n ]\n\n );\n\n\n\n let mut surface = page\n\n .render_with_options(RenderOptions {\n\n dppx_x: 2.0,\n\n dppx_y: 3.0,\n\n ..RenderOptions::default()\n\n })\n\n .unwrap();\n\n let pixels = surface.pixels();\n\n assert_eq!((pixels.width, pixels.height), (8, 12));\n\n}\n", "file_path": "lester/tests/pdf.rs", "rank": 68, "score": 35521.878182510605 }, { "content": "use crate::fonts::{Em, FontDesignUnit};\n\nuse std::fmt::{self, Write};\n\n\n\npub(in crate::fonts) type FontDesignUnitsPerEmFactorU16 =\n\n euclid::TypedScale<u16, Em, FontDesignUnit>;\n\n\n\npub(in crate::fonts) type FWord = euclid::Length<i16, FontDesignUnit>;\n\npub(in crate::fonts) type UFWord = euclid::Length<u16, FontDesignUnit>;\n\n\n\n/// 32-bit signed fixed-point number (16.16)\n\n#[derive(Debug, Copy, Clone)]\n\npub(in crate::fonts) struct FixedPoint(pub u32);\n\n\n\n/// Instant in time as seconds since 1904-01-01 midnight UTC\n\n#[derive(Debug, Copy, Clone)]\n\npub(in crate::fonts) struct LongDateTime(pub i64);\n\n\n\n#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, ReadFromBytes)]\n\npub(in crate::fonts) struct Tag(pub [u8; 4]);\n\n\n", "file_path": "victor/src/fonts/types.rs", "rank": 69, "score": 34716.13480226576 }, { "content": "// ~~~~ Trait impls ~~~~\n\n\n\nimpl fmt::Debug for Tag {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n for &b in &self.0 {\n\n // ASCII printable or space\n\n f.write_char(if b' ' <= b && b <= b'~' { b } else { b'?' } as char)?\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl From<LongDateTime> for std::time::SystemTime {\n\n fn from(instant: LongDateTime) -> Self {\n\n use std::time::{Duration, UNIX_EPOCH};\n\n\n\n // `date --utc -d 1904-01-01 +%s`\n\n let truetype_epoch = UNIX_EPOCH - Duration::from_secs(2_082_844_800);\n\n\n\n let seconds_since_truetype_epoch = instant.0;\n\n if seconds_since_truetype_epoch >= 0 {\n\n truetype_epoch + Duration::from_secs(seconds_since_truetype_epoch as u64)\n\n } else {\n\n truetype_epoch - Duration::from_secs((-seconds_since_truetype_epoch) as u64)\n\n }\n\n }\n\n}\n", "file_path": "victor/src/fonts/types.rs", "rank": 70, "score": 34710.63849178151 }, { "content": " .attrs\n\n .iter()\n\n .map(|e| e.name.clone())\n\n .collect::<HashSet<_>>();\n\n element.attrs.extend(\n\n attrs\n\n .into_iter()\n\n .filter(|attr| !existing_names.contains(&attr.name)),\n\n );\n\n }\n\n\n\n fn remove_from_parent(&mut self, &target: &NodeId) {\n\n self.document.detach(target)\n\n }\n\n\n\n fn reparent_children(&mut self, &node: &NodeId, &new_parent: &NodeId) {\n\n let mut next_child = self.document[node].first_child;\n\n while let Some(child) = next_child {\n\n debug_assert_eq!(self.document[child].parent, Some(node));\n\n self.document.append(new_parent, child);\n\n next_child = self.document[child].next_sibling\n\n }\n\n }\n\n}\n", "file_path": "victor/src/dom/html.rs", "rank": 71, "score": 34611.1886103992 }, { "content": " self.new_node(NodeData::Comment { _contents: text })\n\n }\n\n\n\n fn create_pi(&mut self, target: StrTendril, data: StrTendril) -> NodeId {\n\n self.new_node(NodeData::ProcessingInstruction {\n\n _target: target,\n\n _contents: data,\n\n })\n\n }\n\n\n\n fn append(&mut self, &parent: &NodeId, child: NodeOrText<NodeId>) {\n\n self.append_common(\n\n child,\n\n |document| document[parent].last_child,\n\n |document, new_node| document.append(parent, new_node),\n\n )\n\n }\n\n\n\n fn append_before_sibling(&mut self, &sibling: &NodeId, child: NodeOrText<NodeId>) {\n\n self.append_common(\n", "file_path": "victor/src/dom/html.rs", "rank": 72, "score": 34609.31380113676 }, { "content": " child,\n\n |document| document[sibling].previous_sibling,\n\n |document, new_node| document.insert_before(sibling, new_node),\n\n )\n\n }\n\n\n\n fn append_based_on_parent_node(\n\n &mut self,\n\n element: &NodeId,\n\n prev_element: &NodeId,\n\n child: NodeOrText<NodeId>,\n\n ) {\n\n if self.document[*element].parent.is_some() {\n\n self.append_before_sibling(element, child)\n\n } else {\n\n self.append(prev_element, child)\n\n }\n\n }\n\n\n\n fn append_doctype_to_document(\n", "file_path": "victor/src/dom/html.rs", "rank": 73, "score": 34608.2003860319 }, { "content": " data: NodeData::Text { contents },\n\n ..\n\n } = &mut self.document[id]\n\n {\n\n contents.push_tendril(&text);\n\n return\n\n }\n\n }\n\n self.new_node(NodeData::Text { contents: text })\n\n }\n\n NodeOrText::AppendNode(node) => node,\n\n };\n\n\n\n append(&mut self.document, new_node)\n\n }\n\n}\n\n\n\nimpl TreeSink for Sink {\n\n type Handle = NodeId;\n\n type Output = Document;\n", "file_path": "victor/src/dom/html.rs", "rank": 74, "score": 34605.42664376047 }, { "content": " &mut self,\n\n name: StrTendril,\n\n public_id: StrTendril,\n\n system_id: StrTendril,\n\n ) {\n\n let node = self.new_node(NodeData::Doctype {\n\n _name: name,\n\n _public_id: public_id,\n\n _system_id: system_id,\n\n });\n\n self.document.append(Document::document_node_id(), node)\n\n }\n\n\n\n fn add_attrs_if_missing(&mut self, &target: &NodeId, attrs: Vec<Attribute>) {\n\n let element = if let NodeData::Element(element) = &mut self.document[target].data {\n\n element\n\n } else {\n\n panic!(\"not an element\")\n\n };\n\n let existing_names = element\n", "file_path": "victor/src/dom/html.rs", "rank": 75, "score": 34604.30830353766 }, { "content": " name: QualName,\n\n attrs: Vec<Attribute>,\n\n ElementFlags {\n\n mathml_annotation_xml_integration_point,\n\n ..\n\n }: ElementFlags,\n\n ) -> NodeId {\n\n let is_style = name.expanded() == expanded_name!(html \"style\");\n\n let element = self.new_node(NodeData::Element(ElementData {\n\n name,\n\n attrs,\n\n mathml_annotation_xml_integration_point,\n\n }));\n\n if is_style {\n\n self.document.style_elements.push(element)\n\n }\n\n element\n\n }\n\n\n\n fn create_comment(&mut self, text: StrTendril) -> NodeId {\n", "file_path": "victor/src/dom/html.rs", "rank": 76, "score": 34604.165787236794 }, { "content": "use super::*;\n\nuse html5ever::interface::tree_builder::{ElementFlags, NodeOrText, QuirksMode, TreeSink};\n\nuse html5ever::tendril::TendrilSink;\n\nuse html5ever::{parse_document, ExpandedName};\n\nuse std::borrow::Cow;\n\nuse std::collections::HashSet;\n\n\n\nimpl Document {\n\n pub fn parse_html(utf8_bytes: &[u8]) -> Self {\n\n let sink = Sink {\n\n document: Document::new(),\n\n quirks_mode: QuirksMode::NoQuirks,\n\n };\n\n parse_document(sink, Default::default())\n\n .from_utf8()\n\n .one(utf8_bytes)\n\n }\n\n}\n\n\n", "file_path": "victor/src/dom/html.rs", "rank": 77, "score": 34597.48749979849 }, { "content": " self.document[target]\n\n .as_element()\n\n .expect(\"not an element\")\n\n .name\n\n .expanded()\n\n }\n\n\n\n fn get_template_contents(&mut self, &target: &NodeId) -> NodeId {\n\n target\n\n }\n\n\n\n fn is_mathml_annotation_xml_integration_point(&self, &target: &NodeId) -> bool {\n\n self.document[target]\n\n .as_element()\n\n .expect(\"not an element\")\n\n .mathml_annotation_xml_integration_point\n\n }\n\n\n\n fn create_element(\n\n &mut self,\n", "file_path": "victor/src/dom/html.rs", "rank": 78, "score": 34593.05886652234 }, { "content": "\n\n fn finish(self) -> Document {\n\n self.document\n\n }\n\n\n\n fn parse_error(&mut self, _: Cow<'static, str>) {}\n\n\n\n fn get_document(&mut self) -> NodeId {\n\n Document::document_node_id()\n\n }\n\n\n\n fn set_quirks_mode(&mut self, mode: QuirksMode) {\n\n self.quirks_mode = mode;\n\n }\n\n\n\n fn same_node(&self, x: &NodeId, y: &NodeId) -> bool {\n\n x == y\n\n }\n\n\n\n fn elem_name<'a>(&'a self, &target: &'a NodeId) -> ExpandedName<'a> {\n", "file_path": "victor/src/dom/html.rs", "rank": 79, "score": 34591.91434510038 }, { "content": "use self::css_units::*;\n\nuse crate::fonts::{Em, Font, FontError};\n\nuse crate::pdf::Document;\n\nuse crate::primitives::{Length, Point, Rect, SideOffsets, Size, TextRun};\n\nuse crate::text::ShapedSegment;\n\nuse std::sync::Arc;\n\nuse xi_unicode::LineBreakIterator;\n\n\n\npub mod css_units {\n\n use crate::primitives::Scale;\n\n\n\n pub use crate::primitives::CssPx as Px;\n\n pub struct Mm;\n\n pub struct In;\n\n\n\n impl Mm {\n\n pub fn per_in() -> Scale<In, Self> {\n\n Scale::new(25.4)\n\n }\n\n }\n", "file_path": "victor/src/text_plain.rs", "rank": 80, "score": 34572.65958208732 }, { "content": " let mut pdf_doc = Document::new();\n\n let mut line_segments = Vec::new();\n\n\n\n let mut previous_break_position = 0;\n\n let mut segments = Rewind::new(LineBreakIterator::new(text).map(\n\n |(position, is_hard_break)| {\n\n let range = previous_break_position..position;\n\n previous_break_position = position;\n\n let text_segment = text[range].trim_right_matches('\\n');\n\n let segment = ShapedSegment::naive_shape(text_segment, style.font.clone())?;\n\n Ok((segment, is_hard_break))\n\n },\n\n ));\n\n\n\n 'pages: loop {\n\n let mut pdf_page = pdf_doc.add_page(page_size);\n\n let mut y = min_y;\n\n\n\n 'lines: loop {\n\n let mut total_width = Length::new(0.);\n", "file_path": "victor/src/text_plain.rs", "rank": 81, "score": 34568.043913907844 }, { "content": " impl Px {\n\n pub fn per_in() -> Scale<In, Self> {\n\n Scale::new(96.)\n\n }\n\n pub fn per_mm() -> Scale<Mm, Self> {\n\n Mm::per_in().inv() * Self::per_in()\n\n }\n\n }\n\n}\n\n\n\npub struct Style {\n\n pub page_size: Size<Mm>,\n\n pub page_margin: Length<Mm>,\n\n pub font: Arc<Font>,\n\n pub font_size: Length<Px>,\n\n pub line_height: f32,\n\n pub justify: bool,\n\n}\n\n\n", "file_path": "victor/src/text_plain.rs", "rank": 82, "score": 34565.85845167694 }, { "content": " assert!(self.buffer.is_none());\n\n self.buffer = Some(item)\n\n }\n\n}\n\n\n\nimpl<I> Iterator for Rewind<I>\n\nwhere\n\n I: Iterator,\n\n{\n\n type Item = I::Item;\n\n\n\n fn next(&mut self) -> Option<I::Item> {\n\n self.buffer.take().or_else(|| self.inner.next())\n\n }\n\n}\n", "file_path": "victor/src/text_plain.rs", "rank": 83, "score": 34559.30373608778 }, { "content": " if is_hard_break {\n\n justify = false;\n\n break\n\n }\n\n }\n\n\n\n let extra = available_width - total_width;\n\n let word_spacing = if justify && extra > Length::new(0.) {\n\n extra / (line_segments.len() - 1) as f32\n\n } else {\n\n Length::new(0.)\n\n };\n\n let baseline = y + baseline_y;\n\n let mut x = min_x;\n\n for segment in line_segments.drain(..) {\n\n let origin = Point::from_lengths(x, baseline);\n\n x += segment.advance_width * px_per_em + word_spacing;\n\n pdf_page.show_text(&TextRun {\n\n segment,\n\n font_size,\n", "file_path": "victor/src/text_plain.rs", "rank": 84, "score": 34558.723304133724 }, { "content": " let justify;\n\n loop {\n\n let (segment, is_hard_break) = match segments.next() {\n\n Some(result) => result?,\n\n // End of document\n\n // FIXME: use 'return' when lifetimes are non-lexical\n\n None => break 'pages,\n\n };\n\n\n\n let advance_width = segment.advance_width * px_per_em;\n\n let next_total_width = total_width + advance_width;\n\n if next_total_width > available_width && total_width > Length::new(0.) {\n\n // This segment doesn’t fit on this line, and isn’t the first on the line:\n\n // go to the next line.\n\n segments.rewind(Ok((segment, is_hard_break)));\n\n justify = style.justify;\n\n break\n\n }\n\n line_segments.push(segment);\n\n total_width = next_total_width;\n", "file_path": "victor/src/text_plain.rs", "rank": 85, "score": 34558.14296181109 }, { "content": " origin,\n\n })?;\n\n }\n\n\n\n y += line_height;\n\n if y > max_y {\n\n // We’ve reached the bottom of the page\n\n break\n\n }\n\n }\n\n }\n\n Ok(pdf_doc)\n\n}\n\n\n", "file_path": "victor/src/text_plain.rs", "rank": 86, "score": 34549.53134125415 }, { "content": "impl Document {\n\n pub fn new() -> Self {\n\n Document {\n\n in_progress: InProgressDoc::new(),\n\n }\n\n }\n\n\n\n pub fn add_page(&mut self, size: Size<CssPx>) -> Page {\n\n Page {\n\n in_progress: InProgressPage::new(&mut self.in_progress, size),\n\n }\n\n }\n\n\n\n /// Encode this document to PDF and write it into the file with the given name.\n\n pub fn write_to_pdf_file<P: AsRef<path::Path>>(&self, filename: P) -> Result<(), io::Error> {\n\n self.write_to_pdf(&mut io::BufWriter::new(fs::File::create(filename)?))\n\n }\n\n\n\n /// Encode this document to PDF and return a vector of bytes\n\n pub fn write_to_pdf_bytes(&self) -> Vec<u8> {\n", "file_path": "victor/src/pdf/mod.rs", "rank": 99, "score": 33815.53449628933 } ]
Rust
panbuild/projects.rs
louib/panbuild
2cb07ebd6c21d7f4f0fb588cb8ed4570f20d1344
use serde::{Deserialize, Serialize}; use std::path::Path; use std::process::Command; pub const CORE_PROJECTS: [&'static str; 20] = [ "https://git.savannah.gnu.org/cgit/bash.git", "https://git.savannah.gnu.org/cgit/make.git", "https://git.savannah.gnu.org/cgit/diffutils.git", "https://git.savannah.gnu.org/cgit/findutils.git", "https://git.savannah.gnu.org/cgit/gzip.git", "https://git.savannah.gnu.org/git/grep.git", "https://git.savannah.gnu.org/cgit/tar.git", "https://git.savannah.gnu.org/git/libtool.git", "https://git.lysator.liu.se/lsh/lsh.git", "https://git.savannah.gnu.org/cgit/gawk.git", "https://github.com/gwsw/less.git", "https://github.com/openbsd/src.git", "https://gcc.gnu.org/git/gcc.git", "https://git.sv.gnu.org/cgit/coreutils.git", "https://sourceware.org/git/binutils-gdb.git", "https://sourceware.org/git/glibc.git", "https://gitlab.gnome.org/GNOME/gtk.git", "https://gitlab.gnome.org/GNOME/glib.git", "https://dev.gnupg.org/source/gnupg.git", "https://gitlab.com/gnutls/gnutls.git", ]; #[derive(Serialize, Deserialize, Default)] pub struct SoftwareProject { pub id: String, pub name: String, pub summary: String, pub description: String, pub web_urls: Vec<String>, pub vcs_urls: Vec<String>, pub artifact_names: Vec<String>, pub build_systems: Vec<String>, pub maintainers: Vec<String>, pub default_branch: Option<String>, pub versions: Vec<String>, pub keywords: Vec<String>, pub root_hashes: Vec<String>, } impl SoftwareProject { pub fn harvest(repo_url: &str) -> SoftwareProject { let mut project = SoftwareProject::default(); let repo_path = crate::utils::clone_git_repo(repo_url).unwrap(); project.id = crate::utils::repo_url_to_reverse_dns(repo_url); for file_path in crate::utils::get_all_paths(Path::new(&repo_path)).unwrap() { let mut abstract_manifest = match crate::manifests::manifest::AbstractManifest::load_from_file(file_path.to_str().unwrap().to_string()) { Some(m) => m, None => continue, }; project.build_systems.push(abstract_manifest.get_type().unwrap().to_string()); } match crate::utils::get_git_repo_root_hashes(&repo_path) { Ok(root_hashes) => project.root_hashes = root_hashes, Err(e) => { log::warn!("Could not get root commit hashes for repo located at {}: {}.", &repo_path, e); } } project } pub fn merge(&mut self, other_project: &SoftwareProject) { for build_system in &other_project.build_systems { self.build_systems.push(build_system.clone()); } } } #[derive(Serialize, Deserialize, Default)] pub struct ProjectVersion { pub project_id: String, pub name: String, pub url: String, pub url_type: crate::modules::SourceType, pub tag: String, pub branch: String, pub sha256sum: String, pub dependencies: Vec<Dependency>, } #[derive(Serialize, Deserialize, Default)] pub struct Dependency { pub min_version: crate::version::SemanticVersion, pub max_version: crate::version::SemanticVersion, pub project_id: String, } pub fn get_modules() -> Vec<crate::modules::SoftwareModule> { let mut modules = vec![]; for project in crate::db::Database::get_all_projects() { for project_version in &project.versions { for artifact_name in &project.artifact_names { let mut module = crate::modules::SoftwareModule::default(); module.name = artifact_name.to_string(); module.version = project_version.to_string(); module.tag = project_version.to_string(); if project.vcs_urls.len() != 0 { module.url = project.vcs_urls[0].to_string(); } modules.push(module); } } } modules } pub fn get_project_tag_names() -> Vec<String> { return vec![]; } pub fn get_project_commit_signature() -> String { return String::from(""); }
use serde::{Deserialize, Serialize}; use std::path::Path; use std::process::Command; pub const CORE_PROJECTS: [&'static str; 20] = [ "https://git.savannah.gnu.org/cgit/bash.git", "https://git.savannah.gnu.org/cgit/make.git", "https://git.savannah.gnu.org/cgit/diffutils.git", "https://git.savannah.gnu.org/cgit/findutils.git", "https://git.savannah.gnu.org/cgit/gzip.git", "https://git.savannah.gnu.org/git/grep.git", "https://git.savannah.gnu.org/cgit/tar.git", "https://git.savannah.gnu.org/git/libtool.git", "https://git.lysator.liu.se/lsh/lsh.git", "https://git.savannah.gnu.org/cgit/gawk.git", "https://github.com/gwsw/less.git", "https://github.com/openbsd/src.git", "https://gcc.gnu.org/git/gcc.git", "https://git.sv.gnu.org/cgit/coreutils.git", "https://sourceware.org/git/binutils-gdb.git", "https://sourceware.org/git/glibc.git", "https://gitlab.gnome.org/GNOME/gtk.git", "https://gitlab.gnome.org/GNOME/glib.git", "https://dev.gnupg.org/source/gnupg.git", "https://gitlab.com/gnutls/gnutls.git", ]; #[derive(Serialize, Deserialize, Default)] pub struct SoftwareProject { pub id: String, pub name: String, pub summary: String, pub description: String, pub web_urls: Vec<String>, pub vcs_urls: Vec<String>, pub artifact_names: Vec<String>, pub build_systems: Vec<String>, pub maintainers: Vec<String>, pub default_branch: Option<String>, pub versions: Vec<String>, pub keywords: Vec<String>, pub root_hashes: Vec<String>, } impl SoftwareProject { pub fn harvest(repo_url: &str) -> SoftwareProject { let mut project = SoftwareProject::default(); let repo_path = crate::utils::clone_git_repo(repo_url).unwrap(); project.id = crate::utils::repo_url_to_reverse_dns(repo_url); for file_path in crate::utils::get_all_paths(Path::new(&repo_path)).unwrap() { let mut abstract_manifest = matc
pub fn merge(&mut self, other_project: &SoftwareProject) { for build_system in &other_project.build_systems { self.build_systems.push(build_system.clone()); } } } #[derive(Serialize, Deserialize, Default)] pub struct ProjectVersion { pub project_id: String, pub name: String, pub url: String, pub url_type: crate::modules::SourceType, pub tag: String, pub branch: String, pub sha256sum: String, pub dependencies: Vec<Dependency>, } #[derive(Serialize, Deserialize, Default)] pub struct Dependency { pub min_version: crate::version::SemanticVersion, pub max_version: crate::version::SemanticVersion, pub project_id: String, } pub fn get_modules() -> Vec<crate::modules::SoftwareModule> { let mut modules = vec![]; for project in crate::db::Database::get_all_projects() { for project_version in &project.versions { for artifact_name in &project.artifact_names { let mut module = crate::modules::SoftwareModule::default(); module.name = artifact_name.to_string(); module.version = project_version.to_string(); module.tag = project_version.to_string(); if project.vcs_urls.len() != 0 { module.url = project.vcs_urls[0].to_string(); } modules.push(module); } } } modules } pub fn get_project_tag_names() -> Vec<String> { return vec![]; } pub fn get_project_commit_signature() -> String { return String::from(""); }
h crate::manifests::manifest::AbstractManifest::load_from_file(file_path.to_str().unwrap().to_string()) { Some(m) => m, None => continue, }; project.build_systems.push(abstract_manifest.get_type().unwrap().to_string()); } match crate::utils::get_git_repo_root_hashes(&repo_path) { Ok(root_hashes) => project.root_hashes = root_hashes, Err(e) => { log::warn!("Could not get root commit hashes for repo located at {}: {}.", &repo_path, e); } } project }
function_block-function_prefixed
[ { "content": "pub fn normalize_name(name: &String) -> String {\n\n let mut response: String = \"\".to_string();\n\n for c in name.chars() {\n\n if c.is_alphabetic() || c.is_numeric() {\n\n response.push_str(&c.to_string());\n\n continue;\n\n }\n\n // We don't want to add multiple hyphens or dots in a row, and we want\n\n // to start the name with an alphanum character.\n\n if response.ends_with(\"-\") || response.ends_with(\".\") || response.is_empty() {\n\n continue;\n\n }\n\n response.push_str(&c.to_string());\n\n }\n\n response\n\n}\n\n\n\n// TODO migrate to pb-tools\n\npub struct PagedResponse {\n\n pub next_page_url: Option<String>,\n", "file_path": "panbuild/utils.rs", "rank": 0, "score": 207050.76663956896 }, { "content": "pub fn run(command_name: &str, args: HashMap<String, String>) -> i32 {\n\n logger::init();\n\n\n\n log::debug!(\"running command {}.\", command_name);\n\n\n\n let mut config = match crate::config::read_or_init_config() {\n\n Ok(c) => c,\n\n Err(e) => panic!(\"Could not load or init config: {}\", e),\n\n };\n\n\n\n if command_name == \"lint\" {\n\n let manifest_file_path = args.get(\"manifest_file_path\").expect(\"an input file is required!\");\n\n\n\n let mut abstract_manifest = match crate::manifests::manifest::AbstractManifest::load_from_file(manifest_file_path.to_string()) {\n\n Some(m) => m,\n\n None => return 1,\n\n };\n\n\n\n let manifest_dump = match abstract_manifest.dump() {\n\n Ok(d) => d,\n", "file_path": "panbuild/lib.rs", "rank": 2, "score": 192560.26870797065 }, { "content": "pub fn get_git_repo_root_hashes(repo_path: &str) -> Result<Vec<String>, String> {\n\n // FIXME there can actually be more than 1 parentless commit\n\n // in a git repo, in the case of a merger. A parentless commit\n\n // can also be found in multiple projects in the case of a fork.\n\n println!(\"Getting initial commit for repo at {}\", repo_path);\n\n\n\n let mut output = Command::new(\"git\")\n\n .arg(format!(\"--git-dir={}/.git\", repo_path).to_owned())\n\n .arg(\"rev-list\")\n\n .arg(\"--max-parents=0\".to_owned())\n\n .arg(\"HEAD\")\n\n .stdout(Stdio::piped())\n\n .spawn()\n\n .unwrap();\n\n\n\n let mut output = match output.wait_with_output() {\n\n Ok(o) => o,\n\n Err(e) => return Err(e.to_string()),\n\n };\n\n if !output.status.success() {\n\n return Err(\"Could not get root hashes.\".to_string());\n\n }\n\n let all_hashes = match std::str::from_utf8(&output.stdout) {\n\n Ok(v) => v,\n\n Err(e) => panic!(\"Invalid UTF-8 sequence: {}\", e),\n\n };\n\n\n\n Ok(all_hashes.split('\\n').map(|s| s.trim().to_string()).filter(|s| s.len() != 0).collect())\n\n}\n\n\n", "file_path": "panbuild/utils.rs", "rank": 3, "score": 185212.1732356059 }, { "content": "pub fn get_org_repos(org_name: &str) -> Vec<panbuild::projects::SoftwareProject> {\n\n let mut paged_response = get_repos(panbuild::utils::PagedRequest {\n\n domain: \"\".to_string(),\n\n token: None,\n\n next_page_url: Some(format!(\"https://api.github.com/orgs/{}/repos?type=all&per_page=100\", org_name)),\n\n });\n\n let mut all_projects = vec![];\n\n let mut projects = paged_response.results;\n\n while projects.len() > 0 {\n\n for project in projects {\n\n log::info!(\"Adding project {}.\", &project.name);\n\n all_projects.push(project);\n\n }\n\n\n\n if paged_response.next_page_url.is_none() {\n\n break;\n\n }\n\n\n\n paged_response = get_repos(panbuild::utils::PagedRequest {\n\n domain: \"\".to_string(),\n\n token: None,\n\n next_page_url: paged_response.next_page_url,\n\n });\n\n projects = paged_response.results;\n\n }\n\n all_projects\n\n}\n\n\n", "file_path": "pb-tools/hubs/github.rs", "rank": 4, "score": 181772.15628182905 }, { "content": "pub fn clone_git_repo(repo_url: &str) -> Result<String, String> {\n\n let project_id = repo_url_to_reverse_dns(repo_url);\n\n let repos_dir = get_repos_dir_path();\n\n let repo_dir = format!(\"{}/{}\", repos_dir, project_id);\n\n if Path::new(&repo_dir).is_dir() {\n\n return Ok(repo_dir);\n\n }\n\n if let Err(e) = fs::create_dir(&repo_dir) {\n\n return Err(e.to_string());\n\n }\n\n\n\n println!(\"Cloning repo {}\", repo_url);\n\n let mut output = Command::new(\"git\")\n\n .arg(\"clone\")\n\n .arg(repo_url)\n\n .arg(&repo_dir)\n\n .stdout(Stdio::piped())\n\n .spawn()\n\n .unwrap();\n\n\n", "file_path": "panbuild/utils.rs", "rank": 5, "score": 177013.8939444087 }, { "content": "///```\n\n///let mut reverse_dns = panbuild::utils::repo_url_to_reverse_dns(\"https://github.com/louib/panbuild.git\");\n\n///assert_eq!(reverse_dns, \"com.github.louib.panbuild\");\n\n///reverse_dns = panbuild::utils::repo_url_to_reverse_dns(\"https://gitlab.com/louib/panbuild.git\");\n\n///assert_eq!(reverse_dns, \"com.gitlab.louib.panbuild\");\n\n///reverse_dns = panbuild::utils::repo_url_to_reverse_dns(\"https://git.savannah.gnu.org/cgit/make.git\");\n\n///assert_eq!(reverse_dns, \"org.gnu.savannah.git.cgit.make\");\n\n///```\n\npub fn repo_url_to_reverse_dns(repo_url: &str) -> String {\n\n if !repo_url.starts_with(\"https://\") {\n\n panic!(\"Only supports https urls: {}\", repo_url);\n\n }\n\n if !repo_url.ends_with(\".git\") {\n\n panic!(\"Only supports git repositories: {}\", repo_url);\n\n }\n\n let mut sanitized_url = repo_url[8..].to_string();\n\n // Removing the .git at the end of the url.\n\n // There has to be a better way to do this...\n\n // But rust has no negative index for the list\n\n // comprehension.\n\n sanitized_url.pop();\n\n sanitized_url.pop();\n\n sanitized_url.pop();\n\n sanitized_url.pop();\n\n\n\n let mut repo_url_parts = sanitized_url.split(\"/\");\n\n let domain = repo_url_parts.next().unwrap();\n\n let mut reversed_domain: String = \"\".to_string();\n", "file_path": "panbuild/utils.rs", "rank": 7, "score": 171212.5736744554 }, { "content": "pub fn get_and_add_repos(domain: &str, token_env_var_name: &str, db: &mut panbuild::db::Database) {\n\n log::info!(\"Getting all projects from GitLab instance at {}.\", domain);\n\n let mut request = panbuild::utils::PagedRequest {\n\n domain: domain.to_string(),\n\n token: None,\n\n next_page_url: None,\n\n };\n\n if let Ok(token) = env::var(token_env_var_name) {\n\n // See https://docs.gitlab.com/ee/api/#oauth2-tokens\n\n // for documentation on OAuth authentication.\n\n request.token = Some(token);\n\n } else {\n\n log::warn!(\"No GitLab API token located at {} for instance at {}. Aborting.\", token_env_var_name, domain);\n\n return;\n\n }\n\n let mut paged_response = get_repos(request);\n\n\n\n let mut projects = paged_response.results;\n\n while projects.len() > 0 {\n\n for project in projects {\n", "file_path": "pb-tools/hubs/gitlab.rs", "rank": 8, "score": 170220.9596366093 }, { "content": "pub fn get_projects(formulae_url: &str) -> Vec<panbuild::projects::SoftwareProject> {\n\n let mut projects: Vec<panbuild::projects::SoftwareProject> = vec![];\n\n\n\n let client = reqwest::blocking::Client::builder().build().unwrap();\n\n\n\n // TODO make this really asynchronous with async/await.\n\n let mut response = match client.get(formulae_url).send() {\n\n Ok(r) => r,\n\n Err(e) => return vec![],\n\n };\n\n\n\n let brew_recipes: Vec<HomebrewRecipe> = match serde_json::from_str(&response.text().unwrap()) {\n\n Ok(r) => r,\n\n Err(e) => {\n\n eprintln!(\"Could not parse brew recipes {}.\", e);\n\n return vec![];\n\n }\n\n };\n\n\n\n for brew_recipe in brew_recipes {\n", "file_path": "pb-tools/hubs/brew.rs", "rank": 9, "score": 167682.81107145734 }, { "content": "pub fn run_command(abstract_manifest: &crate::manifests::manifest::AbstractManifest, command: &str) -> Result<String, String> {\n\n let flatpak_build_dir = path::Path::new(DEFAULT_FLATPAK_OUTPUT_DIR);\n\n if !flatpak_build_dir.is_dir() {\n\n return Err(\"Looks like this workspace was not built. Run `panbuild make` first.\".to_string());\n\n }\n\n\n\n let child = Command::new(\"flatpak-builder\")\n\n .arg(\"--run\")\n\n .arg(DEFAULT_FLATPAK_OUTPUT_DIR)\n\n .arg(&abstract_manifest.path)\n\n .arg(command)\n\n .stdout(Stdio::piped())\n\n .spawn()\n\n .unwrap();\n\n\n\n let output = match child.wait_with_output() {\n\n Ok(o) => o,\n\n Err(e) => return Err(e.to_string()),\n\n };\n\n if !output.status.success() {\n", "file_path": "panbuild/manifests/flatpak.rs", "rank": 10, "score": 166252.85373135709 }, { "content": "// TODO migrate to pb-tools\n\n/// See https://www.w3.org/wiki/LinkHeader\n\n///```\n\n///let link_header = r###\"\n\n///<https://gitlab.gnome.org/api/v4/projects?page=4&per_page=100>; rel=\"prev\",\n\n///<https://gitlab.gnome.org/api/v4/projects?page=6&per_page=100>; rel=\"next\",\n\n///<https://gitlab.gnome.org/api/v4/projects?page=1&per_page=100>; rel=\"first\",\n\n///<https://gitlab.gnome.org/api/v4/projects?page=118&per_page=100>; rel=\"last\"\n\n///\"###;\n\n///assert_eq!(\n\n/// panbuild::utils::get_next_page_url(link_header),\n\n/// Some(\"https://gitlab.gnome.org/api/v4/projects?page=6&per_page=100\".to_string()),\n\n///);\n\n///assert_eq!(\n\n/// panbuild::utils::get_next_page_url(\"\"),\n\n/// None,\n\n///);\n\n///\n\n///```\n\npub fn get_next_page_url(link_header: &str) -> Option<String> {\n\n log::debug!(\"Getting next page from header {}.\", link_header);\n\n for link in link_header.split(\",\") {\n\n let mut link_parts = link.split(\";\");\n\n let url = match link_parts.next() {\n\n Some(u) => u,\n\n None => continue,\n\n };\n\n let rel = match link_parts.next() {\n\n Some(u) => u,\n\n None => continue,\n\n };\n\n if !rel.contains(\"rel=\\\"next\\\"\") {\n\n continue;\n\n }\n\n let mut next_page_url = url.trim();\n\n next_page_url = &next_page_url[1..next_page_url.len() - 1];\n\n return Some(next_page_url.to_string());\n\n }\n\n None\n\n}\n\n\n", "file_path": "panbuild/utils.rs", "rank": 11, "score": 164832.92284535692 }, { "content": "pub fn fetch_file(file_url: String) -> Result<String, String> {\n\n let file_name_parts = file_url.split(\"/\");\n\n let file_name = file_name_parts.last().unwrap();\n\n\n\n println!(\"Getting file at {}\", file_url);\n\n let mut output = Command::new(\"wget\")\n\n .arg(file_url.to_string())\n\n .arg(\"-P /tmp/\")\n\n .stdout(Stdio::piped())\n\n .spawn()\n\n .unwrap();\n\n\n\n let local_file_path = \"/tmp/\".to_owned() + &file_name.to_owned();\n\n\n\n let mut output = match output.wait_with_output() {\n\n Ok(o) => o,\n\n Err(e) => return Err(e.to_string()),\n\n };\n\n if !output.status.success() {\n\n return Err(\"Could not fetch file.\".to_string());\n\n }\n\n\n\n Ok(local_file_path)\n\n}\n\n\n", "file_path": "panbuild/utils.rs", "rank": 12, "score": 144326.62986031908 }, { "content": "/// Setup the system\n\npub fn setup(abstract_manifest: &crate::manifests::manifest::AbstractManifest) -> Result<String, String> {\n\n let child = Command::new(\"flatpak\")\n\n .arg(\"remote-add\")\n\n .arg(\"--if-not-exists\")\n\n .arg(\"--user\")\n\n .arg(\"flathub\")\n\n .stdout(Stdio::piped())\n\n .spawn()\n\n .unwrap();\n\n\n\n let output = match child.wait_with_output() {\n\n Ok(o) => o,\n\n Err(e) => return Err(e.to_string()),\n\n };\n\n if !output.status.success() {\n\n return Ok(\"it went ok\".to_string());\n\n }\n\n Ok(String::from(\"lol\"))\n\n}\n\n\n", "file_path": "panbuild/manifests/flatpak.rs", "rank": 13, "score": 140045.14404744603 }, { "content": "// Gets the path the repos should be located at.\n\n// FIXME not sure this function belongs in utils...\n\npub fn get_repos_dir_path() -> String {\n\n if let Ok(path) = env::var(\"PB_REPOS_DIR_PATH\") {\n\n return path.to_string();\n\n }\n\n \"/tmp\".to_string()\n\n}\n\n\n", "file_path": "panbuild/utils.rs", "rank": 14, "score": 139211.9837369877 }, { "content": "pub fn run_build(abstract_manifest: &crate::manifests::manifest::AbstractManifest) -> Result<String, String> {\n\n let flatpak_cache_dir = path::Path::new(DEFAULT_FLATPAK_BUILDER_CACHE_DIR);\n\n if flatpak_cache_dir.is_dir() {\n\n let timestamp = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH);\n\n let backup_folder_name = format!(\"{}-{}\", DEFAULT_FLATPAK_BUILDER_CACHE_DIR.to_owned(), timestamp.unwrap().as_secs());\n\n println!(\"Making a backup of the flatpak-builder cache folder at {}\", backup_folder_name);\n\n\n\n let mut output = Command::new(\"cp\")\n\n .arg(\"-R\")\n\n .arg(DEFAULT_FLATPAK_BUILDER_CACHE_DIR)\n\n .arg(backup_folder_name)\n\n .spawn();\n\n\n\n let mut output = match output {\n\n Ok(o) => o,\n\n Err(e) => return Err(e.to_string()),\n\n };\n\n }\n\n\n\n let child = Command::new(\"flatpak-builder\")\n", "file_path": "panbuild/manifests/flatpak.rs", "rank": 15, "score": 138243.0383619457 }, { "content": "// Returns the updated list of modules in the manifest.\n\npub fn add_module(manifest: &mut FlatpakManifest, new_module: &SoftwareModule) -> Result<Vec<SoftwareModule>, String> {\n\n for module in &manifest.modules {\n\n if module.name == new_module.name {\n\n return Err(format!(\"Already a module named {}.\", module.name));\n\n }\n\n }\n\n let mut new_flatpak_module = FlatpakModule::default();\n\n new_flatpak_module.name = new_module.name.to_string();\n\n\n\n let mut flatpak_sources = FlatpakSource::default();\n\n flatpak_sources.r#type = \"git\".to_string(); // FIXME use the url_type\n\n flatpak_sources.url = Some(new_module.url.to_string());\n\n // This is the default, unless a version is explicitely declared.\n\n flatpak_sources.branch = Some(\"master\".to_string());\n\n new_flatpak_module.sources = vec![flatpak_sources];\n\n\n\n manifest.modules.insert(0, new_flatpak_module);\n\n\n\n Ok(manifest.get_modules())\n\n}\n\n\n", "file_path": "panbuild/manifests/flatpak.rs", "rank": 16, "score": 135883.57096513815 }, { "content": "pub fn ask_yes_no_question(question: String) -> bool {\n\n let mut answer = String::new();\n\n print!(\"{}? [Y/n]: \", question);\n\n let _ = stdout().flush();\n\n stdin().read_line(&mut answer).expect(\"Error while reading answer for question.\");\n\n if let Some('\\n') = answer.chars().next_back() {\n\n answer.pop();\n\n }\n\n if let Some('\\r') = answer.chars().next_back() {\n\n answer.pop();\n\n }\n\n if answer == \"Y\" || answer == \"y\" {\n\n return true;\n\n }\n\n return false;\n\n}\n\n\n", "file_path": "panbuild/utils.rs", "rank": 17, "score": 130853.57872963257 }, { "content": "pub fn read_config() -> Result<PanbuildConfig, String> {\n\n // Make that more robust maybe?\n\n let config_path = DEFAULT_CACHE_DIR.to_owned() + \"config.yaml\";\n\n let config_path = path::Path::new(&config_path);\n\n let config_content = match fs::read_to_string(config_path) {\n\n Ok(m) => m,\n\n Err(e) => return Err(format!(\"Failed to read the config file at {}\", config_path.to_str().unwrap_or(\"\"))),\n\n };\n\n\n\n let config: PanbuildConfig = match serde_yaml::from_str(&config_content) {\n\n Ok(m) => m,\n\n Err(e) => return Err(format!(\"Failed to parse the config file at {}: {}.\", config_path.to_str().unwrap_or(\"\"), e)),\n\n };\n\n Ok(config)\n\n}\n\n\n", "file_path": "panbuild/config.rs", "rank": 18, "score": 130853.57872963257 }, { "content": "pub fn file_path_matches(path: &str) -> bool {\n\n if path.to_uppercase().ends_with(\"PKGBUILD\") {\n\n return true;\n\n }\n\n // TODO not sure about this, but I think the source infos\n\n // are easier to parse than the pkg build, which is a script.\n\n // See https://wiki.archlinux.org/index.php/.SRCINFO\n\n if path.to_uppercase().ends_with(\".SRCINFO\") {\n\n return true;\n\n }\n\n return false;\n\n}\n", "file_path": "panbuild/manifests/arch.rs", "rank": 19, "score": 129592.25308528377 }, { "content": "pub fn read_or_init_config() -> Result<PanbuildConfig, String> {\n\n match read_config() {\n\n Ok(config) => Ok(config),\n\n Err(_) => match write_config(&PanbuildConfig::default()) {\n\n Ok(c) => return Ok(c),\n\n Err(e) => return Err(e),\n\n },\n\n }\n\n}\n", "file_path": "panbuild/config.rs", "rank": 20, "score": 128285.5707876455 }, { "content": "pub fn get_and_add_repos(db: &mut panbuild::db::Database) {\n\n log::info!(\"Getting all projects from github.com\");\n\n let mut request = panbuild::utils::PagedRequest {\n\n domain: \"\".to_string(),\n\n token: None,\n\n next_page_url: None,\n\n };\n\n let mut paged_response = get_repos(request);\n\n\n\n let mut projects = paged_response.results;\n\n while projects.len() > 0 {\n\n for project in projects {\n\n log::info!(\"Adding project {}.\", &project.name);\n\n db.add_project(project);\n\n }\n\n\n\n if paged_response.next_page_url.is_none() {\n\n break;\n\n }\n\n\n\n paged_response = get_repos(panbuild::utils::PagedRequest {\n\n domain: \"\".to_string(),\n\n token: paged_response.token,\n\n next_page_url: paged_response.next_page_url,\n\n });\n\n projects = paged_response.results;\n\n }\n\n}\n\n\n", "file_path": "pb-tools/hubs/github.rs", "rank": 21, "score": 119195.53690249496 }, { "content": "pub fn get_and_add_recipes(db: &mut panbuild::db::Database) {\n\n // All the formulae for macOS\n\n for project in get_projects(\"https://formulae.brew.sh/api/formula.json\") {\n\n db.add_project(project);\n\n }\n\n\n\n // All the formulae for Linux\n\n for project in get_projects(\"https://formulae.brew.sh/api/formula-linux.json\") {\n\n db.add_project(project);\n\n }\n\n\n\n // There are also the cask formulae, but they have a different format.\n\n // https://formulae.brew.sh/api/cask.json\n\n}\n\n\n", "file_path": "pb-tools/hubs/brew.rs", "rank": 22, "score": 119195.53690249496 }, { "content": "pub fn write_config(config: &PanbuildConfig) -> Result<PanbuildConfig, String> {\n\n let cache_dir = path::Path::new(DEFAULT_CACHE_DIR);\n\n if !cache_dir.is_dir() {\n\n match fs::create_dir(cache_dir) {\n\n Ok(_) => {}\n\n Err(e) => return Err(format!(\"Could not create cache dir at {}\", DEFAULT_CACHE_DIR)),\n\n };\n\n }\n\n\n\n let config_content = match serde_yaml::to_string(&config) {\n\n Ok(m) => m,\n\n Err(e) => return Err(format!(\"Failed to dump the config {}\", e)),\n\n };\n\n\n\n let config_path = DEFAULT_CACHE_DIR.to_owned() + \"config.yaml\";\n\n let config_path = path::Path::new(&config_path);\n\n match fs::write(config_path, config_content) {\n\n Ok(m) => m,\n\n Err(e) => return Err(format!(\"Failed to write the config file at {}: {}\", config_path.to_str().unwrap_or(\"\"), e)),\n\n };\n\n\n\n read_config()\n\n}\n\n\n", "file_path": "panbuild/config.rs", "rank": 23, "score": 119006.12862449184 }, { "content": "pub fn get_all_paths(dir: &Path) -> Result<Vec<std::path::PathBuf>, String> {\n\n let mut all_paths: Vec<std::path::PathBuf> = vec![];\n\n\n\n let dir_entries = match fs::read_dir(dir) {\n\n Ok(entries) => entries,\n\n Err(err) => return Ok(vec![]),\n\n };\n\n for entry in dir_entries {\n\n let entry_path = entry.unwrap().path();\n\n if entry_path.is_dir() {\n\n let mut dir_paths: Vec<std::path::PathBuf> = get_all_paths(&entry_path)?;\n\n all_paths.append(&mut dir_paths);\n\n } else {\n\n all_paths.push(entry_path);\n\n }\n\n }\n\n\n\n Ok(all_paths)\n\n}\n\n\n", "file_path": "panbuild/utils.rs", "rank": 24, "score": 109313.98034993713 }, { "content": "fn parse_paragraph(paragraph: &String) -> HashMap<String, String> {\n\n let mut fields: HashMap<String, String> = HashMap::new();\n\n let lines = paragraph.split(\"\\n\");\n\n\n\n let mut field_name: String = String::from(\"\");\n\n let mut field_value: String = String::from(\"\");\n\n\n\n for line in lines {\n\n if is_empty_line(line) {\n\n continue;\n\n }\n\n if is_commented_line(line) {\n\n continue;\n\n }\n\n if is_field_start(line) {\n\n if !field_name.is_empty() {\n\n fields.insert(field_name, field_value.trim().to_string());\n\n }\n\n\n\n let parts: Vec<&str> = line.split(CONTROL_FILE_SEPARATOR).collect();\n", "file_path": "panbuild/manifests/debian.rs", "rank": 26, "score": 105183.41244341081 }, { "content": "fn parse_paragraphs(content: &String) -> Vec<String> {\n\n let mut paragraphs = vec![];\n\n let lines = content.split(\"\\n\");\n\n let mut paragraph: String = String::from(\"\");\n\n\n\n for line in lines {\n\n if !is_empty_line(line) {\n\n paragraph.push_str(line);\n\n paragraph.push_str(\"\\n\");\n\n }\n\n if is_empty_line(line) && !paragraph.is_empty() {\n\n paragraphs.push(paragraph);\n\n paragraph = String::from(\"\");\n\n }\n\n }\n\n if !paragraph.is_empty() {\n\n paragraphs.push(paragraph);\n\n }\n\n paragraphs\n\n}\n\n\n", "file_path": "panbuild/manifests/debian.rs", "rank": 27, "score": 102126.21270169286 }, { "content": "pub fn init() {\n\n simple_logging::log_to_stderr(get_log_level());\n\n}\n\n\n", "file_path": "panbuild/logger.rs", "rank": 28, "score": 101362.94819752568 }, { "content": "pub fn is_setup(abstract_manifest: &crate::manifests::manifest::AbstractManifest) -> bool {\n\n let child = Command::new(\"flatpak\").arg(\"remote-list\").arg(\"--user\").stdout(Stdio::piped()).spawn().unwrap();\n\n\n\n let output = match child.wait_with_output() {\n\n Ok(o) => o,\n\n Err(e) => return false,\n\n };\n\n if !output.status.success() {\n\n return false;\n\n }\n\n let stdout = match str::from_utf8(&output.stdout) {\n\n Ok(v) => v,\n\n Err(e) => {\n\n log::warn!(\"Invalid UTF-8 sequence printed by `flatpak remote-list`.\");\n\n return false;\n\n }\n\n };\n\n return true;\n\n}\n\n\n", "file_path": "panbuild/manifests/flatpak.rs", "rank": 29, "score": 93434.87405303458 }, { "content": "fn is_commented_line(line: &str) -> bool {\n\n for c in line.chars() {\n\n if c == ' ' {\n\n continue;\n\n }\n\n if c == '\\t' {\n\n continue;\n\n }\n\n if c == '#' {\n\n return true;\n\n }\n\n return false;\n\n }\n\n return false;\n\n}\n\n\n\n// Currently the documentation comes from the Debian control file documentation.\n\npub enum Priority {\n\n // Packages which are necessary for the proper functioning of the system (usually,\n\n // this means that dpkg functionality depends on these packages).\n", "file_path": "panbuild/manifests/debian.rs", "rank": 30, "score": 92244.6171120668 }, { "content": "fn is_empty_line(line: &str) -> bool {\n\n for c in line.chars() {\n\n if c == ' ' {\n\n continue;\n\n }\n\n if c == '\\t' {\n\n continue;\n\n }\n\n return false;\n\n }\n\n return true;\n\n}\n\n\n", "file_path": "panbuild/manifests/debian.rs", "rank": 31, "score": 92244.6171120668 }, { "content": "fn is_field_start(line: &str) -> bool {\n\n for c in line.chars() {\n\n if c.is_alphanumeric() {\n\n continue;\n\n }\n\n if c == '-' {\n\n continue;\n\n }\n\n if c == CONTROL_FILE_SEPARATOR.chars().nth(0).unwrap() {\n\n return true;\n\n }\n\n return false;\n\n }\n\n return true;\n\n}\n\n\n", "file_path": "panbuild/manifests/debian.rs", "rank": 32, "score": 92244.6171120668 }, { "content": "pub fn load_manifest_from_config() -> Option<crate::manifests::manifest::AbstractManifest> {\n\n let mut config = match read_or_init_config() {\n\n Ok(c) => c,\n\n Err(e) => {\n\n eprintln!(\"Could not load or init config: {}\", e);\n\n return None;\n\n }\n\n };\n\n\n\n let workspace_name = match &config.current_workspace {\n\n Some(w) => w,\n\n None => {\n\n eprintln!(\"Not currently in a workspace. Use `ls` to list the available workspaces and manifests.\");\n\n return None;\n\n }\n\n };\n\n\n\n if !config.workspaces.contains_key(workspace_name) {\n\n eprintln!(\n\n \"Workspace {} does not exist. Use `ls` to list the available workspaces and manifests.\",\n", "file_path": "panbuild/config.rs", "rank": 33, "score": 77444.0518319746 }, { "content": "pub fn get_repos(request: panbuild::utils::PagedRequest) -> panbuild::utils::PagedResponse {\n\n // By default, we get all the repos.\n\n let mut current_url = format!(\"https://api.github.com/repositories?type=all&per_page=2\");\n\n if let Some(url) = request.next_page_url {\n\n current_url = url;\n\n }\n\n\n\n let mut projects: Vec<panbuild::projects::SoftwareProject> = vec![];\n\n let default_response = panbuild::utils::PagedResponse {\n\n results: vec![],\n\n token: None,\n\n next_page_url: None,\n\n };\n\n\n\n let mut headers = header::HeaderMap::new();\n\n // User agent is required when using the GitHub API.\n\n // See https://docs.github.com/en/rest/overview/resources-in-the-rest-api#user-agent-required\n\n headers.insert(\"User-Agent\", header::HeaderValue::from_str(\"panbuild\").unwrap());\n\n headers.insert(\"Accept\", header::HeaderValue::from_str(\"application/vnd.github.v3+json\").unwrap());\n\n if let Ok(token) = env::var(\"PB_GITHUB_TOKEN\") {\n", "file_path": "pb-tools/hubs/github.rs", "rank": 34, "score": 69824.74452787856 }, { "content": "pub fn get_repos(request: panbuild::utils::PagedRequest) -> panbuild::utils::PagedResponse {\n\n let mut current_url = format!(\"https://{}/api/v4/projects?per_page=100&simple=false\", request.domain);\n\n if let Some(url) = request.next_page_url {\n\n current_url = url;\n\n }\n\n\n\n let mut projects: Vec<panbuild::projects::SoftwareProject> = vec![];\n\n let default_response = panbuild::utils::PagedResponse {\n\n results: vec![],\n\n token: None,\n\n next_page_url: None,\n\n };\n\n\n\n let mut headers = header::HeaderMap::new();\n\n let auth_header_value = format!(\"Bearer {}\", request.token.as_ref().unwrap());\n\n let auth_header = header::HeaderValue::from_str(&auth_header_value.to_string()).unwrap();\n\n headers.insert(\"Authorization\", auth_header);\n\n let client = reqwest::blocking::Client::builder().default_headers(headers).build().unwrap();\n\n\n\n log::info!(\"Getting GitLab projects page at {}.\", current_url);\n", "file_path": "pb-tools/hubs/gitlab.rs", "rank": 35, "score": 69824.74452787856 }, { "content": "struct PanbuilbArguments {\n\n // TODO use enum for command name?\n\n command_name: String,\n\n arguments: Vec<String>,\n\n // TODO use enums for those?\n\n output_format: String,\n\n}\n\n\n", "file_path": "panbuild/lib.rs", "rank": 36, "score": 59180.42226273763 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\nstruct GitHubRepo {\n\n id: String,\n\n name: String,\n\n full_name: String,\n\n description: String,\n\n fork: bool,\n\n is_template: Option<bool>,\n\n archived: Option<bool>,\n\n disabled: Option<bool>,\n\n topics: Option<Vec<String>>,\n\n clone_url: Option<String>,\n\n git_url: Option<String>,\n\n homepage: Option<String>,\n\n forks_count: Option<i64>,\n\n stargazers_count: Option<i64>,\n\n watchers_count: Option<i64>,\n\n size: Option<i64>,\n\n default_branch: Option<String>,\n\n}\n\nimpl GitHubRepo {\n", "file_path": "pb-tools/hubs/github.rs", "rank": 37, "score": 55693.521570483485 }, { "content": "def normalize_project_name(project_name):\n\n response = project_name\n\n response = response.lower()\n\n # at least replacing spaces and tabs.\n\n response = response.replace(' ', '-')\n\n response = response.replace('\\t', '-')\n\n response = response.replace('\\n', '-')\n\n response = response.replace('é', 'e')\n\n response = response.replace('è', 'e')\n\n response = response.replace('ê', 'e')\n\n response = response.replace('ó', 'o')\n\n response = response.replace('ë', 'e')\n\n response = response.replace('à', 'a')\n", "file_path": "scripts/discover_repos.py", "rank": 38, "score": 53373.601285705896 }, { "content": "fn main() {\n\n let yaml = load_yaml!(\"panbuild.yml\");\n\n let panbuild_app: App = App::from_yaml(yaml).version(APP_VERSION);\n\n\n\n // Here we could use get_matches_safe and override the error messages.\n\n // See https://docs.rs/clap/2.33.1/clap/struct.App.html#method.get_matches_safe\n\n let matches: ArgMatches = panbuild_app.get_matches();\n\n\n\n if matches.is_present(\"version\") {\n\n println!(\"{}\", APP_VERSION);\n\n exit(0);\n\n }\n\n\n\n let command_name = matches.subcommand_name().unwrap_or(\"\");\n\n let mut flags: HashMap<String, bool> = HashMap::new();\n\n // let mut options: HashMap<String, bool> = HashMap::new();\n\n let mut arguments: HashMap<String, String> = HashMap::new();\n\n\n\n let command_name = match matches.subcommand_name() {\n\n Some(command_name) => command_name,\n", "file_path": "panbuild/main.rs", "rank": 39, "score": 51910.08546155177 }, { "content": "fn get_log_level() -> log::LevelFilter {\n\n let log_level_string = match env::var(\"PB_LOG_LEVEL\") {\n\n Ok(level) => level,\n\n Err(e) => return DEFAULT_LOG_LEVEL,\n\n };\n\n\n\n let log_level = match log::LevelFilter::from_str(&log_level_string) {\n\n Ok(level) => level,\n\n Err(e) => {\n\n eprintln!(\"Invalid log level value {}\", log_level_string);\n\n return DEFAULT_LOG_LEVEL;\n\n }\n\n };\n\n\n\n return log_level;\n\n}\n", "file_path": "panbuild/logger.rs", "rank": 40, "score": 44141.46720893855 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n// See https://github.com/rust-lang/rfcs/blob/master/text/1105-api-evolution.md\n\n// and https://doc.rust-lang.org/cargo/reference/manifest.html#the-version-field\n\n#[derive(Serialize, Deserialize, Default)]\n\npub struct SemanticVersion {\n\n pub major: i32,\n\n pub minor: i32,\n\n pub patch: i32,\n\n pub pre_release: String,\n\n pub build: String,\n\n}\n\nimpl SemanticVersion {\n\n // From https://semver.org/:\n\n // <valid semver> ::= <version core>\n\n // | <version core> \"-\" <pre-release>\n\n // | <version core> \"+\" <build>\n\n // | <version core> \"-\" <pre-release> \"+\" <build>\n\n pub fn parse(version: &String) -> Option<SemanticVersion> {\n\n let mut pre_release = \"\".to_string();\n", "file_path": "panbuild/version.rs", "rank": 41, "score": 35082.473432149534 }, { "content": " let mut build = \"\".to_string();\n\n\n\n let parts: Vec<&str> = version.split('+').collect();\n\n if parts.len() == 2 {\n\n build = parts[1].to_string();\n\n }\n\n\n\n let parts: Vec<&str> = parts[0].split('-').collect();\n\n if parts.len() == 2 {\n\n pre_release = parts[1].to_string();\n\n }\n\n\n\n let version_core = parts[0].trim().to_string();\n\n let version_parts: Vec<&str> = version_core.split('.').collect();\n\n if version_parts.len() != 3 {\n\n eprintln!(\"Invalid semantic version {}.\", version_core);\n\n return None;\n\n }\n\n\n\n let major: i32 = match version_parts[0].parse() {\n", "file_path": "panbuild/version.rs", "rank": 42, "score": 35070.28503208296 }, { "content": " assert!(SemanticVersion::parse(&\"1.2.fsdfsd\".to_string()).is_none());\n\n }\n\n\n\n #[test]\n\n pub fn test_parse_simple_version() {\n\n let mut sem_ver = SemanticVersion::parse(&\"1.2.3\".to_string());\n\n assert!(sem_ver.is_some());\n\n let sem_ver = sem_ver.unwrap();\n\n assert_eq!(sem_ver.major, 1);\n\n assert_eq!(sem_ver.minor, 2);\n\n assert_eq!(sem_ver.patch, 3);\n\n }\n\n\n\n #[test]\n\n pub fn test_parse_version_with_release() {\n\n let mut sem_ver = SemanticVersion::parse(&\"1.2.3-alpha\".to_string());\n\n assert!(sem_ver.is_some());\n\n let sem_ver = sem_ver.unwrap();\n\n assert_eq!(sem_ver.major, 1);\n\n assert_eq!(sem_ver.minor, 2);\n", "file_path": "panbuild/version.rs", "rank": 43, "score": 35068.92852505246 }, { "content": "\n\n Some(SemanticVersion {\n\n major: major,\n\n minor: minor,\n\n patch: patch,\n\n pre_release: pre_release,\n\n build: build,\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n pub fn test_parse_invalid_strings() {\n\n assert!(SemanticVersion::parse(&\"not a version\".to_string()).is_none());\n\n assert!(SemanticVersion::parse(&\"fdsfsdfd.2.3\".to_string()).is_none());\n\n assert!(SemanticVersion::parse(&\"1.fsdfsd.3\".to_string()).is_none());\n", "file_path": "panbuild/version.rs", "rank": 44, "score": 35068.65444657248 }, { "content": " assert_eq!(sem_ver.patch, 3);\n\n assert_eq!(sem_ver.pre_release, \"alpha\".to_string());\n\n }\n\n\n\n #[test]\n\n pub fn test_parse_version_with_build() {\n\n let mut sem_ver = SemanticVersion::parse(&\"1.2.3+build\".to_string());\n\n assert!(sem_ver.is_some());\n\n let sem_ver = sem_ver.unwrap();\n\n assert_eq!(sem_ver.major, 1);\n\n assert_eq!(sem_ver.minor, 2);\n\n assert_eq!(sem_ver.patch, 3);\n\n assert_eq!(sem_ver.build, \"build\".to_string());\n\n }\n\n\n\n #[test]\n\n pub fn test_parse_version_with_build_and_release() {\n\n let mut sem_ver = SemanticVersion::parse(&\"1.2.3-alpha+build\".to_string());\n\n assert!(sem_ver.is_some());\n\n let sem_ver = sem_ver.unwrap();\n\n assert_eq!(sem_ver.major, 1);\n\n assert_eq!(sem_ver.minor, 2);\n\n assert_eq!(sem_ver.patch, 3);\n\n assert_eq!(sem_ver.build, \"build\".to_string());\n\n assert_eq!(sem_ver.pre_release, \"alpha\".to_string());\n\n }\n\n}\n", "file_path": "panbuild/version.rs", "rank": 45, "score": 35068.05134507292 }, { "content": " Ok(v) => v,\n\n Err(e) => {\n\n eprintln!(\"Invalid major version {}\", version_parts[0]);\n\n return None;\n\n }\n\n };\n\n let minor: i32 = match version_parts[1].parse() {\n\n Ok(v) => v,\n\n Err(e) => {\n\n eprintln!(\"Invalid minor version {}\", version_parts[1]);\n\n return None;\n\n }\n\n };\n\n let patch: i32 = match version_parts[2].parse() {\n\n Ok(v) => v,\n\n Err(e) => {\n\n eprintln!(\"Invalid patch version {}\", version_parts[2]);\n\n return None;\n\n }\n\n };\n", "file_path": "panbuild/version.rs", "rank": 46, "score": 35060.2270993667 }, { "content": "\"\"\"\n\nMerge multiple projects files in a directory.\n\n\"\"\"\n\nimport json\n\nimport re\n\nimport sys\n\nimport os\n\n\n\nOUTPUT_FILE_NAME = 'all_projects.json'\n\n\n\n\n\nif __name__ == '__main__':\n\n projects_dir = os.environ.get('PB_OUT_DIR', '')\n\n all_projects = []\n\n all_projects_filtered = []\n\n num_files = 0\n\n num_projects_initial = 0\n\n\n\n if not projects_dir:\n\n print('Must define PB_OUT_DIR!')\n\n sys.exit(1)\n\n\n\n if not os.path.isdir(projects_dir):\n\n print('{} is not a directory!'.format(projects_dir))\n\n sys.exit(1)\n\n\n\n for file_path in os.listdir(projects_dir):\n\n # Not merge the projects from our own output!\n\n if OUTPUT_FILE_NAME in file_path:\n\n continue\n\n\n\n try:\n\n file_full_path = projects_dir + file_path\n\n print('opening file {}'.format(file_full_path))\n\n projects = json.loads(open(file_full_path, 'r').read())\n\n all_projects.extend(projects)\n\n num_files += 1\n\n num_projects_initial += len(projects)\n\n except Exception as e:\n\n print(e)\n\n continue\n\n\n\n print('Loaded {} projects for {} files.'.format(len(all_projects), num_files))\n\n\n\n for project in all_projects:\n\n if not len(project.get('vcs_urls', [])):\n\n continue\n\n all_projects_filtered.append(project)\n\n print('{} projects already have vcs urls.'.format(len(all_projects_filtered)))\n\n all_projects_filtered = []\n\n\n\n for project in all_projects:\n\n for web_url in project.get('web_urls', []):\n\n if re.match(r\"https://github.com/([\\w\\d\\.]+)/([\\w\\d\\.]+)\", web_url):\n\n project['vcs_urls'].append(web_url)\n\n if re.match(r\"https://gitlab.com/([\\w\\d\\.]+)/([\\w\\d\\.]+)\", web_url):\n\n project['vcs_urls'].append(web_url)\n\n\n\n project['vcs_urls'] = list(set(project.get('vcs_urls', [])))\n\n if not len(project.get('vcs_urls', [])):\n\n continue\n\n all_projects_filtered.append(project)\n\n print('Writing {} projects.'.format(len(all_projects_filtered)))\n\n\n\n out_file_path = projects_dir + OUTPUT_FILE_NAME\n\n open(out_file_path, 'w').write(json.dumps(all_projects, indent=2))\n", "file_path": "scripts/merge_projects.py", "rank": 53, "score": 29552.120159940674 }, { "content": "def get_updated_project(current_project, new_project):\n\n \"\"\"\n\n Get the updated project base on the newly discovered project.\n\n \"\"\"\n\n\n\n if not new_project.get('description') or not current_project.get('description'):\n\n current_project['description'] = new_project['description']\n\n elif new_project['description'].strip() not in current_project['description'].strip():\n\n current_project['description'] += '\\n' + new_project['description']\n\n\n\n for tag in new_project.get('tags', []):\n\n if 'tags' not in current_project:\n\n current_project['tags'] = []\n\n if tag in current_project['tags']:\n\n continue\n\n current_project['tags'].append(tag)\n\n current_project['tags'] = sorted(list(set(current_project['tags'])))\n\n for url in new_project.get('urls', []):\n\n if 'urls' not in current_project:\n\n current_project['urls'] = []\n\n if url in current_project['urls']:\n\n continue\n\n current_project['urls'].append(url)\n\n current_project['urls'] = sorted(list(set(current_project['urls'])))\n\n for url in new_project.get('vcs_urls', []):\n\n if 'vcs_urls' not in current_project:\n\n current_project['vcs_urls'] = []\n\n if url in current_project['vcs_urls']:\n\n continue\n\n current_project['vcs_urls'].append(url)\n\n current_project['vcs_urls'] = sorted(list(set(current_project['vcs_urls'])))\n\n\n", "file_path": "scripts/discover_repos.py", "rank": 54, "score": 26572.392031192874 }, { "content": "def get_projects_from_github():\n\n github_projects = get_all_projects_from_github()\n\n projects = []\n\n\n\n for github_project in github_projects:\n\n # We only consider original projects.\n\n if github_project.get('fork'):\n\n continue\n\n\n\n print(github_project)\n\n\n\n # TODO We require at least 1 fork for the project to be considered.\n\n\n\n project = {}\n\n project['name'] = normalize_project_name(github_project['name'])\n\n project['description'] = github_project['description']\n\n project['urls'] = []\n\n project['urls'].append(github_project['html_url'])\n\n project['urls'] = sorted(project['urls'])\n\n project['vcs_urls'] = []\n\n if github_project.get('full_name'):\n\n # GitHub always has the same format for those URLs.\n\n project['vcs_urls'].append('https://github.com/{0}.git'.format(github_project.get('full_name')))\n\n project['vcs_urls'].append('[email protected]:{0}.git'.format(github_project.get('full_name')))\n\n project['vcs_urls'] = sorted(project['vcs_urls'])\n\n\n\n # TODO we must use a different endpoint for GitHub\n\n project['tags'] = []\n\n\n\n # TODO use the README for the long description\n\n\n\n projects.append(project)\n\n\n", "file_path": "scripts/discover_repos.py", "rank": 55, "score": 26568.07807096663 }, { "content": "def get_projects_from_savannah(savannah_url):\n\n # FIXME must be authenticated to use simple=false\n\n projects_endpoint = \"https://{savannah_url}/projects?visibility=public&simple=false\"\n\n # Example initial request for the full list of projects\n\n # https://savannah.nongnu.org/search/index.php?type_of_search=soft&words=%%%\n\n # Subsequent paged requests look like this\n\n # https://savannah.nongnu.org/search/?type_of_search=soft&words=%2A&offset=25&max_rows=25#results\n\n\n\n # Any URL that is of type /projects/project_name is a candidate project.\n", "file_path": "scripts/discover_repos.py", "rank": 56, "score": 26568.07807096663 }, { "content": "def get_all_projects_from_github():\n\n projects_url = \"https://api.github.com/repositories?\".format()\n\n projects = []\n\n\n\n next_page_url = projects_url\n\n while next_page_url:\n\n print(\"Calling projects endpoint at \" + next_page_url)\n\n response = requests.get(next_page_url)\n\n\n\n try:\n\n # Making sure there was no error.\n\n response.raise_for_status()\n\n except Exception as e:\n\n print(e)\n\n break\n\n\n\n github_projects = response.json()\n\n projects.extend(github_projects)\n\n print(\"Projects endpoint returned {0} projects \".format(len(github_projects)))\n\n\n\n link_header = response.headers.get('link')\n\n if not link_header:\n\n next_page_url = None\n\n break\n\n\n\n for link in link_header.split(','):\n\n [url, rel] = link.split(';')\n\n if rel.strip() == 'rel=\"next\"':\n\n # Removing leading < and trailing >\n\n next_page_url = url.strip()[1:-1]\n\n break\n\n\n", "file_path": "scripts/discover_repos.py", "rank": 57, "score": 26568.07807096663 }, { "content": "def get_project_from_savannah_page(project_page_url):\n\n \"\"\"\n\n URLs are of the type https://savannah.nongnu.org/projects/project_page\n\n \"\"\"\n\n\n\n # Project Homepage\n\n # Download Area for the releases and downloadable files\n", "file_path": "scripts/discover_repos.py", "rank": 58, "score": 25708.16040680082 }, { "content": " false\n\n }\n\n}\n\n\n\n#[derive(Debug, Default, Deserialize, Serialize)]\n\n#[serde(default)]\n\npub struct PyProjectProject {\n\n pub name: String,\n\n pub version: String,\n\n pub description: String,\n\n pub license: String,\n\n pub readme: String,\n\n // python version that this package targets.\n\n pub python: String,\n\n pub homepage: String,\n\n pub repository: String,\n\n pub documentation: String,\n\n pub keywords: Vec<String>,\n\n pub classifiers: Vec<String>,\n\n}\n", "file_path": "panbuild/manifests/pyproject.rs", "rank": 59, "score": 31.70935069697885 }, { "content": " pub fn to_software_project(self) -> panbuild::projects::SoftwareProject {\n\n let mut project = panbuild::projects::SoftwareProject::default();\n\n let git_url = format!(\"https://github.com/{}.git\", self.full_name);\n\n project.id = panbuild::utils::repo_url_to_reverse_dns(&git_url);\n\n project.name = self.name;\n\n project.default_branch = self.default_branch;\n\n project.description = self.description;\n\n project.vcs_urls.push(git_url);\n\n if let Some(topics) = self.topics {\n\n project.keywords = topics;\n\n }\n\n project\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct GitHub {}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct GitHubError {\n\n pub message: String,\n\n pub documentation_url: String,\n\n}\n\n\n", "file_path": "pb-tools/hubs/github.rs", "rank": 60, "score": 31.284585661670892 }, { "content": " Compiler,\n\n Bootloader,\n\n Firmware,\n\n Media,\n\n Unknown,\n\n}\n\npub const DEFAULT_MODULE_TYPE: ModuleType = ModuleType::Lib;\n\nimpl Default for ModuleType {\n\n fn default() -> Self {\n\n DEFAULT_MODULE_TYPE\n\n }\n\n}\n\n\n\n#[derive(Default, Debug, Serialize, Deserialize)]\n\n/// Generic representation of a software module.\n\npub struct SoftwareModule {\n\n pub name: String,\n\n pub project_id: Option<String>,\n\n\n\n // The version of the current module.\n", "file_path": "panbuild/modules.rs", "rank": 61, "score": 30.879219744503352 }, { "content": "#[derive(Deserialize, Serialize, Debug, Default)]\n\n#[serde(default)]\n\npub struct CargoPackage {\n\n pub name: String,\n\n // TODO use a semver.\n\n pub version: String,\n\n pub authors: Vec<String>,\n\n // The rust edition.\n\n pub edition: String,\n\n pub description: String,\n\n pub license: String,\n\n pub repository: String,\n\n // The path of the readme file.\n\n pub readme: String,\n\n // The is a limit of 5 keywords per package.\n\n pub keywords: Vec<String>,\n\n // The list of files to include when publishing the package.\n\n pub include: Vec<String>,\n\n}\n\n\n", "file_path": "panbuild/manifests/cargo.rs", "rank": 62, "score": 29.711935933456253 }, { "content": "use std::collections::BTreeMap;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Debug, Default, Deserialize, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\n#[serde(default)]\n\npub struct JavascriptPackageManifest {\n\n pub name: String,\n\n\n\n // FIXME could be translated to a semver.\n\n pub version: String,\n\n pub description: String,\n\n\n\n pub repository: String,\n\n\n\n pub files: Vec<String>,\n\n\n\n pub license: String,\n\n pub author: JavascriptPackageAuthor,\n", "file_path": "panbuild/manifests/javascript.rs", "rank": 63, "score": 28.9596962954153 }, { "content": "use std::collections::BTreeMap;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\n// The format is defined in https://www.python.org/dev/peps/pep-0518/\n\n#[derive(Debug, Default, Deserialize, Serialize)]\n\n#[serde(default)]\n\npub struct PyProjectManifest {\n\n pub project: PyProjectProject,\n\n pub dependencies: BTreeMap<String, PyProjectDependency>,\n\n}\n\nimpl PyProjectManifest {\n\n pub fn get_type(&self) -> &str {\n\n return \"pyproject\";\n\n }\n\n\n\n pub fn file_path_matches(path: &str) -> bool {\n\n if path.to_lowercase().ends_with(\"pyproject.toml\") {\n\n return true;\n\n }\n", "file_path": "panbuild/manifests/pyproject.rs", "rank": 64, "score": 28.523100119878904 }, { "content": "use std::env;\n\n\n\nuse reqwest::header;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct GitLab {}\n\n\n\n// GitLab API described here\n\n// https://docs.gitlab.com/ee/api/projects.html\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct GitLabProject {\n\n pub id: String,\n\n pub name: String,\n\n pub name_with_namespace: String,\n\n pub created_at: String,\n\n pub last_activity_at: String,\n\n pub forks_count: i32,\n\n pub star_count: i32,\n", "file_path": "pb-tools/hubs/gitlab.rs", "rank": 65, "score": 28.13480716658752 }, { "content": " pub install_instructions: String,\n\n pub install_path: String,\n\n}\n\nimpl SoftwareModule {\n\n pub fn get_identifier(&self) -> &str {\n\n if self.version.len() != 0 {\n\n return self.version.as_str();\n\n }\n\n if self.tag.len() != 0 {\n\n return self.tag.as_str();\n\n }\n\n if self.commit.len() != 0 {\n\n return self.commit.as_str();\n\n }\n\n return \"\";\n\n }\n\n}\n\n\n\n#[derive(Default, Debug, Serialize, Deserialize)]\n\npub struct AbstractExecutable {\n", "file_path": "panbuild/modules.rs", "rank": 66, "score": 27.22880404413394 }, { "content": " pub description: Option<String>,\n\n pub default_branch: Option<String>,\n\n pub ssh_url_to_repo: String,\n\n pub http_url_to_repo: String,\n\n pub readme_url: String,\n\n pub tag_list: Vec<String>,\n\n // From the API doc:\n\n // If the project is a fork, and you provide a valid token to authenticate,\n\n // the forked_from_project field appears in the response.\n\n pub forked_from_project: Option<GitLabParentProject>,\n\n}\n\nimpl GitLabProject {\n\n pub fn to_software_project(self) -> panbuild::projects::SoftwareProject {\n\n let mut project = panbuild::projects::SoftwareProject::default();\n\n project.id = panbuild::utils::repo_url_to_reverse_dns(&self.http_url_to_repo);\n\n project.name = self.name;\n\n project.default_branch = self.default_branch;\n\n project.description = self.description.unwrap_or(\"\".to_string());\n\n project.vcs_urls.push(self.http_url_to_repo);\n\n project.keywords = self.tag_list;\n", "file_path": "pb-tools/hubs/gitlab.rs", "rank": 67, "score": 27.057227204071673 }, { "content": " pub versions: HomebrewRecipeVersions,\n\n}\n\nimpl HomebrewRecipe {\n\n pub fn to_software_project(self) -> panbuild::projects::SoftwareProject {\n\n let mut project = panbuild::projects::SoftwareProject::default();\n\n // We filter out http:// urls for now, but could try to convert to https in the future.\n\n if self.urls.stable.url.ends_with(\".git\") && self.urls.stable.url.starts_with(\"https\") {\n\n project.id = panbuild::utils::repo_url_to_reverse_dns(&self.urls.stable.url);\n\n project.vcs_urls.push(self.urls.stable.url);\n\n }\n\n project\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct HomebrewRecipeVersions {\n\n pub stable: String,\n\n pub bottle: bool,\n\n pub head: Option<String>,\n\n}\n", "file_path": "pb-tools/hubs/brew.rs", "rank": 68, "score": 26.657710444119562 }, { "content": "\n\n// See `man flatpak-manifest` for the flatpak manifest specs.\n\n#[derive(Deserialize, Serialize, Debug, Default)]\n\n#[serde(rename_all = \"kebab-case\")]\n\n#[serde(default)]\n\npub struct FlatpakManifest {\n\n // Name of the application.\n\n #[serde(skip_serializing_if = \"String::is_empty\")]\n\n pub app_name: String,\n\n\n\n // A string defining the application id.\n\n // Both names (app-id and id) are accepted.\n\n #[serde(skip_serializing_if = \"String::is_empty\")]\n\n pub app_id: String,\n\n #[serde(skip_serializing_if = \"String::is_empty\")]\n\n pub id: String,\n\n\n\n // The branch to use when exporting the application.\n\n // If this is unset the defaults come from the default-branch option.\n\n //\n", "file_path": "panbuild/manifests/flatpak.rs", "rank": 69, "score": 26.412891270734978 }, { "content": "use std::collections::BTreeMap;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Deserialize, Serialize, Debug, Default)]\n\n#[serde(default)]\n\npub struct CargoManifest {\n\n pub package: CargoPackage,\n\n pub dependencies: BTreeMap<String, CargoDependency>,\n\n}\n\nimpl CargoManifest {\n\n pub fn get_type(&self) -> &str {\n\n return \"cargo\";\n\n }\n\n\n\n pub fn file_path_matches(path: &str) -> bool {\n\n if path.to_lowercase().ends_with(\"cargo.toml\") {\n\n return true;\n\n }\n\n return false;\n", "file_path": "panbuild/manifests/cargo.rs", "rank": 70, "score": 25.50765943015476 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n#[derive(Serialize, Deserialize, Default)]\n\npub struct SoftwareDeveloper {\n\n pub name: String,\n\n pub aliases: Vec<String>,\n\n pub emails: Vec<String>,\n\n}\n", "file_path": "panbuild/developers.rs", "rank": 71, "score": 25.164289041363705 }, { "content": " project\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct GitLabParentProject {\n\n pub id: String,\n\n pub name: String,\n\n}\n\n\n", "file_path": "pb-tools/hubs/gitlab.rs", "rank": 72, "score": 25.095648410156 }, { "content": "#[derive(Deserialize, Serialize, Debug, Default)]\n\n#[serde(default)]\n\npub struct CargoDependencies {}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\n#[serde(untagged)]\n\npub enum CargoDependency {\n\n Version(String),\n\n VersionObject(CargoVersionObject),\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct CargoVersionObject {\n\n pub version: String,\n\n pub features: Option<Vec<String>>,\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "panbuild/manifests/cargo.rs", "rank": 73, "score": 24.938167956519294 }, { "content": "use std::collections::BTreeMap;\n\nuse std::fs;\n\nuse std::path;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\n// Make that more robust maybe?\n\npub const DEFAULT_CACHE_DIR: &str = \".panbuild/\";\n\n\n\n#[derive(Deserialize, Serialize, Debug, Default)]\n\n#[serde(default)]\n\npub struct PanbuildConfig {\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub current_workspace: Option<String>,\n\n\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub last_build: Option<String>,\n\n\n\n pub workspaces: BTreeMap<String, String>,\n\n}\n\n\n", "file_path": "panbuild/config.rs", "rank": 74, "score": 24.508620710650206 }, { "content": " pub name: String,\n\n pub path: String,\n\n pub is_desktop: bool,\n\n pub is_daemon: bool,\n\n // Whether or not this is the primary executable of the bundle.\n\n pub is_primary: bool,\n\n pub icon_path: String,\n\n}\n\n\n\n#[derive(Default, Debug, Serialize, Deserialize)]\n\npub struct AbstractPermission {\n\n pub name: String,\n\n pub description: String,\n\n pub api_type: APIType,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub enum APIType {\n\n Dbus,\n\n Files,\n", "file_path": "panbuild/modules.rs", "rank": 75, "score": 23.706101033151008 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct DebianPackagesHub {}\n\nimpl DebianPackagesHub {\n\n pub fn get_modules_from_debian_repository(repo_name: &str, repo_sources_url: &str) -> Vec<panbuild::modules::SoftwareModule> {\n\n vec![]\n\n }\n\n}\n", "file_path": "pb-tools/hubs/deb.rs", "rank": 76, "score": 23.65881304004467 }, { "content": " match fs::write(new_project_fs_path, serde_yaml::to_string(&project).unwrap()) {\n\n Ok(content) => content,\n\n Err(e) => {\n\n eprintln!(\"Could not write new project at {}: {}\", project_path.to_string(), e);\n\n }\n\n };\n\n self.projects.push(project);\n\n }\n\n\n\n pub fn search_projects(&self, search_term: &str) -> Vec<&SoftwareProject> {\n\n let mut projects: Vec<&SoftwareProject> = vec![];\n\n for project in &self.projects {\n\n if project.name.contains(&search_term) {\n\n projects.push(&project);\n\n }\n\n }\n\n projects\n\n }\n\n\n\n pub fn get_project(&self, project_id: &str) -> Option<SoftwareProject> {\n\n None\n\n }\n\n\n\n pub fn has_project(&self, project_id: &str) -> bool {\n\n self.indexed_projects.contains_key(project_id)\n\n }\n\n}\n", "file_path": "panbuild/db.rs", "rank": 77, "score": 23.012716671339497 }, { "content": " pub maintainer: String,\n\n // List of the names and email addresses of co-maintainers of the package, if any.\n\n // Format is name <[email protected]>\n\n pub uploaders: Vec<String>,\n\n pub build_depends: Vec<String>,\n\n // A semver reference to a \"canonical\" version.\n\n // (mandatory)\n\n pub standards_version: String,\n\n // URL of a website to browser the source code.\n\n pub homepage: String,\n\n // URL of a website to browser the source code.\n\n pub vcs_browser: String,\n\n // URL of the git repo.\n\n pub vcs_git: String,\n\n}\n\nimpl DebianManifest {\n\n pub fn get_type(&self) -> &str {\n\n return \"debian\";\n\n }\n\n\n", "file_path": "panbuild/manifests/debian.rs", "rank": 78, "score": 22.912663673187296 }, { "content": "use std::collections::BTreeMap;\n\nuse std::env;\n\nuse std::fs;\n\nuse std::path;\n\n\n\nuse crate::manifests::flatpak::FlatpakModule;\n\nuse crate::projects::SoftwareProject;\n\n\n\npub const DEFAULT_DB_PATH: &str = \".panbuild-db\";\n\npub const MODULES_DB_SUBDIR: &str = \"/modules\";\n\npub const PROJECTS_DB_SUBDIR: &str = \"/projects\";\n\n\n\npub struct Database {\n\n pub projects: Vec<SoftwareProject>,\n\n pub modules: Vec<FlatpakModule>,\n\n pub indexed_projects: BTreeMap<String, SoftwareProject>,\n\n}\n\nimpl Database {\n\n pub fn get_database() -> Database {\n\n if let Err(e) = fs::create_dir_all(Database::get_modules_db_path()) {\n", "file_path": "panbuild/db.rs", "rank": 79, "score": 22.792527131939902 }, { "content": " // The name of the runtime that the application uses.\n\n #[serde(skip_serializing_if = \"String::is_empty\")]\n\n pub runtime: String,\n\n\n\n // The version of the runtime that the application uses, defaults to master.\n\n #[serde(skip_serializing_if = \"String::is_empty\")]\n\n pub runtime_version: String,\n\n\n\n // The name of the development runtime that the application builds with.\n\n #[serde(skip_serializing_if = \"String::is_empty\")]\n\n pub sdk: String,\n\n\n\n // The name of the development extensions that the application requires to build.\n\n pub sdk_extensions: Vec<String>,\n\n\n\n // Initialize the (otherwise empty) writable /var in the build with a copy of this runtime.\n\n #[serde(skip_serializing_if = \"String::is_empty\")]\n\n pub var: String,\n\n\n\n // Use this file as the base metadata file when finishing.\n", "file_path": "panbuild/manifests/flatpak.rs", "rank": 80, "score": 21.86083608189188 }, { "content": "#[derive(Debug, Default, Deserialize, Serialize)]\n\n#[derive(Hash)]\n\n#[serde(rename_all = \"kebab-case\")]\n\n#[serde(default)]\n\npub struct FlatpakModule {\n\n // The name of the module, used in e.g. build logs. The name is also\n\n // used for constructing filenames and commandline arguments,\n\n // therefore using spaces or '/' in this string is a bad idea.\n\n pub name: String,\n\n\n\n // If true, skip this module\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub disabled: Option<bool>,\n\n\n\n // An array of objects defining sources that will be downloaded and extracted in order.\n\n // String members in the array are interpreted as the name of a separate\n\n // json or yaml file that contains sources. See below for details.\n\n // FIXME this can also be a string, which represents a local path to a module file.\n\n #[serde(skip_serializing_if = \"Vec::is_empty\")]\n\n pub sources: Vec<FlatpakSource>,\n", "file_path": "panbuild/manifests/flatpak.rs", "rank": 81, "score": 21.835575634493097 }, { "content": " \"Recommends\",\n\n \"Suggests\",\n\n \"Enhances\",\n\n \"Pre-Depends\",\n\n \"Conflicts\",\n\n \"Breaks\",\n\n];\n\n\n\n#[derive(Serialize, Deserialize, Default, Debug)]\n\npub struct DebianManifest {\n\n // The name of the source described in this manifest.\n\n // (mandatory)\n\n pub source: String,\n\n pub version: String,\n\n // The packages in the archive areas main, contrib and non-free are grouped\n\n // further into sections to simplify handling.\n\n pub section: String,\n\n pub priority: String,\n\n // Format is name <[email protected]>\n\n // (mandatory)\n", "file_path": "panbuild/manifests/debian.rs", "rank": 82, "score": 21.78296689772872 }, { "content": "use reqwest::header;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct HomebrewRecipe {\n\n pub name: String,\n\n pub full_name: String,\n\n pub tap: String,\n\n pub aliases: Vec<String>,\n\n pub license: Option<String>,\n\n pub desc: String,\n\n pub homepage: String,\n\n\n\n pub disabled: bool,\n\n pub deprecated: bool,\n\n pub outdated: bool,\n\n pub pinned: bool,\n\n\n\n pub urls: HomebrewRecipeUrls,\n", "file_path": "pb-tools/hubs/brew.rs", "rank": 83, "score": 21.587928548065968 }, { "content": "use std::collections::HashMap;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\n// See https://snapcraft.io/docs/snapcraft-yaml-reference for the full YAML reference.\n\n// TODO is https://snapcraft.io/docs/snapcraft-advanced-grammar relevant?\n\n// The top-level keys and values in snapcraft.yaml provide the snap build process, and the store,\n\n// with the overarching details of a snap. See Snapcraft app metadata and Snapcraft parts metadata for\n\n// details on how apps and parts are configured within snapcraft.yaml.\n\n// Top-level details include a snap’s name, version and description, alongside operational values\n\n// such as its confinement level and supported architecture.\n\n#[derive(Deserialize, Serialize, Debug, Default)]\n\n#[serde(rename_all = \"kebab-case\")]\n\n#[serde(default)]\n\npub struct SnapcraftManifest {\n\n // Incorporate external metadata via the referenced part.\n\n // See Using external metadata for more details.\n\n pub adopt_info: String,\n\n\n\n // List of build and run architectures.\n", "file_path": "panbuild/manifests/snap.rs", "rank": 84, "score": 21.468390351180936 }, { "content": "\n\n // Any icon with this name will be renamed to a name based on id during\n\n // the cleanup phase. Note that this is the icon name, not the full filenames,\n\n // so it should not include a filename extension.\n\n #[serde(skip_serializing_if = \"String::is_empty\")]\n\n pub rename_icon: String,\n\n\n\n // Replace the appdata project-license field with this string.\n\n // This is useful as the upstream license is typically only about\n\n // the application itself, whereas the bundled app can contain other\n\n // licenses too.\n\n #[serde(skip_serializing_if = \"String::is_empty\")]\n\n pub appdata_license: String,\n\n\n\n // If rename-icon is set, keep a copy of the old icon file.\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub copy_icon: Option<bool>,\n\n\n\n // This string will be prefixed to the Name key in the main application desktop file.\n\n #[serde(skip_serializing_if = \"String::is_empty\")]\n", "file_path": "panbuild/manifests/flatpak.rs", "rank": 85, "score": 21.056682885539768 }, { "content": "//! # Panbuild\n\n//!\n\n//! `panbuild` is the universal builder.\n\nuse std::collections::HashMap;\n\n\n\npub mod db;\n\npub mod logger;\n\npub mod manifests;\n\npub mod modules;\n\npub mod projects;\n\npub mod utils;\n\n\n\nmod config;\n\nmod developers;\n\nmod version;\n\n\n\npub use manifests::flatpak::FlatpakModule;\n\npub use manifests::manifest::AbstractManifest;\n\npub use modules::SoftwareModule;\n\npub use projects::SoftwareProject;\n\n\n\nuse std::env;\n\nuse std::fs;\n\nuse std::path;\n\n\n\nconst DEFAULT_GIT_CACHE_DIR: &str = \".git/\";\n\nconst DEFAULT_PACKAGE_LIST_SEP: &str = \",\";\n\n\n", "file_path": "panbuild/lib.rs", "rank": 86, "score": 20.97940065328683 }, { "content": " if js_package_manifest.scripts.len() == 0 {\n\n log::debug!(\"The scripts section is missing from the Javascript package manifest.\");\n\n return None;\n\n }\n\n\n\n Some(js_package_manifest)\n\n }\n\n}\n\n\n\n#[derive(Debug, Default, Deserialize, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\n#[serde(default)]\n\npub struct JavascriptPackageAuthor {\n\n pub name: String,\n\n pub email: String,\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "panbuild/manifests/javascript.rs", "rank": 87, "score": 20.60430250817503 }, { "content": " pub desktop_file_name_prefix: String,\n\n\n\n // This string will be suffixed to the Name key in the main application desktop file.\n\n #[serde(skip_serializing_if = \"String::is_empty\")]\n\n pub desktop_file_name_suffix: String,\n\n\n\n // An array of strings specifying the modules to be built in order.\n\n // String members in the array are interpreted as the name of a separate\n\n // json or yaml file that contains a module. See below for details.\n\n #[serde(skip_serializing_if = \"Vec::is_empty\")]\n\n pub modules: Vec<FlatpakModule>,\n\n}\n\nimpl FlatpakManifest {\n\n pub fn get_type(&self) -> &str {\n\n return \"flatpak\";\n\n }\n\n\n\n pub fn file_path_matches(path: &str) -> bool {\n\n let parts: Vec<&str> = path.split(\"/\").collect();\n\n if parts.len() == 0 {\n", "file_path": "panbuild/manifests/flatpak.rs", "rank": 88, "score": 20.277116607189324 }, { "content": "\n\n // TODO fetch the version from the sources.\n\n software_module\n\n }\n\n pub fn get_hash(&self) -> u64 {\n\n let mut s = DefaultHasher::new();\n\n self.hash(&mut s);\n\n s.finish()\n\n }\n\n}\n\n\n\npub const ALLOWED_SOURCE_TYPES: [&'static str; 10] = [\"archive\", \"git\", \"bzr\", \"svn\", \"dir\", \"file\", \"script\", \"shell\", \"patch\", \"extra-data\"];\n\n\n\n// The sources are a list pointer to the source code that needs to be extracted into\n\n// the build directory before the build starts.\n\n// They can be of several types, distinguished by the type property.\n\n//\n\n// Additionally, the sources list can contain a plain string, which is interpreted as the name\n\n// of a separate json or yaml file that is read and inserted at this\n\n// point. The file can contain a single source, or an array of sources.\n", "file_path": "panbuild/manifests/flatpak.rs", "rank": 89, "score": 20.20293936897885 }, { "content": "const REQUIRED_TOP_LEVEL_FIELDS: [&'static str; 0] = [\n\n // DESCRIPTION,\n\n // GRADE,\n\n // NAME,\n\n // SUMMARY,\n\n // According to the spec, the version is indeed required, but some projects don't\n\n // populate it so it can be generated dynamically when creating a new version.\n\n // See the `snapcraftctl set-version` command for examples.\n\n // VERSION,\n\n];\n\n\n\n// strict: no access outside of declared interfaces through plugs.\n\n// devmode: a special mode for snap creators and developers.\n\n// classic: allows access to your system’s resources in much the same way traditional packages do.\n\n// For more information, refer to https://snapcraft.io/docs/snap-confinement.\n", "file_path": "panbuild/manifests/snap.rs", "rank": 90, "score": 20.020087844003836 }, { "content": "#[derive(Debug, Default, Deserialize, Serialize)]\n\n#[derive(Hash)]\n\n#[serde(rename_all = \"kebab-case\")]\n\npub struct FlatpakSource {\n\n #[serde(skip_serializing_if = \"String::is_empty\")]\n\n pub r#type: String,\n\n\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub url: Option<String>,\n\n\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub path: Option<String>,\n\n\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub tag: Option<String>,\n\n\n\n // The name of the branch to checkout.\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub branch: Option<String>,\n\n}\n", "file_path": "panbuild/manifests/flatpak.rs", "rank": 91, "score": 19.432758546293847 }, { "content": "use std::collections::HashMap;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::manifests::manifest::AbstractManifest;\n\nuse crate::modules::SoftwareModule;\n\n\n\nconst CONTROL_FILE_SEPARATOR: &str = \":\";\n\n\n\npub const ALLOWED_SECTIONS: [&'static str; 57] = [\n\n \"admin\",\n\n \"cli-mono\",\n\n \"comm\",\n\n \"database\",\n\n \"debug\",\n\n \"devel\",\n\n \"doc\",\n\n \"editors\",\n\n \"education\",\n\n \"electronics\",\n", "file_path": "panbuild/manifests/debian.rs", "rank": 92, "score": 19.403199542334658 }, { "content": "use std::collections::hash_map::DefaultHasher;\n\nuse std::collections::BTreeMap;\n\nuse std::hash::{Hash, Hasher};\n\nuse std::path;\n\nuse std::process::{Command, Output, Stdio};\n\nuse std::str;\n\nuse std::time::SystemTime;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::modules::{BuildSystem, SoftwareModule};\n\n\n\nconst DEFAULT_FLATPAK_BUILDER_CACHE_DIR: &str = \".flatpak-builder\";\n\nconst DEFAULT_FLATPAK_OUTPUT_DIR: &str = \"build\";\n\n\n\n// Other choices are org.gnome.Platform and org.kde.Platform\n\nconst DEFAULT_RUNTIME: &str = \"org.freedesktop.Platform\";\n\nconst DEFAULT_RUNTIME_VERSION: &str = \"master\";\n\n// Other choices are org.gnome.Sdk and org.kde.Sdk\n\nconst DEFAULT_SDK: &str = \"org.freedesktop.Sdk\";\n", "file_path": "panbuild/manifests/flatpak.rs", "rank": 93, "score": 19.282596671353858 }, { "content": " // This key overrides both the default-branch key, and the --default-branch commandline option.\n\n // Unless you need a very specific branchname (like for a runtime or an extension) it is recommended\n\n // to use the default-branch key instead, because you can then override the default using\n\n // --default-branch when building for instance a test build.\n\n #[serde(skip_serializing_if = \"String::is_empty\")]\n\n pub branch: String,\n\n\n\n // The default branch to use when exporting the application. Defaults to master.\n\n // This key can be overridden by the --default-branch commandline option.\n\n #[serde(skip_serializing_if = \"String::is_empty\")]\n\n pub default_branch: String,\n\n\n\n // The collection ID of the repository, defaults to being unset.\n\n // Setting a globally unique collection ID allows the apps in the\n\n // repository to be shared over peer to peer systems without needing further configuration.\n\n // If building in an existing repository, the collection ID must match the existing\n\n // configured collection ID for that repository.\n\n #[serde(skip_serializing_if = \"String::is_empty\")]\n\n pub collection_id: String,\n\n\n", "file_path": "panbuild/manifests/flatpak.rs", "rank": 94, "score": 19.23122387890247 }, { "content": " #[serde(skip_serializing_if = \"String::is_empty\")]\n\n pub test_rule: String,\n\n\n\n // Array of commands to run during the tests.\n\n #[serde(skip_serializing_if = \"Vec::is_empty\")]\n\n pub test_commands: Vec<String>,\n\n\n\n // An array of objects specifying nested modules to be built before this one.\n\n // String members in the array are interpreted as names of a separate json or\n\n // yaml file that contains a module.\n\n #[serde(skip_serializing_if = \"Vec::is_empty\")]\n\n pub modules: Vec<FlatpakModule>,\n\n}\n\nimpl FlatpakModule {\n\n pub fn to_module(&self) -> SoftwareModule {\n\n let mut software_module = SoftwareModule::default();\n\n software_module.name = self.name.to_string();\n\n if self.buildsystem == \"cmake\" {\n\n software_module.build_system = BuildSystem::Cmake;\n\n }\n", "file_path": "panbuild/manifests/flatpak.rs", "rank": 95, "score": 18.95574992532916 }, { "content": "\n\n#[derive(Debug, Default, Deserialize, Serialize)]\n\n#[serde(default)]\n\npub struct PyProjectDependency {}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n pub fn test_file_path_matches() {\n\n assert!(PyProjectManifest::file_path_matches(\"pyproject.toml\"));\n\n assert!(PyProjectManifest::file_path_matches(\"./pyproject.toml\"));\n\n assert!(PyProjectManifest::file_path_matches(\"./path/to/the/pyproject.toml\"));\n\n assert!(!PyProjectManifest::file_path_matches(\"com.example.appName.yaml\"));\n\n assert!(!PyProjectManifest::file_path_matches(\"\"));\n\n assert!(!PyProjectManifest::file_path_matches(\"/////////////\"));\n\n }\n\n}\n", "file_path": "panbuild/manifests/pyproject.rs", "rank": 96, "score": 18.620942929509823 }, { "content": " // add-ld-path, download-if, enable-if, merge-dirs, subdirectory-suffix, locale-subset,\n\n // version, versions. See the flatpak metadata documentation for more information on these.\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub remove_after_build: Option<bool>,\n\n}\n\n\n\n// Build options specify the build environment of a module,\n\n// and can be specified globally as well as per-module.\n\n// Options can also be specified on a per-architecture basis using the arch property.\n\n#[derive(Deserialize, Serialize, Debug, Default)]\n\n#[derive(Hash)]\n\n#[serde(rename_all = \"kebab-case\")]\n\n#[serde(default)]\n\npub struct FlatpakBuildOptions {\n\n // This is set in the environment variable CFLAGS during the build.\n\n // Multiple specifications of this (in e.g. per-arch area) are concatenated, separated by spaces.\n\n #[serde(skip_serializing_if = \"String::is_empty\")]\n\n pub cflags: String,\n\n\n\n // If this is true, clear cflags from previous build options before adding it from these options.\n", "file_path": "panbuild/manifests/flatpak.rs", "rank": 97, "score": 18.469145256091252 }, { "content": "];\n\n\n\n// The main building blocks of a snap are parts.\n\n// They are used to declare pieces of code that will be pulled into your snap package.\n\n// The parts keys and values in snapcraft.yaml detail how parts are configured and built\n\n// by the snapcraft command.\n\n//\n\n// See Snapcraft top-level metadata and Snapcraft apps and services metadata for\n\n// details on how apps and parts are configured within snapcraft.yaml.\n\n// <part-name> represents the specific name of a building block which can be\n\n// then referenced by the command line tool (i.e. snapcraft).\n\n#[derive(Deserialize, Serialize, Debug, Default)]\n\n#[serde(rename_all = \"kebab-case\")]\n\n#[serde(default)]\n\npub struct SnapcraftPart {\n\n // Ensures that all the <part-names> listed in after are staged before this part begins its lifecycle.\n\n pub after: Vec<String>,\n\n\n\n // A list of named attributes to modify the behaviour of plugins.\n\n pub build_attributes: Vec<String>,\n", "file_path": "panbuild/manifests/snap.rs", "rank": 98, "score": 18.09876753470295 }, { "content": " pub fn file_path_matches(path: &str) -> bool {\n\n if path.to_lowercase().ends_with(\"debian/control\") {\n\n return true;\n\n }\n\n return false;\n\n }\n\n\n\n pub fn parse(manifest_content: &String) -> Option<DebianManifest> {\n\n let paragraphs = parse_paragraphs(manifest_content);\n\n if paragraphs.len() < 2 {\n\n eprintln!(\"There is only {} paragraph in the debian control file?\", paragraphs.len());\n\n return None;\n\n }\n\n\n\n let mut debian_manifest = DebianManifest::default();\n\n\n\n let first_paragraph = parse_paragraph(&paragraphs[0]);\n\n debian_manifest.source = first_paragraph.get(\"Source\").unwrap_or(&\"\".to_string()).to_string();\n\n debian_manifest.maintainer = first_paragraph.get(\"Maintainer\").unwrap_or(&\"\".to_string()).to_string();\n\n debian_manifest.version = first_paragraph.get(\"Version\").unwrap_or(&\"\".to_string()).to_string();\n", "file_path": "panbuild/manifests/debian.rs", "rank": 99, "score": 17.81680422970412 } ]
Rust
crates/swift-bridge-ir/src/parse/parse_struct.rs
Jomy10/swift-bridge
4a21fd134a2fd4d2418081ba962b97e6d56b155f
use crate::bridged_type::{SharedStruct, StructFields, StructSwiftRepr}; use crate::errors::{ParseError, ParseErrors}; use proc_macro2::{Ident, TokenTree}; use syn::parse::{Parse, ParseStream}; use syn::{ItemStruct, LitStr, Token}; pub(crate) struct SharedStructDeclarationParser<'a> { pub item_struct: ItemStruct, pub errors: &'a mut ParseErrors, } enum StructAttr { SwiftRepr((StructSwiftRepr, LitStr)), SwiftName(LitStr), Error(StructAttrParseError), AlreadyDeclared, } enum StructAttrParseError { InvalidSwiftRepr(LitStr), UnrecognizedAttribute(Ident), } #[derive(Default)] struct StructAttribs { swift_repr: Option<(StructSwiftRepr, LitStr)>, swift_name: Option<LitStr>, already_declared: bool, } struct ParsedAttribs(Vec<StructAttr>); impl Parse for ParsedAttribs { fn parse(input: ParseStream) -> syn::Result<Self> { if input.is_empty() { return Ok(ParsedAttribs(vec![])); } let opts = syn::punctuated::Punctuated::<_, syn::token::Comma>::parse_terminated(input)?; Ok(ParsedAttribs(opts.into_iter().collect())) } } impl Parse for StructAttr { fn parse(input: ParseStream) -> syn::Result<Self> { let key: Ident = input.parse()?; let attr = match key.to_string().as_str() { "swift_repr" => { input.parse::<Token![=]>()?; let repr: LitStr = input.parse()?; match repr.value().as_str() { "class" => StructAttr::SwiftRepr((StructSwiftRepr::Class, repr)), "struct" => StructAttr::SwiftRepr((StructSwiftRepr::Structure, repr)), _ => StructAttr::Error(StructAttrParseError::InvalidSwiftRepr(repr)), } } "swift_name" => { input.parse::<Token![=]>()?; let name = input.parse()?; StructAttr::SwiftName(name) } "already_declared" => StructAttr::AlreadyDeclared, _ => { move_input_cursor_to_next_comma(input); StructAttr::Error(StructAttrParseError::UnrecognizedAttribute(key)) } }; Ok(attr) } } impl<'a> SharedStructDeclarationParser<'a> { pub fn parse(self) -> Result<SharedStruct, syn::Error> { let item_struct = self.item_struct; let mut attribs = StructAttribs::default(); for attr in item_struct.attrs { let sections: ParsedAttribs = attr.parse_args()?; for attr in sections.0 { match attr { StructAttr::SwiftRepr((repr, lit_str)) => { attribs.swift_repr = Some((repr, lit_str)); } StructAttr::SwiftName(name) => { attribs.swift_name = Some(name); } StructAttr::Error(err) => match err { StructAttrParseError::InvalidSwiftRepr(val) => { self.errors.push(ParseError::StructInvalidSwiftRepr { swift_repr_attr_value: val.clone(), }); attribs.swift_repr = Some((StructSwiftRepr::Structure, val)); } StructAttrParseError::UnrecognizedAttribute(attribute) => { self.errors .push(ParseError::StructUnrecognizedAttribute { attribute }); } }, StructAttr::AlreadyDeclared => { attribs.already_declared = true; } }; } } let swift_repr = if item_struct.fields.len() == 0 { if let Some((swift_repr, lit_str)) = attribs.swift_repr { if swift_repr == StructSwiftRepr::Class { self.errors.push(ParseError::EmptyStructHasSwiftReprClass { struct_ident: item_struct.ident.clone(), swift_repr_attr_value: lit_str, }); } } StructSwiftRepr::Structure } else if let Some((swift_repr, _)) = attribs.swift_repr { swift_repr } else { self.errors.push(ParseError::StructMissingSwiftRepr { struct_ident: item_struct.ident.clone(), }); StructSwiftRepr::Structure }; let shared_struct = SharedStruct { name: item_struct.ident, swift_repr, fields: StructFields::from_syn_fields(item_struct.fields), swift_name: attribs.swift_name, already_declared: attribs.already_declared, }; Ok(shared_struct) } } fn move_input_cursor_to_next_comma(input: ParseStream) { if !input.peek(Token![,]) { let _ = input.step(|cursor| { let mut current_cursor = *cursor; while let Some((tt, next)) = current_cursor.token_tree() { match &tt { TokenTree::Punct(punct) if punct.as_char() == ',' => { return Ok(((), current_cursor)); } _ => current_cursor = next, } } Ok(((), current_cursor)) }); } } #[cfg(test)] mod tests { use super::*; use crate::test_utils::{parse_errors, parse_ok}; use quote::{quote, ToTokens}; #[test] fn parse_unit_struct() { let tokens = quote! { #[swift_bridge::bridge] mod ffi { struct Foo; struct Bar(); struct Bazz {} } }; let module = parse_ok(tokens); assert_eq!(module.types.types().len(), 3); for (idx, name) in vec!["Foo", "Bar", "Bazz"].into_iter().enumerate() { let ty = &module.types.types()[idx].unwrap_shared_struct(); assert_eq!(ty.name, name); assert_eq!(ty.swift_repr, StructSwiftRepr::Structure); } } #[test] fn error_if_missing_swift_repr() { let tokens = quote! { #[swift_bridge::bridge] mod ffi { struct Foo { bar: u8 } } }; let errors = parse_errors(tokens); assert_eq!(errors.len(), 1); match &errors[0] { ParseError::StructMissingSwiftRepr { struct_ident } => { assert_eq!(struct_ident, "Foo"); } _ => panic!(), }; } #[test] fn error_if_invalid_swift_repr() { let tokens = quote! { #[swift_bridge::bridge] mod ffi { #[swift_bridge(swift_repr = "an-invalid-value")] struct Foo { bar: u8 } } }; let errors = parse_errors(tokens); assert_eq!(errors.len(), 1); match &errors[0] { ParseError::StructInvalidSwiftRepr { swift_repr_attr_value, } => { assert_eq!(swift_repr_attr_value.value(), "an-invalid-value"); } _ => panic!(), }; } #[test] fn error_if_empty_struct_swift_repr_set_to_class() { let tokens = quote! { #[swift_bridge::bridge] mod ffi { #[swift_bridge(swift_repr = "class")] struct Foo; #[swift_bridge(swift_repr = "class")] struct Bar; #[swift_bridge(swift_repr = "class")] struct Buzz; } }; let errors = parse_errors(tokens); assert_eq!(errors.len(), 3); for (idx, struct_name) in vec!["Foo", "Bar", "Buzz"].into_iter().enumerate() { match &errors[idx] { ParseError::EmptyStructHasSwiftReprClass { struct_ident, swift_repr_attr_value, } => { assert_eq!(struct_ident, struct_name); assert_eq!(swift_repr_attr_value.value(), "class"); } _ => panic!(), }; } } #[test] fn parse_struct_with_named_u8_field() { let tokens = quote! { #[swift_bridge::bridge] mod ffi { #[swift_bridge(swift_repr = "struct")] struct Foo { bar: u8 } } }; let module = parse_ok(tokens); let ty = module.types.types()[0].unwrap_shared_struct(); match &ty.fields { StructFields::Named(fields) => { let field = &fields[0]; assert_eq!(field.name, "bar"); assert_eq!(field.ty.to_token_stream().to_string(), "u8"); } _ => panic!(), }; } #[test] fn parse_swift_name_attribute() { let tokens = quote! { #[swift_bridge::bridge] mod ffi { #[swift_bridge(swift_name = "FfiFoo")] struct Foo; } }; let module = parse_ok(tokens); let ty = module.types.types()[0].unwrap_shared_struct(); assert_eq!(ty.swift_name.as_ref().unwrap().value(), "FfiFoo"); } #[test] fn parses_multiple_struct_attributes() { let tokens = quote! { #[swift_bridge::bridge] mod ffi { #[swift_bridge(swift_name = "FfiFoo", swift_repr = "class")] struct Foo { fied: u8 } } }; let module = parse_ok(tokens); let ty = module.types.types()[0].unwrap_shared_struct(); assert_eq!(ty.swift_name.as_ref().unwrap().value(), "FfiFoo"); assert_eq!(ty.swift_repr, StructSwiftRepr::Class); } #[test] fn parses_struct_already_declared_attribute() { let tokens = quote! { #[swift_bridge::bridge] mod ffi { #[swift_bridge(already_declared, swift_repr = "struct")] struct SomeType; } }; let module = parse_ok(tokens); let ty = module.types.types()[0].unwrap_shared_struct(); assert!(ty.already_declared); } #[test] fn error_if_attribute_unrecognized() { let tokens = quote! { #[swift_bridge::bridge] mod ffi { #[swift_bridge(unrecognized, invalid_attribute = "hi", swift_repr = "struct")] struct SomeType; } }; let errors = parse_errors(tokens); assert_eq!(errors.len(), 2); match &errors[0] { ParseError::StructUnrecognizedAttribute { attribute } => { assert_eq!(&attribute.to_string(), "unrecognized"); } _ => panic!(), }; match &errors[1] { ParseError::StructUnrecognizedAttribute { attribute } => { assert_eq!(&attribute.to_string(), "invalid_attribute"); } _ => panic!(), }; } }
use crate::bridged_type::{SharedStruct, StructFields, StructSwiftRepr}; use crate::errors::{ParseError, ParseErrors}; use proc_macro2::{Ident, TokenTree}; use syn::parse::{Parse, ParseStream}; use syn::{ItemStruct, LitStr, Token}; pub(crate) struct SharedStructDeclarationParser<'a> { pub item_struct: ItemStruct, pub errors: &'a mut ParseErrors, } enum StructAttr { SwiftRepr((StructSwiftRepr, LitStr)), SwiftName(LitStr), Error(StructAttrParseError), AlreadyDeclared, } enum StructAttrParseError { InvalidSwiftRepr(LitStr), UnrecognizedAttribute(Ident), } #[derive(Default)] struct StructAttribs { swift_repr: Option<(StructSwiftRepr, LitStr)>, swift_name: Option<LitStr>, already_declared: bool, } struct ParsedAttribs(Vec<StructAttr>); impl Parse for ParsedAttribs { fn parse(input: ParseStream) -> syn::Result<Self> { if input.is_empty() { return Ok(ParsedAttribs(vec![])); } let opts = syn::punctuated::Punctuated::<_, syn::token::Comma>::parse_terminated(input)?; Ok(ParsedAttribs(opts.into_iter().collect())) } } impl Parse for StructAttr { fn parse(input: ParseStream) -> syn::Result<Self> { let key: Ident = input.parse()?; let attr = match key.to_string().as_str() { "swift_repr" => { input.parse::<Token![=]>()?; let repr: LitStr = input.parse()?; match repr.value().as_str() { "class" => StructAttr::SwiftRepr((StructSwiftRepr::Class, repr)), "struct" => StructAttr::SwiftRepr((StructSwiftRepr::Structure, repr)), _ => StructAttr::Error(StructAttrParseError::InvalidSwiftRepr(repr)), } } "swift_name" => { input.parse::<Token![=]>()?; let name = input.parse()?; StructAttr::SwiftName(name) } "already_declared" => StructAttr::AlreadyDeclared, _ => { move_input_cursor_to_next_comma(input); StructAttr::Error(StructAttrParseError::UnrecognizedAttribute(key)) } }; Ok(attr) } } impl<'a> SharedStructDeclarationParser<'a> { pub fn parse(self) -> Result<SharedStruct, syn::Error> { let item_struct = self.item_struct; let mut attribs = StructAttribs::default(); for attr in item_struct.attrs { let sections: ParsedAttribs = attr.parse_args()?; for attr in sections.0 { match attr { StructAttr::SwiftRepr((repr, lit_str)) => { attribs.swift_repr = Some((repr, lit_str)); } StructAttr::SwiftName(name) => { attribs.swift_name = Some(name); } StructAttr::Error(err) => match err { StructAttrParseError::InvalidSwiftRepr(val) => { self.errors.push(ParseError::StructInvalidSwiftRepr { swift_repr_attr_value: val.clone(), }); attribs.swift_repr = Some((StructSwiftRepr::Structure, val)); } StructAttrParseError::UnrecognizedAttribute(attribute) => { self.errors .push(ParseError::StructUnrecognizedAttribute { attribute }); } }, StructAttr::AlreadyDeclared => { attribs.already_declared = true; } }; } } let swift_repr = if item_struct.fields.len() == 0 { if let Some((swift_repr, lit_str)) = attribs.swift_repr { if swift_repr == StructSwiftRepr::Class { self.errors.push(ParseError::EmptyStructHasSwiftReprClass { struct_ident: item_struct.ident.clone(), swift_repr_attr_value: lit_str, }); } } StructSwiftRepr::Structure } else if let Some((swift_repr, _)) = attribs.swift_repr { swift_repr } else { self.errors.push(ParseError::StructMissingSwiftRepr { struct_ident: item_struct.ident.clone(), }); StructSwiftRepr::Structure }; let shared_struct = SharedStruct { name: item_struct.ident, swift_repr, fields: StructFields::from_syn_fields(item_struct.fields), swift_name: attribs.swift_name, already_declared: attribs.already_declared, }; Ok(shared_struct) } } fn move_input_cursor_to_next_comma(input: ParseStream) { if !input.peek(Token![,]) { let _ = input.step(|cursor| { let mut current_cursor = *cursor; while let Some((tt, next)) = current_cursor.token_tree() { match &tt { TokenTree::Punct(punct) if punct.as_char() == ',' => { return Ok(((), current_cursor)); } _ => current_cursor = next, } } Ok(((), current_cursor)) }); } } #[cfg(test)] mod tests { use super::*; use crate::test_utils::{parse_errors, parse_ok}; use quote::{quote, ToTokens}; #[test] fn parse_unit_struct() { let tokens = quote! { #[swift_bridge::bridge] mod ffi { struct Foo; struct Bar(); struct Bazz {} } }; let module = parse_ok(tokens); assert_eq!(module.types.types().len(), 3); for (idx, name) in vec!["Foo", "Bar", "Bazz"].into_iter().enumerate() { let ty = &module.types.types()[idx].unwrap_shared_struct(); assert_eq!(ty.name, name); assert_eq!(ty.swift_repr, StructSwiftRepr::Structure); } } #[test] fn error_if_missing_swift_repr() { let tokens = quote! { #[swift_bridge::bridge] mod ffi { struct Foo { bar: u8 } } }; let errors = parse_errors(tokens); assert_eq!(errors.len(), 1); match &errors[0] { ParseError::StructMissingSwiftRepr { struct_ident } => { assert_eq!(struct_ident, "Foo"); } _ => panic!(), }; } #[test] fn error_if_invalid_swift_repr() { let tokens = quote! { #[swift_bridge::bridge] mod ffi { #[swift_bridge(swift_repr = "an-invalid-value")] struct Foo { bar: u8 } } }; let errors = parse_errors(tokens); assert_eq!(errors.len(), 1); match &errors[0] { ParseError::StructInvalidSwiftRepr { swift_repr_attr_value, } => { assert_eq!(swift_repr_attr_value.value(), "an-invalid-value"); } _ => panic!(), }; } #[test] fn error_if_empty_struct_swift_repr_set_to_class() { let tokens = quote! { #[swift_bridge::bridge] mod ffi { #[swift_bridge(swift_repr = "class")] struct Foo; #[swift_bridge(swift_repr = "class")] struct Bar; #[swift_bridge(swift_repr = "class")] struct Buzz; } }; let errors = parse_errors(tokens); assert_eq!(errors.len(), 3); for (idx, struct_name) in vec!["Foo", "Bar", "Buzz"].into_iter().enumerate() { match &errors[idx] { ParseError::EmptyStructHasSwiftReprClass { struct_ident, swift_repr_attr_value, } => { assert_eq!(struct_ident, struct_name); assert_eq!(swift_repr_attr_value.value(), "class"); } _ => panic!(), }; } } #[test] fn parse_struct_with_named_u8_field() { let tokens = quote! { #[swift_bridge::bridge] mod ffi { #[swift_bridge(swift_repr = "struct")] struct Foo { bar: u8 } } }; let module = parse_ok(tokens); let ty = module.types.types()[0].unwrap_shared_struct(); match &ty.fields { StructFields::Named(fields) => { let field = &fields[0]; assert_eq!(field.name, "bar"); assert_eq!(field.ty.to_token_stream().to_string(), "u8"); } _ => panic!(), }; } #[test] fn parse_swift_name_attribute() { let tokens = quote! { #[swift_bridge::bridge] mod ffi { #[swift_bridge(swift_name = "FfiFoo")] struct Foo; } }; let module = parse_ok(tokens);
#[test] fn parses_multiple_struct_attributes() { let tokens = quote! { #[swift_bridge::bridge] mod ffi { #[swift_bridge(swift_name = "FfiFoo", swift_repr = "class")] struct Foo { fied: u8 } } }; let module = parse_ok(tokens); let ty = module.types.types()[0].unwrap_shared_struct(); assert_eq!(ty.swift_name.as_ref().unwrap().value(), "FfiFoo"); assert_eq!(ty.swift_repr, StructSwiftRepr::Class); } #[test] fn parses_struct_already_declared_attribute() { let tokens = quote! { #[swift_bridge::bridge] mod ffi { #[swift_bridge(already_declared, swift_repr = "struct")] struct SomeType; } }; let module = parse_ok(tokens); let ty = module.types.types()[0].unwrap_shared_struct(); assert!(ty.already_declared); } #[test] fn error_if_attribute_unrecognized() { let tokens = quote! { #[swift_bridge::bridge] mod ffi { #[swift_bridge(unrecognized, invalid_attribute = "hi", swift_repr = "struct")] struct SomeType; } }; let errors = parse_errors(tokens); assert_eq!(errors.len(), 2); match &errors[0] { ParseError::StructUnrecognizedAttribute { attribute } => { assert_eq!(&attribute.to_string(), "unrecognized"); } _ => panic!(), }; match &errors[1] { ParseError::StructUnrecognizedAttribute { attribute } => { assert_eq!(&attribute.to_string(), "invalid_attribute"); } _ => panic!(), }; } }
let ty = module.types.types()[0].unwrap_shared_struct(); assert_eq!(ty.swift_name.as_ref().unwrap().value(), "FfiFoo"); }
function_block-function_prefix_line
[ { "content": "fn swift_calls_rust_struct_with_no_fields(arg: ffi::StructWithNoFields) -> ffi::StructWithNoFields {\n\n arg\n\n}\n\n\n", "file_path": "crates/swift-integration-tests/src/shared_types/shared_struct.rs", "rank": 0, "score": 256582.61491422064 }, { "content": "fn rust_echo_mut_u8(ptr: *mut u8) -> *mut u8 {\n\n ptr\n\n}\n", "file_path": "crates/swift-integration-tests/src/pointer.rs", "rank": 1, "score": 251422.7805392816 }, { "content": "fn rust_fn_return_opaque_swift_type() -> ffi::SomeSwiftType {\n\n let some_swift_type = ffi::SomeSwiftType::new();\n\n\n\n some_swift_type.set_text(\"I was initialized from Rust\");\n\n\n\n some_swift_type\n\n}\n", "file_path": "crates/swift-integration-tests/src/rust_function_uses_opaque_swift_type.rs", "rank": 3, "score": 222615.91317158923 }, { "content": "fn swift_calls_rust_struct_repr_struct_one_primitive_field(\n\n arg: ffi::StructReprStructWithOnePrimitiveField,\n\n) -> ffi::StructReprStructWithOnePrimitiveField {\n\n arg\n\n}\n\n\n", "file_path": "crates/swift-integration-tests/src/shared_types/shared_struct.rs", "rank": 4, "score": 213955.16241024653 }, { "content": "fn get_struct() -> SomeType {\n\n SomeType\n\n}\n\n\n", "file_path": "crates/swift-integration-tests/src/function_attributes/into_return_type.rs", "rank": 5, "score": 213431.66165752622 }, { "content": "fn reflect_enum_with_no_data(arg: ffi::EnumWithNoData) -> ffi::EnumWithNoData {\n\n arg\n\n}\n", "file_path": "crates/swift-integration-tests/src/shared_types/shared_enum.rs", "rank": 6, "score": 211208.6692193566 }, { "content": "fn extern_rust_struct_rename_3(arg: StructName3) -> StructName3 {\n\n arg\n\n}\n", "file_path": "crates/swift-integration-tests/src/struct_attributes/swift_name.rs", "rank": 7, "score": 209008.24207591917 }, { "content": "fn extern_rust_struct_rename_1(arg: StructName1) -> StructName1 {\n\n arg\n\n}\n", "file_path": "crates/swift-integration-tests/src/struct_attributes/swift_name.rs", "rank": 8, "score": 209008.24207591917 }, { "content": "fn extern_rust_struct_rename_2(arg: StructName2) -> StructName2 {\n\n arg\n\n}\n", "file_path": "crates/swift-integration-tests/src/struct_attributes/swift_name.rs", "rank": 9, "score": 209008.24207591917 }, { "content": "class RustFnReturnOpaqueSwiftTypeTests: XCTestCase {\n\n\n\n override func setUpWithError() throws {\n\n // Put setup code here. This method is called before the invocation of each test method in the class.\n\n }\n\n\n\n override func tearDownWithError() throws {\n\n // Put teardown code here. This method is called after the invocation of each test method in the class.\n\n }\n\n\n\n func testRustFnReturnOpaqueSwiftType() throws {\n\n let someSwiftType = rust_fn_return_opaque_swift_type()\n\n XCTAssertEqual(someSwiftType.text, \"I was initialized from Rust\")\n\n }\n\n}\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunnerTests/RustFnUsesOpaqueSwiftTypeTests.swift", "rank": 10, "score": 207535.0220643277 }, { "content": "fn get_already_declared_struct() -> SomeType {\n\n SomeType\n\n}\n\n\n\nimpl Into<SomeType> for AnotherType {\n\n fn into(self) -> SomeType {\n\n SomeType\n\n }\n\n}\n\n\n\nimpl Into<ffi::SomeStruct> for SomeType {\n\n fn into(self) -> ffi::SomeStruct {\n\n ffi::SomeStruct\n\n }\n\n}\n\nimpl Into<ffi2::AlreadyDeclaredStruct> for SomeType {\n\n fn into(self) -> ffi2::AlreadyDeclaredStruct {\n\n ffi2::AlreadyDeclaredStruct\n\n }\n\n}\n", "file_path": "crates/swift-integration-tests/src/function_attributes/into_return_type.rs", "rank": 11, "score": 206126.9486862572 }, { "content": "/// Tests for attributes on shared structs types.\n\nclass SharedStructAttributeTests: XCTestCase {\n\n\n\n override func setUpWithError() throws {\n\n // Put setup code here. This method is called before the invocation of each test method in the class.\n\n }\n\n\n\n override func tearDownWithError() throws {\n\n // Put teardown code here. This method is called after the invocation of each test method in the class.\n\n }\n\n\n\n /// Verify that we can call a function that uses a type that was already declared in a different bridge module.\n\n /// See crates/swift-integration-tests/src/struct_attributes/already_declared.rs\n\n func testSharedStructAlreadyDeclaredCallInitializer() throws {\n\n let val = AlreadyDeclaredStructTest(field: 123)\n\n \n\n XCTAssertEqual(\n\n reflect_already_declared_struct(val).field,\n\n 123\n\n )\n\n }\n\n}\n\n\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunnerTests/SharedStructAttributeTests.swift", "rank": 12, "score": 200266.47701801657 }, { "content": "/// Converts both token streams to strings, removes all of the whitespace then checks that the outer\n\n/// token stream contains the inner one.\n\npub fn assert_tokens_contain(outer: &TokenStream, inner: &TokenStream) {\n\n let outer_string = outer.to_string();\n\n let outer_string = outer_string.replace(\" \", \"\").replace(\"\\n\", \"\");\n\n\n\n let inner_string = inner.to_string();\n\n let inner_string = inner_string.replace(\" \", \"\").replace(\"\\n\", \"\");\n\n\n\n let is_contained = outer_string.contains(&inner_string);\n\n\n\n assert!(\n\n is_contained,\n\n r#\"\n\nOuter tokens do not contain the inner tokens. \n\n\n\nOuter Tokens:\n\n{}\n\n\n\nInner Tokens:\n\n{}\n\n\"#,\n\n outer.to_string(),\n\n inner.to_string()\n\n )\n\n}\n\n\n", "file_path": "crates/swift-bridge-ir/src/test_utils.rs", "rank": 14, "score": 190823.68153787375 }, { "content": "/// Converts both token streams to strings, removes all of the whitespace then checks that the outer\n\n/// token stream does not contain the inner one.\n\npub fn assert_tokens_do_not_contain(outer: &TokenStream, inner: &TokenStream) {\n\n let outer_string = outer.to_string();\n\n let outer_string = outer_string.replace(\" \", \"\").replace(\"\\n\", \"\");\n\n\n\n let inner_string = inner.to_string();\n\n let inner_string = inner_string.replace(\" \", \"\").replace(\"\\n\", \"\");\n\n\n\n let is_contained = outer_string.contains(&inner_string);\n\n\n\n assert!(\n\n !is_contained,\n\n r#\"\n\nOuter tokens do not contain the inner tokens. \n\n\n\nOuter Tokens:\n\n{}\n\n\n\nInner Tokens:\n\n{}\n\n\"#,\n\n outer.to_string(),\n\n inner.to_string()\n\n )\n\n}\n\n\n", "file_path": "crates/swift-bridge-ir/src/test_utils.rs", "rank": 15, "score": 190823.68153787375 }, { "content": "pub fn assert_tokens_eq(left: &TokenStream, right: &TokenStream) {\n\n assert_eq!(\n\n token_stream_to_vec(&left),\n\n token_stream_to_vec(&right),\n\n r#\"\n\nLeft Tokens:\n\n{}\n\n\n\nRight Tokens:\n\n{}\n\n\"#,\n\n left.to_string(),\n\n right.to_string()\n\n )\n\n}\n\n\n", "file_path": "crates/swift-bridge-ir/src/test_utils.rs", "rank": 16, "score": 190818.72652705235 }, { "content": "class SwiftFnUsesOpaqueRustTypeTests: XCTestCase {\n\n\n\n override func setUpWithError() throws {\n\n // Put setup code here. This method is called before the invocation of each test method in the class.\n\n }\n\n\n\n override func tearDownWithError() throws {\n\n // Put teardown code here. This method is called after the invocation of each test method in the class.\n\n }\n\n\n\n func testRustFnCallsWithFnWithOwnedOpaqueArg() throws {\n\n test_call_swift_fn_with_owned_opaque_rust_arg()\n\n }\n\n}\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunnerTests/SwiftFnUsesOpaqueRustTypeTests.swift", "rank": 17, "score": 190540.21287933845 }, { "content": "fn rust_negate_bool(start: bool) -> bool {\n\n !start\n\n}\n", "file_path": "crates/swift-integration-tests/src/bool.rs", "rank": 19, "score": 185288.63731382025 }, { "content": "struct AnotherType;\n\n\n", "file_path": "crates/swift-integration-tests/src/function_attributes/into_return_type.rs", "rank": 20, "score": 183631.19323171084 }, { "content": "fn reflect_already_declared_struct(arg: AlreadyDeclaredStructTest) -> AlreadyDeclaredStructTest {\n\n arg\n\n}\n", "file_path": "crates/swift-integration-tests/src/struct_attributes/already_declared.rs", "rank": 21, "score": 183536.48402076453 }, { "content": "func rust_calls_struct_repr_struct_one_primitive_field(arg: StructReprStructWithOnePrimitiveField) -> StructReprStructWithOnePrimitiveField {\n\n arg\n\n}\n\n\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunner/SharedStruct.swift", "rank": 22, "score": 183115.7491405269 }, { "content": "fn another_function() {}\n", "file_path": "crates/swift-integration-tests/src/function_attributes/rust_name.rs", "rank": 23, "score": 182287.2781053007 }, { "content": "fn rust_reflect_struct_with_option_fields(\n\n arg: ffi::StructWithOptionFields,\n\n) -> ffi::StructWithOptionFields {\n\n arg\n\n}\n", "file_path": "crates/swift-integration-tests/src/option.rs", "rank": 24, "score": 181821.8282082021 }, { "content": "#[cfg(feature = \"this_is_enabled\")]\n\nfn conditionally_exposed_fn() -> u8 {\n\n 123\n\n}\n", "file_path": "crates/swift-integration-tests/src/conditional_compilation.rs", "rank": 25, "score": 179165.14652559347 }, { "content": "fn get_str_value_return_with() -> &'static str {\n\n \"123\"\n\n}\n\n\n\nmod some_module {\n\n pub fn convert_str_to_u32(val: &str) -> u32 {\n\n val.parse().unwrap()\n\n }\n\n}\n", "file_path": "crates/swift-integration-tests/src/function_attributes/return_with.rs", "rank": 26, "score": 178822.5310822731 }, { "content": "struct __swift_bridge__$FfiSomeType __swift_bridge__$some_function(struct __swift_bridge__$FfiSomeType arg);\n\n\"#,\n\n )\n\n }\n\n\n\n #[test]\n\n fn already_declared_struct() {\n\n CodegenTest {\n\n bridge_module: bridge_module_tokens().into(),\n\n expected_rust_tokens: expected_rust_tokens(),\n\n expected_swift_code: expected_swift_code(),\n\n expected_c_header: expected_c_header(),\n\n }\n\n .test();\n\n }\n\n}\n", "file_path": "crates/swift-bridge-ir/src/codegen/codegen_tests/already_declared_attribute_codegen_tests.rs", "rank": 28, "score": 176621.92015025235 }, { "content": "/// Executes the correct function depending on the cli input\n\npub fn handle_matches(matches: ArgMatches) {\n\n match matches.subcommand_name() {\n\n Some(\"create-package\") => {\n\n handle_create_package(matches.subcommand_matches(\"create-package\").unwrap())\n\n }\n\n _ => unreachable!(\"No subcommand or unknown subcommand given\"), // Shouldn't happen\n\n }\n\n}\n\n\n", "file_path": "crates/swift-bridge-cli/src/clap_exec.rs", "rank": 29, "score": 172864.56749020197 }, { "content": "fn get_another_type() -> AnotherType {\n\n AnotherType\n\n}\n\n\n", "file_path": "crates/swift-integration-tests/src/function_attributes/into_return_type.rs", "rank": 30, "score": 170085.6432306846 }, { "content": "fn rust_echo_mut_c_void(ptr: *mut c_void) -> *mut c_void {\n\n ptr\n\n}\n\n\n", "file_path": "crates/swift-integration-tests/src/pointer.rs", "rank": 31, "score": 169418.79917781963 }, { "content": "fn rust_echo_const_u8(ptr: *const u8) -> *const u8 {\n\n ptr\n\n}\n\n\n", "file_path": "crates/swift-integration-tests/src/pointer.rs", "rank": 32, "score": 169374.12800142457 }, { "content": "fn run_bool_test() {\n\n assert_eq!(ffi::swift_negate_bool(true), false);\n\n assert_eq!(ffi::swift_negate_bool(false), true);\n\n}\n\n\n", "file_path": "crates/swift-integration-tests/src/bool.rs", "rank": 33, "score": 169217.82551051988 }, { "content": "class SharedEnumTests: XCTestCase {\n\n override func setUpWithError() throws {\n\n // Put setup code here. This method is called before the invocation of each test method in the class.\n\n }\n\n \n\n override func tearDownWithError() throws {\n\n // Put teardown code here. This method is called after the invocation of each test method in the class.\n\n }\n\n \n\n func testEnumWithNoData() {\n\n let enumWithNoData1 = EnumWithNoData.Variant1\n\n let enumWithNoData2 = EnumWithNoData.Variant2\n\n \n\n let reflected1 = reflect_enum_with_no_data(enumWithNoData1)\n\n let reflected2 = reflect_enum_with_no_data(enumWithNoData2)\n\n \n\n switch (reflected1, reflected2) {\n\n case (.Variant1, .Variant2):\n\n break;\n\n default:\n\n fatalError()\n\n }\n\n }\n\n}\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunnerTests/SharedEnumTests.swift", "rank": 34, "score": 160425.48820206075 }, { "content": "class SharedStructTests: XCTestCase {\n\n override func setUpWithError() throws {\n\n // Put setup code here. This method is called before the invocation of each test method in the class.\n\n }\n\n \n\n override func tearDownWithError() throws {\n\n // Put teardown code here. This method is called after the invocation of each test method in the class.\n\n }\n\n \n\n /// Run all of the tests that are defined on the Rust side in\n\n /// crates/swift-integration-tests/src/shared_types/shared_struct.rs\n\n func testRust() {\n\n test_rust_calls_swift()\n\n }\n\n \n\n func testStructWithNoFields() {\n\n let _: StructWithNoFields = swift_calls_rust_struct_with_no_fields(StructWithNoFields())\n\n }\n\n \n\n func testStructReprStructWithOnePrimitiveField() {\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunnerTests/SharedStructTests.swift", "rank": 35, "score": 159631.87255294254 }, { "content": "/// We declare a few shared structs that are renamed using the `swift_name` attribute.\n\n/// We then use them as function arg and return types.\n\n///\n\n/// Related: crates/swift-bridge-ir/src/codegen/codegen_tests/shared_struct_codegen_tests.rs\n\n/// - shared_struct_swift_name_attribute\n\n#[swift_bridge::bridge]\n\nmod ffi {\n\n #[swift_bridge(swift_name = \"StructRename1\")]\n\n struct StructName1;\n\n\n\n #[swift_bridge(swift_name = \"StructRename2\", swift_repr = \"struct\")]\n\n struct StructName2 {\n\n field: u8,\n\n }\n\n\n\n #[swift_bridge(swift_name = \"StructRename3\", swift_repr = \"struct\")]\n\n struct StructName3(u8);\n\n\n\n extern \"Rust\" {\n\n fn extern_rust_struct_rename_1(arg: StructName1) -> StructName1;\n", "file_path": "crates/swift-integration-tests/src/struct_attributes/swift_name.rs", "rank": 36, "score": 159613.19917186326 }, { "content": " fn extern_rust_struct_rename_2(arg: StructName2) -> StructName2;\n\n fn extern_rust_struct_rename_3(arg: StructName3) -> StructName3;\n\n }\n\n\n\n extern \"Swift\" {\n\n fn extern_swift_struct_rename_1(arg: StructName1) -> StructName1;\n\n fn extern_swift_struct_rename_2(arg: StructName2) -> StructName2;\n\n fn extern_swift_struct_rename_3(arg: StructName3) -> StructName3;\n\n }\n\n}\n\n\n\nuse ffi::{StructName1, StructName2, StructName3};\n\n\n", "file_path": "crates/swift-integration-tests/src/struct_attributes/swift_name.rs", "rank": 37, "score": 159591.54653207958 }, { "content": "fn run_opaque_swift_class_tests() {\n\n use ffi::ASwiftStack;\n\n use std::ptr::slice_from_raw_parts;\n\n\n\n let mut stack = ASwiftStack::new();\n\n\n\n stack.push(5);\n\n stack.push(10);\n\n\n\n assert_eq!(stack.len(), 2);\n\n\n\n let ptr = stack.as_ptr();\n\n let len = stack.len();\n\n\n\n let vals: &[u8] = unsafe { &*slice_from_raw_parts(ptr, len) };\n\n\n\n assert_eq!(vals, &[5, 10]);\n\n assert_eq!(vals, stack.as_slice());\n\n\n\n stack.pop();\n\n assert_eq!(stack.len(), 1);\n\n}\n", "file_path": "crates/swift-integration-tests/src/import_opaque_swift_class.rs", "rank": 38, "score": 158764.45963173255 }, { "content": "/// Tests for attributes on opaque types.\n\nclass OpaqueTypeAttributeTests: XCTestCase {\n\n\n\n override func setUpWithError() throws {\n\n // Put setup code here. This method is called before the invocation of each test method in the class.\n\n }\n\n\n\n override func tearDownWithError() throws {\n\n // Put teardown code here. This method is called after the invocation of each test method in the class.\n\n }\n\n\n\n /// Verify that we can call an initializers, methods and associated functions that were declared in a different module from\n\n /// where the opaque Rust type was defined.\n\n /// This ensures that our code generation properly generates Swift convenience initializers inside of class extensions.\n\n /// See crates/swift-integration-tests/src/type_attributes/already_declared.rs\n\n func testExternRustAlreadyDeclaredCallInitializer() throws {\n\n let val = AlreadyDeclaredTypeTest()\n\n \n\n XCTAssert(val.a_ref_method())\n\n XCTAssert(val.a_ref_mut_method())\n\n XCTAssert(val.an_owned_method())\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunnerTests/OpaqueTypeAttributeTests.swift", "rank": 39, "score": 157489.49920516904 }, { "content": "/// Tests the #[swift_bridge(Identifiable)] attribute.\n\nclass FunctionAttributeIdentifiableTests: XCTestCase {\n\n\n\n override func setUpWithError() throws {\n\n // Put setup code here. This method is called before the invocation of each test method in the class.\n\n }\n\n\n\n override func tearDownWithError() throws {\n\n // Put teardown code here. This method is called after the invocation of each test method in the class.\n\n }\n\n\n\n /// Verify that the `swift_bridge(Identifiable)` attribute works.\n\n func testIdentifiable() throws {\n\n XCTAssertEqual(verifyIsIdentifiable(IdentifiableFnNamedId()).id(), 123)\n\n XCTAssertEqual(IdentifiableFnNotNamedId().id, 123)\n\n \n\n XCTAssertEqual(verifyIsIdentifiable(IdentifiableU8()).id(), 123)\n\n XCTAssertEqual(verifyIsIdentifiable(IdentifiableI8()).id(), 123)\n\n \n\n \"hello world\".toRustStr({rustStr in\n\n XCTAssertEqual(verifyIsIdentifiable(IdentifiableStr()).id(), rustStr)\n\n })\n\n }\n\n}\n\n\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunnerTests/FunctionAttributeIdentifiableTests.swift", "rank": 40, "score": 157489.37819333724 }, { "content": "struct __swift_bridge__$SomeEnum __swift_bridge__$some_function(struct __swift_bridge__$SomeEnum arg);\n\n \"#,\n\n )\n\n }\n\n\n\n #[test]\n\n fn using_enum_in_extern_rust_fn() {\n\n CodegenTest {\n\n bridge_module: bridge_module_tokens().into(),\n\n expected_rust_tokens: expected_rust_tokens(),\n\n expected_swift_code: expected_swift_code(),\n\n expected_c_header: expected_c_header(),\n\n }\n\n .test();\n\n }\n\n}\n\n\n\n/// Verify that we can use `Option<Enum>` as Rust function arg and return type.\n\nmod extern_rust_option_enum {\n\n use super::*;\n", "file_path": "crates/swift-bridge-ir/src/codegen/codegen_tests/shared_enum_codegen_tests.rs", "rank": 41, "score": 157207.54832396528 }, { "content": "class OpaqueSwiftStructTests: XCTestCase {\n\n \n\n override func setUpWithError() throws {\n\n // Put setup code here. This method is called before the invocation of each test method in the class.\n\n }\n\n \n\n override func tearDownWithError() throws {\n\n // Put teardown code here. This method is called after the invocation of each test method in the class.\n\n }\n\n \n\n /// Run the Rust tests that create and use an opaque Swift struct.\n\n func testOpaqueSwiftStruct() throws {\n\n run_opaque_swift_class_tests()\n\n }\n\n}\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunnerTests/OpaqueSwiftStructTests.swift", "rank": 42, "score": 156942.00624328834 }, { "content": "class OpaqueRustStructTests: XCTestCase {\n\n \n\n override func setUpWithError() throws {\n\n // Put setup code here. This method is called before the invocation of each test method in the class.\n\n }\n\n \n\n override func tearDownWithError() throws {\n\n // Put teardown code here. This method is called after the invocation of each test method in the class.\n\n }\n\n \n\n /// Verify that we can create and work with an opaque Rust struct via a generated class\n\n func testOpaqueRustStruct() throws {\n\n let stack = ARustStack()\n\n \n\n XCTAssertEqual(stack.len(), 0)\n\n stack.push(5)\n\n stack.push(10)\n\n XCTAssertEqual(stack.len(), 2)\n\n \n\n XCTAssertEqual(stack.as_slice()[0], 5)\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunnerTests/OpaqueRustStructTests.swift", "rank": 43, "score": 156942.00624328834 }, { "content": "/// Verify that we can pass and return u8 pointers across the boundary.\n\nfn rust_run_u8_pointer_tests() {\n\n let num = &5u8;\n\n let num_mut = &mut 10u8;\n\n\n\n let ptr = num as *const u8;\n\n let ptr_mut = num_mut as *mut u8;\n\n\n\n let ptr_copy = ffi::swift_echo_const_u8(ptr);\n\n let ptr_mut_copy = ffi::swift_echo_mut_u8(ptr_mut);\n\n\n\n assert_eq!(unsafe { *ptr_copy }, 5);\n\n\n\n assert_eq!(unsafe { *ptr_mut_copy }, 10);\n\n}\n\n\n", "file_path": "crates/swift-integration-tests/src/pointer.rs", "rank": 44, "score": 153898.60114877808 }, { "content": "struct __private__FfiSlice __swift_bridge__$bar(void);\n\n \"#;\n\n\n\n let module = parse_ok(tokens);\n\n assert_eq!(\n\n module\n\n .generate_c_header_inner(&CodegenConfig::no_features_enabled())\n\n .trim(),\n\n expected.trim()\n\n );\n\n }\n\n\n\n fn parse_ok(tokens: TokenStream) -> SwiftBridgeModule {\n\n let module_and_errors: SwiftBridgeModuleAndErrors = syn::parse2(tokens).unwrap();\n\n module_and_errors.module\n\n }\n\n\n\n /// Verify that we generate a proper header for a Rust function that returns an owned Swift\n\n /// type.\n\n #[test]\n", "file_path": "crates/swift-bridge-ir/src/codegen/generate_c_header.rs", "rank": 45, "score": 153106.01622217678 }, { "content": "struct __private__FfiSlice __swift_bridge__$foo(void);\n", "file_path": "crates/swift-bridge-ir/src/codegen/generate_c_header.rs", "rank": 46, "score": 153025.2383369963 }, { "content": "struct __swift_bridge__$Option$SomeEnum __swift_bridge__$some_function(struct __swift_bridge__$Option$SomeEnum arg);\n\n \"#,\n\n )\n\n }\n\n\n\n #[test]\n\n fn option_enum() {\n\n CodegenTest {\n\n bridge_module: bridge_module_tokens().into(),\n\n expected_rust_tokens: expected_rust_tokens(),\n\n expected_swift_code: expected_swift_code(),\n\n expected_c_header: expected_c_header(),\n\n }\n\n .test();\n\n }\n\n}\n", "file_path": "crates/swift-bridge-ir/src/codegen/codegen_tests/shared_enum_codegen_tests.rs", "rank": 47, "score": 152195.0492294357 }, { "content": "fn test_call_swift_fn_with_owned_opaque_rust_arg() {\n\n let some_rust_type = SomeRustType::new(5);\n\n let counter = some_rust_type.counter.clone();\n\n\n\n // Unwrap fails since there is a strong reference to the Rc held in `some_rust_type`\n\n let counter = Rc::try_unwrap(counter).err().unwrap();\n\n\n\n ffi::increment_some_owned_opaque_rust_type(some_rust_type, 10);\n\n\n\n // Unwrap succeeds since Swift freed the owned `some_rust_type` along with it's Rc at the end of\n\n // the `increment_some_rust_type` function.\n\n let counter = Rc::try_unwrap(counter).unwrap();\n\n\n\n assert_eq!(counter.take(), 15);\n\n}\n\n\n\npub struct SomeRustType {\n\n counter: Rc<RefCell<u32>>,\n\n}\n\n\n", "file_path": "crates/swift-integration-tests/src/swift_function_uses_opaque_rust_type.rs", "rank": 48, "score": 149820.46485893385 }, { "content": "struct TypeA;\n\n\n", "file_path": "crates/swift-integration-tests/src/function_attributes/args_into.rs", "rank": 49, "score": 149406.95711693572 }, { "content": "fn test_rust_calls_swift() {\n\n self::tests::test_rust_calls_swift_struct_with_no_fields();\n\n self::tests::test_rust_calls_struct_repr_struct_one_primitive_field();\n\n}\n\n\n", "file_path": "crates/swift-integration-tests/src/shared_types/shared_struct.rs", "rank": 50, "score": 148714.78266337916 }, { "content": "enum TypeB {\n\n Foo(u8),\n\n}\n\n\n\nimpl Into<TypeA> for ffi::SomeStruct {\n\n fn into(self) -> TypeA {\n\n TypeA\n\n }\n\n}\n\n\n\nimpl Into<TypeB> for ffi::AnotherStruct {\n\n fn into(self) -> TypeB {\n\n TypeB::Foo(self.foo)\n\n }\n\n}\n", "file_path": "crates/swift-integration-tests/src/function_attributes/args_into.rs", "rank": 51, "score": 148475.38089602624 }, { "content": "struct BridgeModule {\n\n /// The bridge module's tokens\n\n pub tokens: TokenStream,\n\n /// A mock representation of the features that are enabled for the crate that contains the\n\n /// bridge module.\n\n pub enabled_crate_features: Vec<&'static str>,\n\n}\n\n\n\nimpl From<TokenStream> for BridgeModule {\n\n fn from(tokens: TokenStream) -> Self {\n\n BridgeModule {\n\n tokens,\n\n enabled_crate_features: vec![],\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/swift-bridge-ir/src/codegen/codegen_tests.rs", "rank": 52, "score": 147279.0277375686 }, { "content": "fn swift_calls_rust_tuple_struct(\n\n arg: ffi::StructReprStructTupleStruct,\n\n) -> ffi::StructReprStructTupleStruct {\n\n arg\n\n}\n\n\n\n#[deny(unused)]\n\nmod tests {\n\n use super::ffi;\n\n\n\n pub(super) fn test_rust_calls_swift_struct_with_no_fields() {\n\n let _: ffi::StructWithNoFields =\n\n ffi::rust_calls_swift_struct_with_no_fields(ffi::StructWithNoFields);\n\n }\n\n\n\n pub(super) fn test_rust_calls_struct_repr_struct_one_primitive_field() {\n\n let arg = ffi::StructReprStructWithOnePrimitiveField { named_field: 10 };\n\n\n\n let val = ffi::rust_calls_struct_repr_struct_one_primitive_field(arg);\n\n\n\n assert_eq!(val.named_field, 10);\n\n }\n\n}\n", "file_path": "crates/swift-integration-tests/src/shared_types/shared_struct.rs", "rank": 53, "score": 147128.92465617423 }, { "content": "enum ExpectedRustTokens {\n\n /// The generated Rust token stream matches the provided stream.\n\n #[allow(unused)]\n\n Exact(TokenStream),\n\n /// The generated Rust tokens stream contains the provided stream.\n\n Contains(TokenStream),\n\n /// The generated Rust tokens stream does not contain the provided stream.\n\n DoesNotContain(TokenStream),\n\n /// The generated Rust tokens stream contains the provided stream.\n\n ContainsMany(Vec<TokenStream>),\n\n /// Skip testing Rust tokens\n\n // We use a variant instead of Option<ExpectRustTokens> as not to make it seem like no Rust\n\n // tokens got generated.\n\n SkipTest,\n\n}\n\n\n", "file_path": "crates/swift-bridge-ir/src/codegen/codegen_tests.rs", "rank": 54, "score": 146150.16676575225 }, { "content": "#[doc(hidden)]\n\npub trait SharedStruct {\n\n /// The FFI friendly representation of this struct.\n\n ///\n\n /// ```\n\n /// struct MyStruct {\n\n /// field: &'static str\n\n /// }\n\n /// // This is the auto generated ffi representation.\n\n /// #[repr(C)]\n\n /// struct __swift_bridge__MyStruct {\n\n /// field: swift_bridge::string::RustStr\n\n /// }\n\n /// ```\n\n type FfiRepr;\n\n}\n\n\n\n// The code generation automatically implements this for all shared enum.\n\n// This trait is private and should not be used outside of swift-bridge.\n", "file_path": "src/lib.rs", "rank": 55, "score": 144497.49395724223 }, { "content": "func some_function(_ some_arg: SomeStruct, _ another_arg: AnotherStruct, _ arg3: UInt8) {\n\n __swift_bridge__$some_function(some_arg.intoFfiRepr(), another_arg.intoFfiRepr(), arg3)\n\n}\n\n\"#,\n\n )\n\n }\n\n\n\n fn expected_c_header() -> ExpectedCHeader {\n\n ExpectedCHeader::SkipTest\n\n }\n\n\n\n #[test]\n\n fn function_args_into_attribute() {\n\n CodegenTest {\n\n bridge_module: bridge_module_tokens().into(),\n\n expected_rust_tokens: expected_rust_tokens(),\n\n expected_swift_code: expected_swift_code(),\n\n expected_c_header: expected_c_header(),\n\n }\n\n .test();\n", "file_path": "crates/swift-bridge-ir/src/codegen/codegen_tests/function_attribute_codegen_tests.rs", "rank": 56, "score": 143784.82254799575 }, { "content": "struct __private__OptionF32 __swift_bridge__$some_function(struct __private__OptionU8 arg);\n\n \"#,\n\n );\n\n\n\n #[test]\n\n fn extern_rust_fn_option_primitive() {\n\n CodegenTest {\n\n bridge_module: bridge_module_tokens().into(),\n\n expected_rust_tokens: expected_rust_tokens(),\n\n expected_swift_code: expected_swift_code(),\n\n expected_c_header: EXPECTED_C_HEADER,\n\n }\n\n .test();\n\n }\n\n}\n\n\n\n/// Test code generation for Rust function that accepts and returns Option<String>.\n\nmod extern_rust_fn_option_string {\n\n use super::*;\n\n\n", "file_path": "crates/swift-bridge-ir/src/codegen/codegen_tests/option_codegen_tests.rs", "rank": 57, "score": 143654.20919090073 }, { "content": "func rust_calls_swift_struct_with_no_fields(arg: StructWithNoFields) -> StructWithNoFields {\n\n arg\n\n}\n\n\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunner/SharedStruct.swift", "rank": 58, "score": 143590.1861900723 }, { "content": "fn rust_reflect_option_enum_with_no_data(\n\n arg: Option<ffi::OptionEnumWithNoData>,\n\n) -> Option<ffi::OptionEnumWithNoData> {\n\n arg\n\n}\n\n\n", "file_path": "crates/swift-integration-tests/src/option.rs", "rank": 59, "score": 143358.3745084728 }, { "content": "fn rust_reflect_option_struct_with_no_data(\n\n arg: Option<ffi::OptionStruct>,\n\n) -> Option<ffi::OptionStruct> {\n\n arg\n\n}\n", "file_path": "crates/swift-integration-tests/src/option.rs", "rank": 60, "score": 142664.54535758507 }, { "content": "fn token_stream_to_vec(tokens: &TokenStream) -> Vec<String> {\n\n tokens\n\n .clone()\n\n .into_iter()\n\n .map(|t| t.to_string().trim().to_string())\n\n .collect()\n\n}\n", "file_path": "crates/swift-bridge-ir/src/test_utils.rs", "rank": 61, "score": 141688.0059928074 }, { "content": "/// Trims both generated and expected.\n\npub fn assert_trimmed_generated_does_not_contain_trimmed_expected(generated: &str, expected: &str) {\n\n assert!(\n\n !generated.trim().contains(&expected.trim()),\n\n r#\"Expected was contained by generated.\n\nGenerated:\n\n{}\n\nExpected:\n\n{}\"#,\n\n generated.trim(),\n\n expected.trim()\n\n );\n\n}\n\n\n\npub(crate) fn parse_ok(tokens: TokenStream) -> SwiftBridgeModule {\n\n let module_and_errors: SwiftBridgeModuleAndErrors = syn::parse2(tokens).unwrap();\n\n module_and_errors.module\n\n}\n\n\n\npub(crate) fn parse_errors(tokens: TokenStream) -> ParseErrors {\n\n let parsed: SwiftBridgeModuleAndErrors = syn::parse2(tokens).unwrap();\n\n parsed.errors\n\n}\n\n\n", "file_path": "crates/swift-bridge-ir/src/test_utils.rs", "rank": 62, "score": 137139.37480815742 }, { "content": "/// Trims both generated and expected.\n\npub fn assert_trimmed_generated_contains_trimmed_expected(generated: &str, expected: &str) {\n\n assert!(\n\n generated.trim().contains(&expected.trim()),\n\n r#\"Expected was not contained by generated.\n\nGenerated:\n\n{}\n\nExpected:\n\n{}\"#,\n\n generated.trim(),\n\n expected.trim()\n\n );\n\n}\n\n\n", "file_path": "crates/swift-bridge-ir/src/test_utils.rs", "rank": 63, "score": 137139.37480815742 }, { "content": "/// Trims both generated and expected.\n\npub fn assert_trimmed_generated_equals_trimmed_expected(generated: &str, expected: &str) {\n\n assert_eq!(\n\n generated.trim(),\n\n expected.trim(),\n\n r#\"Expected did not equal generated.\n\nGenerated:\n\n{}\n\nExpected:\n\n{}\"#,\n\n generated.trim(),\n\n expected.trim()\n\n );\n\n}\n\n\n", "file_path": "crates/swift-bridge-ir/src/test_utils.rs", "rank": 64, "score": 137139.37480815742 }, { "content": "#[proc_macro_attribute]\n\npub fn bridge(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream,\n\n) -> proc_macro::TokenStream {\n\n let args = parse_macro_input!(args as SwiftBridgeModuleAttrs);\n\n let mut module = parse_macro_input!(input as SwiftBridgeModule);\n\n\n\n for arg in args.attributes {\n\n match arg {\n\n SwiftBridgeModuleAttr::SwiftBridgePath(path) => {\n\n module.set_swift_bridge_path(path);\n\n }\n\n }\n\n }\n\n\n\n let tokens = quote! {\n\n #module\n\n };\n\n tokens.into()\n\n}\n\n\n\n#[cfg(test)]\n\nmod ui_tests {\n\n #[test]\n\n fn ui() {\n\n let t = trybuild::TestCases::new();\n\n t.compile_fail(\"tests/ui/*.rs\");\n\n }\n\n}\n", "file_path": "crates/swift-bridge-macro/src/lib.rs", "rank": 65, "score": 136541.60548309045 }, { "content": "fn pat_ty_type_reference(pat_ty: &PatType) -> Option<&TypeReference> {\n\n match pat_ty.ty.deref() {\n\n Type::Reference(reference) => Some(reference),\n\n _ => None,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::test_utils::{assert_tokens_eq, parse_ok};\n\n\n\n /// Verify that we generate a Rust associated function that calls a Swift static class method.\n\n #[test]\n\n fn static_class_method() {\n\n let start = quote! {\n\n mod foo {\n\n extern \"Swift\" {\n\n type Foo;\n\n\n", "file_path": "crates/swift-bridge-ir/src/parsed_extern_fn/to_rust_impl_call_swift.rs", "rank": 66, "score": 136512.3381841155 }, { "content": "/// Parse rust sources files for `#\\[swift_bridge::bridge\\]` headers and generate the corresponding\n\n/// Swift files.\n\npub fn parse_bridges(\n\n rust_source_files: impl IntoIterator<Item = impl AsRef<Path>>,\n\n) -> GeneratedCode {\n\n let mut generated_code = GeneratedCode::new();\n\n\n\n for rust_file in rust_source_files.into_iter() {\n\n let rust_file: &Path = rust_file.as_ref();\n\n\n\n let file = std::fs::read_to_string(rust_file).unwrap();\n\n let gen = match parse_file(&file) {\n\n Ok(generated) => generated,\n\n Err(e) => {\n\n // TODO: Return an error...\n\n panic!(\n\n r#\"\n\nError while parsing {:?}\n\n{}\n\n\"#,\n\n rust_file, e\n\n )\n", "file_path": "crates/swift-bridge-build/src/lib.rs", "rank": 67, "score": 134753.1751155225 }, { "content": "fn test_args_into(_some_arg: TypeA, _another_arg: TypeB) {}\n\n\n", "file_path": "crates/swift-integration-tests/src/function_attributes/args_into.rs", "rank": 68, "score": 133532.49404681876 }, { "content": "// self: &Foo would return &Foo\n\n// _foo: &Foo would not return &Foo\n\nfn pat_ty_type_reference_if_arg_self(fn_arg: &FnArg) -> Option<&TypeReference> {\n\n match fn_arg {\n\n FnArg::Typed(pat_ty) if pat_type_pat_is_self(pat_ty) => {\n\n if let Some(reference) = pat_ty_type_reference(pat_ty) {\n\n Some(reference)\n\n } else {\n\n None\n\n }\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "crates/swift-bridge-ir/src/parsed_extern_fn/to_rust_impl_call_swift.rs", "rank": 69, "score": 131763.89574148023 }, { "content": "fn vec_functions(ty_name: &str) -> String {\n\n format!(\n\n r#\"\n\nvoid* __swift_bridge__$Vec_{ty_name}$new(void);\n\nvoid __swift_bridge__$Vec_{ty_name}$drop(void* vec_ptr);\n\nvoid __swift_bridge__$Vec_{ty_name}$push(void* vec_ptr, void* item_ptr);\n\nvoid* __swift_bridge__$Vec_{ty_name}$pop(void* vec_ptr);\n\nvoid* __swift_bridge__$Vec_{ty_name}$get(void* vec_ptr, uintptr_t index);\n\nvoid* __swift_bridge__$Vec_{ty_name}$get_mut(void* vec_ptr, uintptr_t index);\n\nuintptr_t __swift_bridge__$Vec_{ty_name}$len(void* vec_ptr);\n\nvoid* __swift_bridge__$Vec_{ty_name}$as_ptr(void* vec_ptr);\n\n\"#,\n\n ty_name = ty_name\n\n )\n\n}\n\n\n", "file_path": "crates/swift-bridge-ir/src/codegen/generate_c_header.rs", "rank": 70, "score": 131133.32452176325 }, { "content": "struct SomeStruct {\n\n var field: UInt8\n\n\"#,\n\n r#\"\n", "file_path": "crates/swift-bridge-ir/src/codegen/codegen_tests/shared_struct_codegen_tests.rs", "rank": 71, "score": 129567.12754220249 }, { "content": " class public func bar() {\n\n __swift_bridge__$Foo$bar()\n\n }\n\n}\n\n\"#;\n\n\n\n assert_trimmed_generated_contains_trimmed_expected(&generated, expected);\n\n }\n\n\n\n /// Verify that we generate a Swift function that allows us to access a static class method\n\n /// from Rust using a pointer.\n\n #[test]\n\n fn extern_swift_static_class_method() {\n\n let tokens = quote! {\n\n mod foo {\n\n extern \"Swift\" {\n\n type Foo;\n\n\n\n #[swift_bridge(associated_to = Foo)]\n\n fn bar(arg: u8);\n\n }\n\n }\n\n };\n\n let module: SwiftBridgeModule = parse_quote!(#tokens);\n\n let generated = module.generate_swift(&CodegenConfig::no_features_enabled());\n\n\n\n let expected = r#\"\n\n@_cdecl(\"__swift_bridge__$Foo$bar\")\n", "file_path": "crates/swift-bridge-ir/src/codegen/generate_swift.rs", "rank": 72, "score": 128546.7876301547 }, { "content": "struct AnotherStruct {\n\n var _0: UInt8\n\n\"#,\n\n ])\n\n }\n\n\n\n fn expected_c_header() -> ExpectedCHeader {\n\n ExpectedCHeader::ContainsManyAfterTrim(vec![\n\n r#\"\n\ntypedef struct __swift_bridge__$SomeStruct { uint8_t field; } __swift_bridge__$SomeStruct;\n\n \"#,\n\n r#\"\n\ntypedef struct __swift_bridge__$AnotherStruct { uint8_t _0; } __swift_bridge__$AnotherStruct;\n\n \"#,\n\n ])\n\n }\n\n\n\n #[test]\n\n fn struct_with_primitive_field() {\n\n CodegenTest {\n", "file_path": "crates/swift-bridge-ir/src/codegen/codegen_tests/shared_struct_codegen_tests.rs", "rank": 73, "score": 128480.19622284973 }, { "content": "func extern_swift_struct_rename_3(arg: StructRename3) -> StructRename3 {\n\n arg\n\n}\n\n\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunner/SharedStructAttributes.swift", "rank": 75, "score": 125218.56349337436 }, { "content": "func extern_swift_struct_rename_2(arg: StructRename2) -> StructRename2 {\n\n arg\n\n}\n\n\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunner/SharedStructAttributes.swift", "rank": 76, "score": 125218.56349337436 }, { "content": "func extern_swift_struct_rename_1(arg: StructRename1) -> StructRename1 {\n\n arg\n\n}\n\n\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunner/SharedStructAttributes.swift", "rank": 77, "score": 125218.56349337436 }, { "content": "//\n\n// SharedStructAttributeTests.swift\n\n// SwiftRustIntegrationTestRunnerTests\n\n//\n\n// Created by Frankie Nwafili on 1/6/22.\n\n//\n\n\n\nimport XCTest\n\n@testable import SwiftRustIntegrationTestRunner\n\n\n\n/// Tests for attributes on shared structs types.\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunnerTests/SharedStructAttributeTests.swift", "rank": 78, "score": 124300.26456388994 }, { "content": "mod already_declared;\n\nmod swift_name;\n", "file_path": "crates/swift-integration-tests/src/struct_attributes.rs", "rank": 79, "score": 124118.56524528441 }, { "content": "//! If this file compiles then we know that our return value was converted.\n\n\n\n#[swift_bridge::bridge]\n\nmod ffi {\n\n extern \"Rust\" {\n\n #[swift_bridge(return_with = some_module::convert_str_to_u32)]\n\n fn get_str_value_return_with() -> u32;\n\n }\n\n}\n\n\n", "file_path": "crates/swift-integration-tests/src/function_attributes/return_with.rs", "rank": 80, "score": 122302.80912149127 }, { "content": "/// The CLI application\n\npub fn cli() -> Command<'static> {\n\n Command::new(\"swift-bridge\")\n\n .about(\"facilitates Rust and Swift interop.\")\n\n .version(env!(\"CARGO_PKG_VERSION\"))\n\n .subcommand_required(true)\n\n .subcommand(create_package_command())\n\n}\n\n\n", "file_path": "crates/swift-bridge-cli/src/clap_app.rs", "rank": 81, "score": 121600.7766405277 }, { "content": "//\n\n// SharedStructAttributes.swift\n\n// SwiftRustIntegrationTestRunner\n\n//\n\n// Created by Frankie Nwafili on 1/20/22.\n\n//\n\n\n\nimport Foundation\n\n\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunner/SharedStructAttributes.swift", "rank": 82, "score": 119874.12190715801 }, { "content": "use ffi2::AlreadyDeclaredStruct;\n\n\n\n#[swift_bridge::bridge]\n\nmod ffi {\n\n struct SomeStruct;\n\n\n\n #[swift_bridge(already_declared, swift_repr = \"struct\")]\n\n struct AlreadyDeclaredStruct;\n\n\n\n extern \"Rust\" {\n\n type SomeType;\n\n\n\n // The `get_another_type` function returns \"AnotherType\".\n\n // Yet here we are trying to return \"SomeType\".\n\n // So, if this compiles it means that our `into_return_type` macro is working.\n\n #[swift_bridge(into_return_type)]\n\n fn get_another_type() -> SomeType;\n\n\n\n // Verify that our code compiles when we use `into_return_type` on a shared struct.\n\n #[swift_bridge(into_return_type)]\n", "file_path": "crates/swift-integration-tests/src/function_attributes/into_return_type.rs", "rank": 83, "score": 119668.28128985623 }, { "content": " fn get_struct() -> SomeStruct;\n\n\n\n // Verify that our code compiles when we use `into_return_type` on an already declared\n\n // shared struct.\n\n #[swift_bridge(into_return_type)]\n\n fn get_already_declared_struct() -> AlreadyDeclaredStruct;\n\n }\n\n}\n\n#[swift_bridge::bridge]\n\nmod ffi2 {\n\n struct AlreadyDeclaredStruct;\n\n}\n\n\n\npub struct SomeType;\n\n\n", "file_path": "crates/swift-integration-tests/src/function_attributes/into_return_type.rs", "rank": 84, "score": 119666.76891790114 }, { "content": "#[swift_bridge::bridge]\n\nmod ffi {\n\n extern \"Rust\" {\n\n // If this compiles then we're successfully using the `rust_name` during code generation.\n\n #[swift_bridge(rust_name = \"another_function\")]\n\n fn some_function();\n\n }\n\n}\n\n\n", "file_path": "crates/swift-integration-tests/src/function_attributes/rust_name.rs", "rank": 85, "score": 119662.5125296388 }, { "content": "//\n\n// RustFnReturnOpaqueSwiftTypeTests.swift\n\n// SwiftRustIntegrationTestRunnerTests\n\n//\n\n// Created by Frankie Nwafili on 11/27/21.\n\n//\n\n\n\nimport XCTest\n\n@testable import SwiftRustIntegrationTestRunner\n\n\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunnerTests/RustFnUsesOpaqueSwiftTypeTests.swift", "rank": 86, "score": 119632.66088027353 }, { "content": "//\n\n// SwiftFnUsesOpaqueRustTypeTests.swift\n\n// SwiftRustIntegrationTestRunnerTests\n\n//\n\n// Created by Frankie Nwafili on 11/28/21.\n\n//\n\n\n\nimport XCTest\n\n@testable import SwiftRustIntegrationTestRunner\n\n\n\n\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunnerTests/SwiftFnUsesOpaqueRustTypeTests.swift", "rank": 87, "score": 119632.61494302342 }, { "content": "//! Verify that the `#[swift_bridge(already_declared)]` module prevents us from emitting the\n\n//! same type definitions twice.\n\n//!\n\n//! If the Xcode project is able to compile then we know that our attribute works,\n\n//! because otherwise we would get build time errors that the class was defined twice.\n\n\n\nuse self::ffi1::AlreadyDeclaredStructTest;\n\n\n\n#[swift_bridge::bridge]\n\nmod ffi1 {\n\n #[swift_bridge(swift_repr = \"struct\")]\n\n struct AlreadyDeclaredStructTest {\n\n field: u8,\n\n }\n\n}\n\n\n\n#[swift_bridge::bridge]\n\nmod ffi2 {\n\n #[swift_bridge(already_declared, swift_repr = \"struct\")]\n\n struct AlreadyDeclaredStructTest;\n\n\n\n extern \"Rust\" {\n\n fn reflect_already_declared_struct(\n\n arg: AlreadyDeclaredStructTest,\n\n ) -> AlreadyDeclaredStructTest;\n\n }\n\n}\n\n\n", "file_path": "crates/swift-integration-tests/src/struct_attributes/already_declared.rs", "rank": 88, "score": 118775.026283521 }, { "content": "/// Generates an xcframework embedded in a Swift Package from the Rust project.\n\n///\n\n/// - Also see the [relevant book chapter](https://chinedufn.github.io/swift-bridge/building/swift-packages/index.html)\n\npub fn create_package(config: CreatePackageConfig) {\n\n // Create output directory //\n\n let output_dir: &Path = config.out_dir.as_ref();\n\n if !&output_dir.exists() {\n\n fs::create_dir_all(&output_dir).expect(\"Couldn't create output directory\");\n\n }\n\n\n\n // Generate RustXcframework //\n\n gen_xcframework(&output_dir, &config);\n\n\n\n // Generate Swift Package //\n\n gen_package(&output_dir, &config);\n\n}\n\n\n", "file_path": "crates/swift-bridge-build/src/package.rs", "rank": 89, "score": 118521.38451820685 }, { "content": "fn run_string_tests() {\n\n // let string = ffi::create_swift_string();\n\n // assert_eq!(string.len(), 5);\n\n // assert_eq!(&string, \"hello\");\n\n}\n\n\n", "file_path": "crates/swift-integration-tests/src/string.rs", "rank": 90, "score": 117630.67189786914 }, { "content": "fn run_option_tests() {\n\n // assert_eq!(ffi::create_swift_option_u8_some(), Some(55));\n\n // assert_eq!(ffi::create_swift_option_u8_none(), None);\n\n}\n\n\n\npub struct OptTestOpaqueRustType {\n\n field: u8,\n\n}\n\nimpl OptTestOpaqueRustType {\n\n fn new(field: u8) -> Self {\n\n Self { field }\n\n }\n\n\n\n fn field(&self) -> u8 {\n\n self.field\n\n }\n\n}\n\n\n\nuse self::reflect_primitives::*;\n\n#[rustfmt::skip]\n", "file_path": "crates/swift-integration-tests/src/option.rs", "rank": 91, "score": 117630.67189786914 }, { "content": "class PointerTests: XCTestCase {\n\n override func setUpWithError() throws {\n\n // Put setup code here. This method is called before the invocation of each test method in the class.\n\n }\n\n\n\n override func tearDownWithError() throws {\n\n // Put teardown code here. This method is called after the invocation of each test method in the class.\n\n }\n\n\n\n func testSwiftCallRustCvoid() throws {\n\n let value = [1, 2, 3]\n\n \n\n let pointer = UnsafeRawPointer(value)\n\n let pointer_mut = UnsafeMutableRawPointer(mutating: value)\n\n \n\n let pointer_copy = rust_echo_const_c_void(pointer)\n\n let pointer_mut_copy = rust_echo_mut_c_void(pointer_mut)\n\n \n\n XCTAssertEqual(pointer, pointer_copy)\n\n XCTAssertEqual(pointer_mut, pointer_mut_copy)\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunnerTests/PointerTests.swift", "rank": 92, "score": 117615.71897103197 }, { "content": "class AsyncTests: XCTestCase {\n\n\n\n override func setUpWithError() throws {\n\n // Put setup code here. This method is called before the invocation of each test method in the class.\n\n }\n\n\n\n override func tearDownWithError() throws {\n\n // Put teardown code here. This method is called after the invocation of each test method in the class.\n\n }\n\n\n\n func testSwiftCallsRustAsyncFn() async throws {\n\n await rust_async_return_null()\n\n }\n\n \n\n func testSwiftCallsRustAsyncFnRetU8() async throws {\n\n let num = await rust_async_return_u8()\n\n XCTAssertEqual(num, 123)\n\n }\n\n \n\n func testSwiftCallsRustAsyncFnRetStruct() async throws {\n\n let _: AsyncRustFnReturnStruct = await rust_async_return_struct()\n\n }\n\n}\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunnerTests/AsyncTests.swift", "rank": 93, "score": 117615.71897103197 }, { "content": "class OptionTests: XCTestCase {\n\n override func setUpWithError() throws {\n\n // Put setup code here. This method is called before the invocation of each test method in the class.\n\n }\n\n \n\n override func tearDownWithError() throws {\n\n // Put teardown code here. This method is called after the invocation of each test method in the class.\n\n }\n\n \n\n func testSwiftCallRustOptionU8() throws {\n\n XCTAssertEqual(rust_reflect_option_u8(70), 70)\n\n XCTAssertEqual(rust_reflect_option_u8(nil), nil)\n\n }\n\n \n\n func testSwiftCallRustOptionI8() throws {\n\n XCTAssertEqual(rust_reflect_option_i8(70), 70)\n\n XCTAssertEqual(rust_reflect_option_i8(nil), nil)\n\n }\n\n \n\n func testSwiftCallRustOptionU16() throws {\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunnerTests/OptionTests.swift", "rank": 94, "score": 117615.71897103197 }, { "content": "class BooleanTests: XCTestCase {\n\n override func setUpWithError() throws {\n\n // Put setup code here. This method is called before the invocation of each test method in the class.\n\n }\n\n\n\n override func tearDownWithError() throws {\n\n // Put teardown code here. This method is called after the invocation of each test method in the class.\n\n }\n\n\n\n func testRustTests() throws {\n\n runBoolTest()\n\n }\n\n \n\n func testSwiftTests() throws {\n\n XCTAssertEqual(rustNegateBool(true), false)\n\n XCTAssertEqual(rustNegateBool(false), true)\n\n }\n\n}\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunnerTests/BooleanTests.swift", "rank": 95, "score": 117615.71897103197 }, { "content": "class StringTests: XCTestCase {\n\n override func setUpWithError() throws {\n\n // Put setup code here. This method is called before the invocation of each test method in the class.\n\n }\n\n\n\n override func tearDownWithError() throws {\n\n // Put teardown code here. This method is called after the invocation of each test method in the class.\n\n }\n\n\n\n func testRunRustCallsSwiftTests() throws {\n\n run_string_tests()\n\n }\n\n \n\n /// Verify that we can get a RustString's length\n\n func testRustStringLen() throws {\n\n let string = \" hello \"\n\n let rustString: RustString = create_string(string)\n\n \n\n XCTAssertEqual(rustString.len(), 7)\n\n }\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunnerTests/StringTests.swift", "rank": 96, "score": 117615.71897103197 }, { "content": "class VecTests: XCTestCase {\n\n override func setUpWithError() throws {\n\n // Put setup code here. This method is called before the invocation of each test method in the class.\n\n }\n\n \n\n override func tearDownWithError() throws {\n\n // Put teardown code here. This method is called after the invocation of each test method in the class.\n\n }\n\n \n\n func testRustVecU8Len() throws {\n\n let vec = RustVec<UInt8>()\n\n XCTAssertEqual(vec.len(), 0)\n\n vec.push(value: 123)\n\n XCTAssertEqual(vec.len(), 1)\n\n }\n\n func testRustVecU8Pop() throws {\n\n let vec = RustVec<UInt8>()\n\n vec.push(value: 123)\n\n let popped = vec.pop()\n\n XCTAssertEqual(popped, 123)\n", "file_path": "SwiftRustIntegrationTestRunner/SwiftRustIntegrationTestRunnerTests/VecTests.swift", "rank": 97, "score": 117615.71897103197 }, { "content": "struct CodegenTest {\n\n bridge_module: BridgeModule,\n\n // Gets turned into a Vec<String> and compared to a Vec<String> of the generated Rust tokens.\n\n expected_rust_tokens: ExpectedRustTokens,\n\n // Gets trimmed and compared to the generated Swift code.\n\n expected_swift_code: ExpectedSwiftCode,\n\n // Gets trimmed and compared to the generated C header.\n\n expected_c_header: ExpectedCHeader,\n\n}\n\n\n", "file_path": "crates/swift-bridge-ir/src/codegen/codegen_tests.rs", "rank": 98, "score": 117480.77391026836 } ]
Rust
mcu/bobbin-sam/samd21/src/ext/adc.rs
thomasantony/bobbin-sdk
37375ca40351352a029aceb8b0cf17650a3624f6
use periph::adc::*; use bobbin_common::bits::*; use bobbin_hal::analog::AnalogRead; use gclk; #[derive(Debug, PartialEq, Eq, Clone, Copy)] #[repr(u8)] pub enum Resolution { Bits12 = 0x0, Bits16 = 0x1, Bits10 = 0x2, Bits8 = 0x3, } impl AdcPeriph { pub fn init(&self) { while gclk::GCLK.status().syncbusy() != 0 {} gclk::GCLK.set_clkctrl(|r| r .set_id(0x1e) .set_gen(0x0) .set_clken(true) ); self.wait_busy(); self.set_ctrlb(|r| r.set_prescaler(0x7).set_ressel(0x2)); self.set_sampctrl(|r| r.set_samplen(0x3f)); self.wait_busy(); self.set_inputctrl(|r| r.set_muxneg(0x18)); self.set_avgctrl(|r| r.set_samplenum(0x0).set_adjres(0x0)); self.with_inputctrl(|r| r.set_gain(0xf)); self.set_refctrl(|r| r.set_refsel(0x2)); } } impl AdcPeriph { pub fn wait_busy(&self) -> &Self { while self.status().syncbusy() != 0 {} self } pub fn enabled(&self) -> bool { self.ctrla().enable() != 0 } pub fn set_enabled(&self, value: bool) -> &Self { self.with_ctrla(|r| r.set_enable(value)) } pub fn resolution(&self) -> Resolution { match self.ctrlb().ressel() { U2::B00 => Resolution::Bits12, U2::B01 => Resolution::Bits16, U2::B10 => Resolution::Bits10, U2::B11 => Resolution::Bits8, } } pub fn set_resolution(&self, value: Resolution) -> &Self { self.with_ctrlb(|r| r.set_ressel(value as u8)) } pub fn muxpos(&self) -> U5 { self.inputctrl().muxpos() } pub fn set_muxpos(&self, value: U5) -> &Self { self.with_inputctrl(|r| r.set_muxpos(value)) } pub fn muxneg(&self) -> U5 { self.inputctrl().muxneg() } pub fn set_muxneg(&self, value: U5) -> &Self { self.with_inputctrl(|r| r.set_muxneg(value)) } pub fn result_ready(&self) -> bool { self.intflag().resrdy() != 0 } pub fn clr_result_ready(&self) -> &Self { self.set_intflag(|r| r.set_resrdy(1)) } pub fn wait_result_ready(&self) -> &Self { while !self.result_ready() {} self } pub fn trigger(&self) -> &Self { self.set_swtrig(|r| r.set_start(1)) } pub fn result_16(&self) -> U16 { self.result().result_16() } pub fn result_12(&self) -> U12 { self.result().result_12() } pub fn result_10(&self) -> U10 { self.result().result_10() } pub fn result_8(&self) -> U8 { self.result().result_8() } } macro_rules! impl_analog_read { ($t:ty, $res:expr, $meth:ident) => ( impl AnalogRead<$t> for AdcCh { fn start(&self) -> &Self { self.periph .set_enabled(false) .set_resolution($res) .set_muxpos(self.index.into()) .set_enabled(true) .clr_result_ready() .trigger(); self } fn is_complete(&self) -> bool { self.periph.result_ready() } fn read(&self) -> $t { self.periph.$meth() } } ) } impl_analog_read!(U8, Resolution::Bits8, result_8); impl_analog_read!(U10, Resolution::Bits10, result_10); impl_analog_read!(U12, Resolution::Bits12, result_12);
use periph::adc::*; use bobbin_common::bits::*; use bobbin_hal::analog::AnalogRead; use gclk; #[derive(Debug, PartialEq, Eq, Clone, Copy)] #[repr(u8)] pub enum Resolution { Bits12 = 0x0, Bits16 = 0x1, Bits10 = 0x2, Bits8 = 0x3, } impl AdcPeriph { pub fn init(&self) { while gclk::GCLK.status().syncbusy() != 0 {} gclk::GCLK.set_clkctrl(|r| r .set_id(0x1e) .set_gen(0x0) .set_clken(true) ); self.wait_busy();
} impl AdcPeriph { pub fn wait_busy(&self) -> &Self { while self.status().syncbusy() != 0 {} self } pub fn enabled(&self) -> bool { self.ctrla().enable() != 0 } pub fn set_enabled(&self, value: bool) -> &Self { self.with_ctrla(|r| r.set_enable(value)) } pub fn resolution(&self) -> Resolution { match self.ctrlb().ressel() { U2::B00 => Resolution::Bits12, U2::B01 => Resolution::Bits16, U2::B10 => Resolution::Bits10, U2::B11 => Resolution::Bits8, } } pub fn set_resolution(&self, value: Resolution) -> &Self { self.with_ctrlb(|r| r.set_ressel(value as u8)) } pub fn muxpos(&self) -> U5 { self.inputctrl().muxpos() } pub fn set_muxpos(&self, value: U5) -> &Self { self.with_inputctrl(|r| r.set_muxpos(value)) } pub fn muxneg(&self) -> U5 { self.inputctrl().muxneg() } pub fn set_muxneg(&self, value: U5) -> &Self { self.with_inputctrl(|r| r.set_muxneg(value)) } pub fn result_ready(&self) -> bool { self.intflag().resrdy() != 0 } pub fn clr_result_ready(&self) -> &Self { self.set_intflag(|r| r.set_resrdy(1)) } pub fn wait_result_ready(&self) -> &Self { while !self.result_ready() {} self } pub fn trigger(&self) -> &Self { self.set_swtrig(|r| r.set_start(1)) } pub fn result_16(&self) -> U16 { self.result().result_16() } pub fn result_12(&self) -> U12 { self.result().result_12() } pub fn result_10(&self) -> U10 { self.result().result_10() } pub fn result_8(&self) -> U8 { self.result().result_8() } } macro_rules! impl_analog_read { ($t:ty, $res:expr, $meth:ident) => ( impl AnalogRead<$t> for AdcCh { fn start(&self) -> &Self { self.periph .set_enabled(false) .set_resolution($res) .set_muxpos(self.index.into()) .set_enabled(true) .clr_result_ready() .trigger(); self } fn is_complete(&self) -> bool { self.periph.result_ready() } fn read(&self) -> $t { self.periph.$meth() } } ) } impl_analog_read!(U8, Resolution::Bits8, result_8); impl_analog_read!(U10, Resolution::Bits10, result_10); impl_analog_read!(U12, Resolution::Bits12, result_12);
self.set_ctrlb(|r| r.set_prescaler(0x7).set_ressel(0x2)); self.set_sampctrl(|r| r.set_samplen(0x3f)); self.wait_busy(); self.set_inputctrl(|r| r.set_muxneg(0x18)); self.set_avgctrl(|r| r.set_samplenum(0x0).set_adjres(0x0)); self.with_inputctrl(|r| r.set_gain(0xf)); self.set_refctrl(|r| r.set_refsel(0x2)); }
function_block-function_prefix_line
[ { "content": "pub fn read_peripheral<R: std::io::Read>(r: &mut EventReader<R>,\n\n attrs: &[OwnedAttribute])\n\n -> Result<Peripheral, Error> {\n\n let mut p = Peripheral::default();\n\n\n\n for a in attrs.iter() {\n\n if a.name.local_name == \"derivedFrom\" {\n\n p.derived_from = Some(a.value.clone());\n\n }\n\n }\n\n\n\n loop {\n\n let e = try!(r.next());\n\n // println!(\"read_peripheral: {:?}\", e);\n\n match e {\n\n XmlEvent::StartElement { name, .. } => {\n\n match name.local_name.as_ref() {\n\n \"name\" => p.name = try!(read_text(r)),\n\n \"description\" => p.description = try!(read_description(r)),\n\n \"baseAddress\" => p.address = try!(read_u64(r)),\n", "file_path": "dsl/bobbin-svd/src/lib.rs", "rank": 0, "score": 229632.6135507547 }, { "content": "pub fn read_registers<R: std::io::Read>(r: &mut EventReader<R>)\n\n -> Result<(Vec<Register>, Vec<Cluster>), Error> {\n\n let mut regs: Vec<Register> = Vec::new();\n\n let mut clusters: Vec<Cluster> = Vec::new();\n\n loop {\n\n let e = try!(r.next());\n\n // println!(\"read_registers: {:?}\", e);\n\n match e {\n\n XmlEvent::StartElement { name, .. } => {\n\n match name.local_name.as_ref() {\n\n \"register\" => regs.push(try!(read_register(r))),\n\n \"cluster\" => clusters.push(try!(read_cluster(r))),\n\n _ => return Err(Error::StateError(format!(\"Expected <register> or <cluster>\"))),\n\n }\n\n }\n\n XmlEvent::EndElement { name } => {\n\n match name.local_name.as_ref() {\n\n \"registers\" => return Ok((regs, clusters)),\n\n _ => return Err(Error::StateError(format!(\"Expected </registers>\"))),\n\n }\n\n }\n\n _ => {}\n\n }\n\n }\n\n}\n\n\n", "file_path": "dsl/bobbin-svd/src/lib.rs", "rank": 1, "score": 229632.61355075467 }, { "content": "pub fn read_enumerated_value<R: std::io::Read>(r: &mut EventReader<R>)\n\n -> Result<EnumeratedValue, Error> { \n\n let mut v = EnumeratedValue::default();\n\n\n\n loop {\n\n let e = try!(r.next());\n\n match e {\n\n XmlEvent::StartElement { name, .. } => {\n\n match name.local_name.as_ref() {\n\n \"value\" => v.value = try!(read_text(r)),\n\n \"name\" => v.name = try!(read_opt_text(r)),\n\n \"description\" => v.description = try!(read_description(r)),\n\n _ => try!(read_unknown(r)),\n\n }\n\n }\n\n XmlEvent::EndElement { name } => {\n\n match name.local_name.as_ref() {\n\n \"enumeratedValue\" => return Ok(v),\n\n _ => return Err(Error::StateError(format!(\"Expected </enumeratedValue>\"))),\n\n }\n\n }\n\n _ => {}\n\n }\n\n }\n\n}\n\n\n", "file_path": "dsl/bobbin-svd/src/lib.rs", "rank": 2, "score": 227111.01448192546 }, { "content": "pub fn read_enumerated_values<R: std::io::Read>(r: &mut EventReader<R>)\n\n -> Result<Vec<EnumeratedValue>, Error> {\n\n let mut values: Vec<EnumeratedValue> = Vec::new();\n\n loop {\n\n let e = try!(r.next());\n\n // println!(\"read_fields: {:?}\", e);\n\n match e {\n\n XmlEvent::StartElement { name, .. } => {\n\n match name.local_name.as_ref() {\n\n \"name\" => try!(read_unknown(r)),\n\n \"usage\" => try!(read_unknown(r)),\n\n \"enumeratedValue\" => values.push(try!(read_enumerated_value(r))),\n\n _ => return Err(Error::StateError(format!(\"Expected <enumeratedValue>\"))),\n\n }\n\n }\n\n XmlEvent::EndElement { name } => {\n\n match name.local_name.as_ref() {\n\n \"enumeratedValues\" => return Ok(values),\n\n _ => return Err(Error::StateError(format!(\"Expected </enumeratedValues>\"))),\n\n }\n\n }\n\n _ => {}\n\n }\n\n }\n\n}\n\n\n", "file_path": "dsl/bobbin-svd/src/lib.rs", "rank": 3, "score": 227111.01448192546 }, { "content": "pub fn read<R: Read>(r: &mut R, p: &Path) -> Result<TopLevel, ReadError> { \n\n let mut buf: Vec<u8> = Vec::new();\n\n try!(r.read_to_end(&mut buf));\n\n let ctx = Context::new(p, &buf);\n\n read_buf(&ctx)\n\n}\n\n\n", "file_path": "dsl/bobbin-chip/src/reader.rs", "rank": 4, "score": 221486.81563666402 }, { "content": "pub fn read_register<R: std::io::Read>(r: &mut EventReader<R>) -> Result<Register, Error> {\n\n let mut reg = Register::default();\n\n loop {\n\n let e = try!(r.next());\n\n // println!(\"read_register: {:?}\", e);\n\n match e {\n\n XmlEvent::StartElement { name, .. } => {\n\n match name.local_name.as_ref() {\n\n \"name\" => reg.name = try!(read_text(r)),\n\n \"description\" => reg.description = try!(read_description(r)),\n\n \"addressOffset\" => reg.offset = try!(read_u64(r)),\n\n \"size\" => reg.size = try!(read_opt_u64(r)),\n\n \"access\" => reg.access = try!(read_opt_text(r)).map(Access::from),\n\n \"resetValue\" => reg.reset_value = try!(read_opt_u64(r)),\n\n \"resetMask\" => reg.reset_mask = try!(read_opt_u64(r)),\n\n \"dim\" => reg.dim = try!(read_opt_u64(r)),\n\n \"dimIncrement\" => reg.dim_increment = try!(read_opt_u64(r)),\n\n \"dimIndex\" => reg.dim_index = try!(read_opt_text(r)),\n\n \"fields\" => reg.fields = try!(read_fields(r)),\n\n _ => try!(read_unknown(r)),\n", "file_path": "dsl/bobbin-svd/src/lib.rs", "rank": 5, "score": 214280.59379131725 }, { "content": "pub fn read_interrupt<R: std::io::Read>(r: &mut EventReader<R>) -> Result<Interrupt, Error> {\n\n let mut i = Interrupt::default();\n\n loop {\n\n let e = try!(r.next());\n\n match e {\n\n XmlEvent::StartElement { name, .. } => {\n\n match name.local_name.as_ref() {\n\n \"name\" => i.name = try!(read_text(r)),\n\n \"description\" => i.description = try!(read_description(r)),\n\n \"value\" => i.value = try!(read_u64(r)),\n\n _ => try!(read_unknown(r)),\n\n }\n\n }\n\n XmlEvent::EndElement { name } => {\n\n match name.local_name.as_ref() {\n\n \"interrupt\" => return Ok(i),\n\n _ => return Err(Error::StateError(format!(\"expected </interrupt>\"))),\n\n }\n\n }\n\n _ => {}\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "dsl/bobbin-svd/src/lib.rs", "rank": 6, "score": 214280.59379131725 }, { "content": "pub fn read_u64<R: std::io::Read>(r: &mut EventReader<R>) -> Result<u64, Error> {\n\n if let Some(value) = try!(read_opt_u64(r)) {\n\n Ok(value)\n\n } else {\n\n Err(Error::ParseError(format!(\"Missing number value\")))\n\n }\n\n}\n\n\n\n\n", "file_path": "dsl/bobbin-svd/src/lib.rs", "rank": 7, "score": 214280.59379131725 }, { "content": "pub fn read_device<R: std::io::Read>(r: &mut EventReader<R>) -> Result<Device, Error> {\n\n let mut d = Device::default();\n\n loop {\n\n let e = try!(r.next());\n\n // println!(\"read_device: {:?}\", e);\n\n match e {\n\n XmlEvent::StartElement { name, .. } => {\n\n match name.local_name.as_ref() {\n\n \"vendor\" => d.vendor = try!(read_opt_text(r)),\n\n \"vendor_id\" => d.vendor_id = try!(read_opt_text(r)),\n\n \"name\" => d.name = try!(read_text(r)),\n\n \"size\" => d.size = try!(read_opt_u64(r)),\n\n \"access\" => d.access = try!(read_opt_text(r)).map(Access::from),\n\n \"description\" => d.description = try!(read_description(r)),\n\n \"peripherals\" => d.peripherals = try!(read_peripherals(r)),\n\n _ => try!(read_unknown(r)),\n\n }\n\n }\n\n XmlEvent::EndElement { name } => {\n\n match name.local_name.as_ref() {\n", "file_path": "dsl/bobbin-svd/src/lib.rs", "rank": 8, "score": 214280.59379131725 }, { "content": "pub fn read_document<R: std::io::Read>(r: &mut EventReader<R>) -> Result<Document, Error> {\n\n let mut device: Option<Device> = None;\n\n loop {\n\n let e = try!(r.next());\n\n // println!(\"read_document: {:?}\", e);\n\n match e {\n\n XmlEvent::StartDocument { .. } => {}\n\n XmlEvent::StartElement { name, .. } => {\n\n match name.local_name.as_ref() {\n\n \"device\" => device = Some(try!(read_device(r))),\n\n _ => return Err(Error::StateError(format!(\"Expected device\"))),\n\n }\n\n }\n\n XmlEvent::EndDocument => {\n\n if device.is_none() {\n\n return Err(Error::StateError(format!(\"No device found in document\")));\n\n }\n\n return Ok(Document { device: device.unwrap() });\n\n }\n\n _ => {}\n", "file_path": "dsl/bobbin-svd/src/lib.rs", "rank": 9, "score": 214280.59379131725 }, { "content": "pub fn read_text<R: std::io::Read>(r: &mut EventReader<R>) -> Result<String, Error> {\n\n if let Some(text) = try!(read_opt_text(r)) {\n\n Ok(text)\n\n } else {\n\n return Err(Error::StateError(format!(\"Expected non-empty text\")))\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "dsl/bobbin-svd/src/lib.rs", "rank": 10, "score": 214280.59379131725 }, { "content": "pub fn read_cluster<R: std::io::Read>(r: &mut EventReader<R>) -> Result<Cluster, Error> {\n\n let mut c = Cluster::default();\n\n\n\n loop {\n\n let e = try!(r.next());\n\n // println!(\"read_register: {:?}\", e);\n\n match e {\n\n XmlEvent::StartElement { name, .. } => {\n\n match name.local_name.as_ref() {\n\n \"name\" => c.name = try!(read_text(r)),\n\n \"description\" => c.description = try!(read_description(r)),\n\n \"size\" => c.size = try!(read_opt_u64(r)),\n\n \"access\" => c.access = try!(read_opt_text(r)).map(Access::from), \n\n \"resetValue\" => c.reset_value = try!(read_opt_u64(r)),\n\n \"resetMask\" => c.reset_mask = try!(read_opt_u64(r)), \n\n \"addressOffset\" => c.offset = try!(read_u64(r)),\n\n \"dim\" => c.dim = try!(read_opt_u64(r)),\n\n \"dimIncrement\" => c.dim_increment = try!(read_opt_u64(r)),\n\n \"dimIndex\" => c.dim_index = try!(read_opt_text(r)), \n\n \"register\" => c.registers.push(try!(read_register(r))),\n", "file_path": "dsl/bobbin-svd/src/lib.rs", "rank": 11, "score": 214280.59379131725 }, { "content": "pub fn read_field<R: std::io::Read>(r: &mut EventReader<R>) -> Result<Field, Error> {\n\n let mut f = Field::default();\n\n let mut p_offset: Option<u64> = None;\n\n let mut p_width: Option<u64> = None;\n\n let mut p_range: Option<String> = None;\n\n let mut p_lsb: Option<u64> = None;\n\n let mut p_msb: Option<u64> = None;\n\n\n\n loop {\n\n let e = try!(r.next());\n\n // println!(\"read_field: {:?}\", e);\n\n match e {\n\n XmlEvent::StartElement { name, .. } => {\n\n match name.local_name.as_ref() {\n\n \"name\" => f.name = try!(read_text(r)),\n\n \"description\" => f.description = try!(read_description(r)),\n\n \"access\" => f.access = try!(read_opt_text(r)).map(Access::from),\n\n \"bitOffset\" => p_offset = try!(read_opt_u64(r)),\n\n \"bitWidth\" => p_width = try!(read_opt_u64(r)),\n\n \"bitRange\" => p_range = try!(read_opt_text(r)),\n", "file_path": "dsl/bobbin-svd/src/lib.rs", "rank": 12, "score": 214280.59379131725 }, { "content": "pub fn read_description<R: std::io::Read>(r: &mut EventReader<R>) -> Result<Option<String>, Error> {\n\n read_opt_text(r).map(|t| t.map(|s| normalize(s.as_ref())))\n\n}\n\n\n\n\n", "file_path": "dsl/bobbin-svd/src/lib.rs", "rank": 13, "score": 209820.33964185126 }, { "content": "pub fn read_fields<R: std::io::Read>(r: &mut EventReader<R>) -> Result<Vec<Field>, Error> {\n\n let mut fields: Vec<Field> = Vec::new();\n\n loop {\n\n let e = try!(r.next());\n\n // println!(\"read_fields: {:?}\", e);\n\n match e {\n\n XmlEvent::StartElement { name, .. } => {\n\n match name.local_name.as_ref() {\n\n \"field\" => fields.push(try!(read_field(r))),\n\n _ => return Err(Error::StateError(format!(\"Expected <field>\"))),\n\n }\n\n }\n\n XmlEvent::EndElement { name } => {\n\n match name.local_name.as_ref() {\n\n \"fields\" => return Ok(fields),\n\n _ => return Err(Error::StateError(format!(\"Expected </fields>\"))),\n\n }\n\n }\n\n _ => {}\n\n }\n\n }\n\n}\n\n\n", "file_path": "dsl/bobbin-svd/src/lib.rs", "rank": 14, "score": 209820.33964185126 }, { "content": "pub fn read_peripherals<R: std::io::Read>(r: &mut EventReader<R>) -> Result<Vec<Peripheral>, Error> {\n\n let mut periphs: Vec<Peripheral> = Vec::new();\n\n loop {\n\n let e = try!(r.next());\n\n // println!(\"read_peripherals: {:?}\", e);\n\n match e {\n\n XmlEvent::StartElement { name, attributes, .. } => {\n\n match name.local_name.as_ref() {\n\n \"peripheral\" => periphs.push(try!(read_peripheral(r, &attributes))),\n\n _ => return Err(Error::StateError(format!(\"Expected <peripheral>\"))),\n\n }\n\n }\n\n XmlEvent::EndElement { name } => {\n\n match name.local_name.as_ref() {\n\n \"peripherals\" => return Ok(periphs),\n\n _ => return Err(Error::StateError(format!(\"Expected </peripherals>\"))),\n\n }\n\n }\n\n _ => {}\n\n }\n\n }\n\n}\n\n\n", "file_path": "dsl/bobbin-svd/src/lib.rs", "rank": 15, "score": 209820.33964185123 }, { "content": "pub fn read_address_block<R: std::io::Read>(r: &mut EventReader<R>) -> Result<AddressBlock, Error> {\n\n let mut b = AddressBlock::default();\n\n loop {\n\n let e = try!(r.next());\n\n match e {\n\n XmlEvent::StartElement { name, .. } => {\n\n match name.local_name.as_ref() {\n\n \"offset\" => b.offset = try!(read_u64(r)),\n\n \"size\" => b.size = try!(read_u64(r)),\n\n \"usage\" => b.usage = try!(read_text(r)),\n\n _ => try!(read_unknown(r)),\n\n }\n\n }\n\n XmlEvent::EndElement { name } => {\n\n match name.local_name.as_ref() {\n\n \"addressBlock\" => return Ok(b),\n\n _ => return Err(Error::StateError(format!(\"expected </addressBlock>\"))),\n\n }\n\n }\n\n _ => {}\n\n }\n\n }\n\n \n\n}\n\n\n", "file_path": "dsl/bobbin-svd/src/lib.rs", "rank": 16, "score": 209724.67807172256 }, { "content": "pub fn read_opt_text<R: std::io::Read>(r: &mut EventReader<R>) -> Result<Option<String>, Error> {\n\n let mut result: Option<String> = None;\n\n loop {\n\n let e = try!(r.next());\n\n match e {\n\n XmlEvent::Characters(s) => result = Some(s),\n\n XmlEvent::EndElement { .. } => return Ok(result),\n\n _ => return Err(Error::StateError(format!(\"Unexpected text end\"))),\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "dsl/bobbin-svd/src/lib.rs", "rank": 17, "score": 207574.5957982536 }, { "content": "pub fn copy_file(src_path: &Path, dst_path: &Path) -> Result<()> {\n\n if src_path.exists() {\n\n copy_file_with(src_path, dst_path, |s| s)\n\n } else {\n\n Ok(())\n\n }\n\n}\n", "file_path": "dsl/bobbin-chip/src/codegen/board.rs", "rank": 18, "score": 191671.06152745063 }, { "content": "pub fn copy_file(src_path: &Path, dst_path: &Path) -> Result<()> {\n\n copy_file_with(src_path, dst_path, |s| s)\n\n}\n\n\n", "file_path": "dsl/bobbin-chip/src/codegen/periph.rs", "rank": 19, "score": 191671.06152745063 }, { "content": "pub fn copy_file_with<F: FnOnce(String) -> String>(src_path: &Path, dst_path: &Path, f: F) -> Result<()> {\n\n let mut src = File::open(src_path)?;\n\n let mut dst = File::create(dst_path)?;\n\n let mut data = String::new();\n\n src.read_to_string(&mut data)?; \n\n let data = f(data);\n\n dst.write(&data.as_bytes())?; \n\n Ok(())\n\n}", "file_path": "dsl/bobbin-chip/src/codegen/board.rs", "rank": 20, "score": 185114.21120268616 }, { "content": "pub fn copy_file_with<F: FnOnce(String) -> String>(src_path: &Path, dst_path: &Path, f: F) -> Result<()> {\n\n let mut src = File::open(src_path)?;\n\n let mut dst = File::create(dst_path)?;\n\n let mut data = String::new();\n\n src.read_to_string(&mut data)?; \n\n let data = f(data);\n\n dst.write(&data.as_bytes())?; \n\n Ok(())\n\n}\n\n\n", "file_path": "dsl/bobbin-chip/src/codegen/periph.rs", "rank": 21, "score": 185114.21120268616 }, { "content": "pub fn init() {\n\n PA17.port().gate_enable();\n\n PA17.set_mode_output();\n\n}\n\n\n\nimpl GetLed for ::Board {\n\n fn get_led(&self, index: usize) -> &Led {\n\n match index {\n\n 0 => &LED0,\n\n _ => unimplemented!()\n\n }\n\n }\n\n fn get_led_count(&self) -> usize { 1 }\n\n}\n\n\n\nimpl ::Board {\n\n pub fn led0(&self) -> LedHigh<PortPin> {\n\n LedHigh::new(PA17_PIN)\n\n }\n\n}", "file_path": "board/arduino-zero/src/led.rs", "rank": 22, "score": 181545.08867713067 }, { "content": "pub fn init() { \n\n PTB22.port().gate_enable();\n\n PTB22.connect_to(PB22); \n\n PB22.set_dir_output().set_output(true);\n\n\n\n PTB21.port().gate_enable();\n\n PTB21.connect_to(PB21);\n\n PB21.set_dir_output().set_output(true);\n\n\n\n PTE26.port().gate_enable();\n\n PTE26.connect_to(PE26);\n\n PE26.set_dir_output().set_output(true);\n\n\n\n}\n\n\n\nimpl GetLed for ::Board {\n\n fn get_led(&self, index: usize) -> &Led {\n\n match index {\n\n 0 => &LED0,\n\n 1 => &LED1,\n", "file_path": "board/frdm-k64f/src/led.rs", "rank": 23, "score": 181545.08867713067 }, { "content": "pub fn init() {\n\n PB0.port().gate_enable();\n\n PB0.mode_output();\n\n\n\n PB7.port().gate_enable();\n\n PB7.mode_output();\n\n\n\n PB14.port().gate_enable();\n\n PB14.mode_output();\n\n}\n\n\n\nimpl GetLed for ::Board {\n\n fn get_led(&self, index: usize) -> &Led {\n\n match index {\n\n 0 => &LED0,\n\n 1 => &LED1,\n\n 2 => &LED2,\n\n _ => unimplemented!()\n\n }\n\n }\n\n fn get_led_count(&self) -> usize { 3 }\n\n}\n\n\n\nimpl ::Board {\n\n pub fn led0(&self) -> LedHigh<GpioPin> { LedHigh::new(PB0_PIN) }\n\n pub fn led1(&self) -> LedHigh<GpioPin> { LedHigh::new(PB7_PIN) }\n\n pub fn led2(&self) -> LedHigh<GpioPin> { LedHigh::new(PB14_PIN) }\n\n}", "file_path": "board/nucleo-f429zi/src/led.rs", "rank": 24, "score": 181545.08867713067 }, { "content": "pub fn init() { \n\n}", "file_path": "board/feather-m0/src/btn.rs", "rank": 25, "score": 181545.08867713067 }, { "content": "pub fn init() {\n\n PB3.port().gate_enable();\n\n PB3.mode_output();\n\n}\n\n\n\nimpl GetLed for ::Board {\n\n fn get_led(&self, index: usize) -> &Led {\n\n match index {\n\n 0 => &LED0,\n\n _ => unimplemented!()\n\n }\n\n }\n\n fn get_led_count(&self) -> usize { 1 }\n\n}\n\n\n\nimpl ::Board {\n\n pub fn led0(&self) -> LedHigh<GpioPin> { LedHigh::new(PB3_PIN) }\n\n}", "file_path": "board/nucleo-l432kc/src/led.rs", "rank": 26, "score": 181545.08867713067 }, { "content": "pub fn init() {\n\n BTN0.port().gate_enable();\n\n BTN0.mode_input().pull_up();\n\n}\n\npub use mcu::pin::*;\n\n\n\nimpl ::Board {\n\n pub fn btn0(&self) -> BtnLow<GpioPin> { BtnLow::new(PA12_PIN) }\n\n}", "file_path": "board/nucleo-l432kc/src/btn.rs", "rank": 27, "score": 181545.08867713067 }, { "content": "pub fn init() {\n\n BTN0.port().gate_enable();\n\n BTN0.mode_input().pull_down();\n\n}\n\n\n\nimpl ::Board {\n\n pub fn btn0(&self) -> BtnHigh<GpioPin> { BtnHigh::new(PA0_PIN) }\n\n}", "file_path": "board/discovery-stm32f3/src/btn.rs", "rank": 28, "score": 181545.08867713067 }, { "content": "pub fn init() {\n\n BTN0.port().gate_enable();\n\n BTN0.mode_input().pull_down();\n\n}\n\n\n\nimpl ::Board {\n\n pub fn btn0(&self) -> BtnHigh<GpioPin> { BtnHigh::new(PC13_PIN) }\n\n}", "file_path": "board/nucleo-f746zg/src/btn.rs", "rank": 29, "score": 181545.08867713067 }, { "content": "pub fn init() {\n\n PA11.port().gate_enable();\n\n PA11.set_mode_input().set_pull_enabled(true).set_output(true);\n\n}\n\n\n\nimpl ::Board {\n\n pub fn btn0(&self) -> BtnLow<PortPin> {\n\n BtnLow::new(PA11_PIN)\n\n }\n\n}", "file_path": "board/arduino-zero/src/btn.rs", "rank": 30, "score": 181545.08867713067 }, { "content": "pub fn init() {\n\n BTN0.port().gate_enable();\n\n BTN0.mode_input().pull_down();\n\n}\n\n\n\nimpl ::Board {\n\n pub fn btn0(&self) -> BtnHigh<GpioPin> { BtnHigh::new(PA0_PIN) }\n\n}", "file_path": "board/discovery-stm32f429i/src/btn.rs", "rank": 31, "score": 181545.08867713067 }, { "content": "pub fn init() {\n\n PE9.port().gate_enable();\n\n PE9.mode_output();\n\n\n\n PE8.port().gate_enable();\n\n PE8.mode_output();\n\n\n\n PE10.port().gate_enable();\n\n PE10.mode_output();\n\n\n\n PE15.port().gate_enable();\n\n PE15.mode_output();\n\n\n\n PE11.port().gate_enable();\n\n PE11.mode_output();\n\n\n\n PE14.port().gate_enable();\n\n PE14.mode_output();\n\n\n\n PE12.port().gate_enable();\n", "file_path": "board/discovery-stm32f3/src/led.rs", "rank": 32, "score": 181545.08867713067 }, { "content": "pub fn init() {\n\n BTN0_PT.port().gate_enable();\n\n BTN0_PT.connect_to(BTN0);\n\n\n\n BTN1_PT.port().gate_enable();\n\n BTN1_PT.connect_to(BTN1);\n\n\n\n\n\n BTN0.set_dir_input();\n\n BTN1.set_dir_input();\n\n}\n\n\n\n\n\nimpl ::Board {\n\n pub fn btn0(&self) -> BtnLow<GpioCh> { BtnLow::new(PC6_CH) }\n\n pub fn btn1(&self) -> BtnLow<GpioCh> { BtnLow::new(PA4_CH) }\n\n}", "file_path": "board/frdm-k64f/src/btn.rs", "rank": 33, "score": 181545.08867713067 }, { "content": "pub fn init() {\n\n PG13.port().gate_enable();\n\n PG13.mode_output();\n\n\n\n PG14.port().gate_enable();\n\n PG14.mode_output();\n\n}\n\n\n\nimpl GetLed for ::Board {\n\n fn get_led(&self, index: usize) -> &Led {\n\n match index {\n\n 0 => &LED0,\n\n 1 => &LED1,\n\n _ => unimplemented!()\n\n }\n\n }\n\n fn get_led_count(&self) -> usize { 2 }\n\n}\n\n\n\nimpl ::Board {\n\n pub fn led0(&self) -> LedHigh<GpioPin> { LedHigh::new(PG13_PIN) }\n\n pub fn led1(&self) -> LedHigh<GpioPin> { LedHigh::new(PG14_PIN) }\n\n}", "file_path": "board/discovery-stm32f429i/src/led.rs", "rank": 34, "score": 181545.08867713067 }, { "content": "pub fn init() {\n\n PB0.port().gate_enable();\n\n PB0.mode_output();\n\n\n\n PB7.port().gate_enable();\n\n PB7.mode_output();\n\n\n\n PB14.port().gate_enable();\n\n PB14.mode_output();\n\n}\n\n\n\nimpl GetLed for ::Board {\n\n fn get_led(&self, index: usize) -> &Led {\n\n match index {\n\n 0 => &LED0,\n\n 1 => &LED1,\n\n 2 => &LED2,\n\n _ => unimplemented!()\n\n }\n\n }\n\n fn get_led_count(&self) -> usize { 3 }\n\n}\n\n\n\nimpl ::Board {\n\n pub fn led0(&self) -> LedHigh<GpioPin> { LedHigh::new(PB0_PIN) }\n\n pub fn led1(&self) -> LedHigh<GpioPin> { LedHigh::new(PB7_PIN) }\n\n pub fn led2(&self) -> LedHigh<GpioPin> { LedHigh::new(PB14_PIN) }\n\n}", "file_path": "board/nucleo-f746zg/src/led.rs", "rank": 35, "score": 181545.08867713067 }, { "content": "pub fn init() {\n\n BTN0.port().gate_enable();\n\n BTN0.mode_input().pull_down();\n\n}\n\n\n\nimpl ::Board {\n\n pub fn btn0(&self) -> BtnHigh<GpioPin> { BtnHigh::new(PC13_PIN) }\n\n}", "file_path": "board/nucleo-f429zi/src/btn.rs", "rank": 36, "score": 181545.08867713067 }, { "content": "pub fn init() {\n\n PA17.port().gate_enable();\n\n PA17.set_mode_output();\n\n}\n\n\n\nimpl GetLed for ::Board {\n\n fn get_led(&self, index: usize) -> &Led {\n\n match index {\n\n 0 => &LED0,\n\n _ => unimplemented!()\n\n }\n\n }\n\n fn get_led_count(&self) -> usize { 1 }\n\n}\n\n\n\nimpl ::Board {\n\n pub fn led0(&self) -> LedHigh<PortPin> {\n\n LedHigh::new(PA17_PIN)\n\n }\n\n}", "file_path": "board/feather-m0/src/led.rs", "rank": 37, "score": 181545.08867713067 }, { "content": "/// Searches for a linker script in `link/` based on enabled Cargo features and copies it to the top\n\n/// crate's build directory. Matching files with a `.ld` extension are copied to `link.ld`, and files\n\n/// with a `.x` extension are copied to `memory.x`.\n\npub fn setup_linker() {\n\n let cfg_target_os = env::var(\"CARGO_CFG_TARGET_OS\").unwrap();\n\n if cfg_target_os != \"none\" {\n\n return\n\n }\n\n\n\n if let Some((ld_script, dst)) = find_ld_script(\"link\").unwrap() {\n\n copy_link_script(ld_script, dst);\n\n }\n\n}\n\n\n", "file_path": "lib/bobbin-build/src/lib.rs", "rank": 38, "score": 179106.24275436025 }, { "content": "pub fn init() { \n\n}", "file_path": "board-src/template-thumbv6/src/btn.rs", "rank": 39, "score": 179097.95018986377 }, { "content": "pub fn init() {\n\n}\n", "file_path": "board-src/template-thumbv7/src/led.rs", "rank": 40, "score": 179097.95018986377 }, { "content": "pub fn init() { \n\n}", "file_path": "board-src/template-thumbv7/src/btn.rs", "rank": 41, "score": 179097.95018986377 }, { "content": "pub fn reset_vm() {\n\n MEM.with(|m| m.borrow_mut().reset());\n\n} \n\n\n", "file_path": "lib/bobbin-mcu/src/rw.rs", "rank": 42, "score": 179097.95018986377 }, { "content": "pub fn init() {\n\n}\n", "file_path": "board-src/template-thumbv6/src/led.rs", "rank": 43, "score": 179097.95018986377 }, { "content": "#[inline]\n\npub fn sleep() {\n\n #[cfg(target_os=\"none\")]\n\n unsafe { asm!(\"\n\n cpsid i\n\n wfi\n\n cpsie i\n\n \")}\n\n}\n", "file_path": "mcu/bobbin-cortexm/src/ext/mod.rs", "rank": 44, "score": 179097.95018986377 }, { "content": "pub fn gen_peripheral_impl<W: Write>(_cfg: &Config, out: &mut W, p: &Peripheral) -> Result<()> {\n\n let p_type = format!(\"{}Periph\", to_camel(&p.group_name.as_ref().unwrap()));\n\n let ch_type = format!(\"{}Ch\", to_camel(&p.group_name.as_ref().unwrap()));\n\n\n\n\n\n // TODO: Description must be added at top of file\n\n // if let Some(ref desc) = p.description {\n\n // let desc = desc.trim();\n\n // if desc.len() > 0 {\n\n // try!(writeln!(out, \"// //! {}\", desc));\n\n // try!(writeln!(out, \"\"));\n\n // }\n\n // }\n\n\n\n // try!(writeln!(out, \"#[allow(unused_imports)] use {}::*;\", cfg.common));\n\n // try!(writeln!(out, \"\"));\n\n\n\n if let Some(ref desc) = p.description {\n\n try!(gen_doc(out, 0, desc));\n\n }\n", "file_path": "dsl/bobbin-chip/src/codegen/modules.rs", "rank": 45, "score": 176927.2810026712 }, { "content": "pub fn run_48mhz() {\n\n // See https://github.com/arduino/ArduinoCore-samd/blob/master/bootloaders/zero/board_init.c\n\n // * At reset:\n\n // * - OSC8M clock source is enabled with a divider by 8 (1MHz).\n\n // * - Generic Clock Generator 0 (GCLKMAIN) is using OSC8M as source.\n\n // * We need to:\n\n // * 1) Enable XOSC32K clock (External on-board 32.768Hz oscillator), will be used as DFLL48M reference.\n\n // * 2) Put XOSC32K as source of Generic Clock Generator 1\n\n // * 3) Put Generic Clock Generator 1 as source for Generic Clock Multiplexer 0 (DFLL48M reference)\n\n // * 4) Enable DFLL48M clock\n\n // * 5) Switch Generic Clock Generator 0 to DFLL48M. CPU will run at 48MHz.\n\n // * 6) Modify PRESCaler value of OSCM to have 8MHz\n\n // * 7) Put OSC8M as source for Generic Clock Generator 3\n\n\n\n /* Set 1 Flash Wait State for 48MHz, cf tables 20.9 and 35.27 in SAMD21 Datasheet */\n\n // NVMCTRL->CTRLB.bit.RWS = NVMCTRL_CTRLB_RWS_HALF_Val;\n\n nvmctrl::NVMCTRL.with_ctrlb(|r| r.set_rws(0x1));\n\n\n\n /* Turn on the digital interface clock */\n\n pm::PM.with_apbamask(|r| r.set_gclk(1));\n", "file_path": "mcu/bobbin-sam/samd21/src/ext/clock.rs", "rank": 46, "score": 174478.38762294105 }, { "content": "pub fn enable_instruction_cache() {\n\n // Enable Instruction Cache\n\n SCB.set_iciallu(|r| r);\n\n #[cfg(target_os=\"none\")]\n\n unsafe { \n\n asm!(\"dsb\");\n\n asm!(\"isb\");\n\n }\n\n SCB.with_ccr(|r| r.set_ic(1)); \n\n}\n\n\n", "file_path": "mcu/bobbin-cortexm/src/ext/mod.rs", "rank": 47, "score": 174478.38762294105 }, { "content": "pub fn enable_pll_external_mode() {\n\n let rcc = rcc::RCC;\n\n let flash = flash::FLASH;\n\n //let mut pwr = pwr::PWR;\n\n\n\n // Configure flash settings.\n\n // Prefetch Buffer Enabled + Two Wait States\n\n flash.with_acr(|r| r.set_prftbe(1).set_latency(0b010));\n\n\n\n // Configure Prescalers\n\n\n\n // AHB (HCLK) = SYSCLK\n\n // APB1 = HCLK / 2\n\n // APB2 = HCLK\n\n rcc.with_cfgr(|r| r.set_hpre(0b000).set_ppre1(0b100).set_ppre2(0b000));\n\n\n\n // Enable internal high-speed oscillator.\n\n rcc.with_cr(|r| r.set_hsion(1));\n\n\n\n // Wait for HSI Ready\n", "file_path": "mcu/bobbin-stm32/stm32f303x/src/ext/clock.rs", "rank": 48, "score": 170192.0810313639 }, { "content": "pub fn enable_pll_hsi_mode() {\n\n let rcc = RCC;\n\n let flash = FLASH;\n\n let pwr = PWR;\n\n\n\n // Enable internal high-speed oscillator.\n\n rcc.with_cr(|r| r.set_hsion(1));\n\n\n\n // Wait for HSI Ready\n\n while rcc.cr().hsirdy() == 0 {}\n\n\n\n // // Select HSI as SYSCLK source. \n\n // rcc.with_cfgr(|r| r.set_sw(0b00));\n\n // while RCC.cfgr().sws() != 0b00 {}\n\n\n\n pwr.with_csr1(|r| r.set_vosrdy(1));\n\n\n\n // Set prescalers for AHB, ADC, ABP1, ABP2\n\n\n\n // HPRE = HPRE_DIV_NONE\n", "file_path": "mcu/bobbin-stm32/stm32f74x/src/ext/clock.rs", "rank": 49, "score": 170192.0810313639 }, { "content": "pub fn gen_peripheral_group_impl<W: Write>(_cfg: &Config, out: &mut W, pg: &PeripheralGroup) -> Result<()> {\n\n let pg_name = if let Some(ref prototype) = pg.prototype {\n\n if let Some(ref name) = prototype.group_name {\n\n format!(\"{}\", name)\n\n } else {\n\n format!(\"{}\", pg.name)\n\n }\n\n } else {\n\n format!(\"{}\", pg.name)\n\n };\n\n let pg_type = format!(\"{}Periph\", to_camel(&pg_name));\n\n let ch_type = format!(\"{}Ch\", to_camel(&pg_name));\n\n\n\n\n\n // try!(writeln!(out, \"#[allow(unused_imports)] use {}::*;\", cfg.common));\n\n // try!(writeln!(out, \"\"));\n\n\n\n // Generate Periphal Group Impl\n\n\n\n if pg.modules.len() == 0 {\n", "file_path": "dsl/bobbin-chip/src/codegen/modules.rs", "rank": 50, "score": 169706.92116385602 }, { "content": "pub fn counter_pair<'a, T: Sync + Copy + 'a>(counter: &'a mut T) -> (CounterGet<'a, T>, CounterSet<'a, T>) {\n\n (\n\n CounterGet { counter: counter as *mut T, _phantom: PhantomData },\n\n CounterSet { counter: counter as *mut T, _phantom: PhantomData },\n\n )\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n #[test]\n\n fn test_counter() {\n\n let mut c = 0u32;\n\n let (c_get, c_set) = counter_pair(&mut c);\n\n assert_eq!(c_get.get(), 0);\n\n assert_eq!(c_set.get(), 0);\n\n\n\n c_set.set(1);\n\n assert_eq!(c_get.get(), 1);\n\n assert_eq!(c_set.get(), 1);\n\n }\n\n}", "file_path": "lib/bobbin-ipc/src/counter.rs", "rank": 51, "score": 166950.13802957552 }, { "content": "#[inline]\n\npub fn get_active_irq() -> u8 {\n\n SCB.icsr().vectactive().value()\n\n}\n\n\n", "file_path": "mcu/bobbin-cortexm/src/ext/mod.rs", "rank": 52, "score": 164825.25059458625 }, { "content": "#[doc(hidden)]\n\npub fn write_str(s: &str) {\n\n with_console(|c| {\n\n c.write_str(s).ok();\n\n });\n\n}\n\n\n", "file_path": "lib/bobbin-sys/src/console.rs", "rank": 53, "score": 163571.37367241722 }, { "content": "pub fn init() -> System<Board> {\n\n System::take()\n\n}\n", "file_path": "board/nucleo-f429zi/src/sys.rs", "rank": 54, "score": 163571.37367241722 }, { "content": "pub fn init() -> System<Board> {\n\n System::take()\n\n}\n", "file_path": "board/nucleo-l432kc/src/sys.rs", "rank": 55, "score": 163571.37367241722 }, { "content": "pub fn init() -> System<Board> {\n\n System::take()\n\n}\n", "file_path": "board/frdm-k64f/src/sys.rs", "rank": 56, "score": 163571.37367241722 }, { "content": "pub fn init() -> System<Board> {\n\n System::take()\n\n}\n", "file_path": "board/arduino-zero/src/sys.rs", "rank": 57, "score": 163571.37367241722 }, { "content": "pub fn init() -> System<Board> {\n\n System::take()\n\n}\n", "file_path": "board/discovery-stm32f3/src/sys.rs", "rank": 58, "score": 163571.37367241722 }, { "content": "#[doc(hidden)]\n\npub fn write(buf: &[u8]) {\n\n with_console(|c| {\n\n c.write(buf);\n\n });\n\n}\n\n\n", "file_path": "lib/bobbin-sys/src/console.rs", "rank": 59, "score": 163571.37367241722 }, { "content": "pub fn init() -> System<Board> {\n\n System::take()\n\n}\n", "file_path": "board/discovery-stm32f429i/src/sys.rs", "rank": 60, "score": 163571.37367241722 }, { "content": "pub fn init() -> System<Board> {\n\n System::take()\n\n}\n", "file_path": "board/feather-m0/src/sys.rs", "rank": 61, "score": 163571.37367241722 }, { "content": "pub fn init() -> System<Board> {\n\n System::take()\n\n}\n", "file_path": "board/nucleo-f746zg/src/sys.rs", "rank": 62, "score": 163571.37367241722 }, { "content": "#[doc(hidden)]\n\npub fn write_u16_hex(v: u16) {\n\n with_console(|c| {\n\n c.write_u16_hex(v);\n\n });\n\n}\n\n\n", "file_path": "lib/bobbin-sys/src/console.rs", "rank": 63, "score": 161305.63216308545 }, { "content": "pub fn dump_can(can: Can0) {\n\n println!(\"MCR: {:?}\\nCTRL1: {:?}\\nCTRL2: {:?}\", can.mcr(), can.ctrl1(), can.ctrl2());\n\n println!(\"IFLAG1: {:?}\\nESR1: {:?}\\nESR2: {:?}\\nECR: {:?}\\nTIMER: {:?}\", \n\n can.iflag1(), can.esr1(), can.esr2(), can.ecr(), can.timer()\n\n );\n\n}\n\n\n", "file_path": "board/frdm-k64f/examples/flexcan.rs", "rank": 64, "score": 161305.63216308545 }, { "content": "#[doc(hidden)]\n\npub fn write_u8_hex(v: u8) {\n\n with_console(|c| {\n\n c.write_u8_hex(v);\n\n });\n\n}\n\n\n", "file_path": "lib/bobbin-sys/src/console.rs", "rank": 65, "score": 161305.63216308545 }, { "content": "pub fn run() -> Result<!, Error> {\n\n let spi = SPI1;\n\n let spi_miso = PA6;\n\n let spi_mosi = PA7;\n\n let spi_sck = PA5;\n\n let spi_nss = PE3; \n\n\n\n spi.gate_enable();\n\n spi_miso.port().gate_enable();\n\n spi_mosi.port().gate_enable();\n\n spi_sck.port().gate_enable();\n\n spi_nss.port().gate_enable();\n\n\n\n spi_miso.connect_to(spi);\n\n spi_mosi.connect_to(spi);\n\n spi_sck.connect_to(spi);\n\n \n\n spi_sck.speed_high().push_pull();\n\n spi_miso.speed_high().pull_up();\n\n spi_mosi.speed_high().push_pull();\n", "file_path": "board/discovery-stm32f3/examples/l3gd20-spi.rs", "rank": 66, "score": 161305.63216308545 }, { "content": "pub fn init() -> System<Board> {\n\n System::take()\n\n}\n", "file_path": "board-src/template-thumbv6/src/sys.rs", "rank": 67, "score": 161305.63216308545 }, { "content": "pub fn init() -> System<Board> {\n\n System::take()\n\n}\n", "file_path": "board-src/template-thumbv7/src/sys.rs", "rank": 68, "score": 161305.63216308545 }, { "content": "#[doc(hidden)]\n\npub fn write_u32_hex(v: u32) {\n\n with_console(|c| {\n\n c.write_u32_hex(v);\n\n });\n\n}", "file_path": "lib/bobbin-sys/src/console.rs", "rank": 69, "score": 161305.63216308545 }, { "content": "fn read_unknown<R: std::io::Read>(r: &mut EventReader<R>) -> Result<(), Error> {\n\n let mut depth = 1;\n\n loop {\n\n match try!(r.next()) {\n\n XmlEvent::StartElement { .. } => depth += 1,\n\n XmlEvent::EndElement { .. } => depth -= 1,\n\n _ => {}\n\n }\n\n if depth == 0 {\n\n return Ok(());\n\n }\n\n }\n\n}\n\n\n", "file_path": "dsl/bobbin-svd/src/lib.rs", "rank": 70, "score": 160046.80849470463 }, { "content": "#[doc(hidden)]\n\npub fn with_console<F: FnOnce(&mut Console)>(f: F) {\n\n unsafe {\n\n if let Some(ref mut console) = CONSOLE {\n\n f(console)\n\n }\n\n }\n\n}\n\n\n", "file_path": "lib/bobbin-sys/src/console.rs", "rank": 71, "score": 158828.1273257368 }, { "content": "/// Returns a string of spaces for the given indentation level.\n\npub fn indent(size: usize) -> String {\n\n const INDENT: &'static str = \" \";\n\n (0..size)\n\n .map(|_| INDENT)\n\n .fold(String::with_capacity(size * INDENT.len()), |r, s| r + s)\n\n}\n\n\n", "file_path": "dsl/bobbin-chip/src/writer.rs", "rank": 72, "score": 156810.4788218146 }, { "content": "pub fn field_with(name: &str) -> String {\n\n format!(\"with_{}\", field_name(name))\n\n}\n\n\n", "file_path": "dsl/bobbin-chip/src/codegen/mod.rs", "rank": 73, "score": 154627.96378083088 }, { "content": "pub fn indent(size: usize) -> String {\n\n const INDENT: &'static str = \" \";\n\n (0..size)\n\n .map(|_| INDENT)\n\n .fold(String::with_capacity(size * INDENT.len()), |r, s| r + s)\n\n}\n\n\n", "file_path": "dsl/bobbin-chip/src/codegen/mod.rs", "rank": 74, "score": 154627.96378083088 }, { "content": "#[doc(hidden)]\n\npub fn write_fmt(args: fmt::Arguments) { \n\n with_console(|c| {\n\n c.write_fmt(args).ok();\n\n });\n\n}\n\n\n", "file_path": "lib/bobbin-sys/src/console.rs", "rank": 75, "score": 154627.96378083088 }, { "content": "pub fn mkdir(path: &Path) -> Result<()> {\n\n if !path.exists() {\n\n fs::create_dir(&path)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "dsl/bobbin-chip/src/codegen/periph.rs", "rank": 76, "score": 154627.96378083088 }, { "content": "pub fn to_camel(word: &str) -> String {\n\n let mut result = String::new();\n\n \n\n let mut last_dash = false;\n\n for (i, c) in word.chars().enumerate() {\n\n if i == 0 {\n\n result.push(c.to_uppercase().next().unwrap());\n\n continue; \n\n }\n\n if c == '_' {\n\n last_dash = true;\n\n continue;\n\n }\n\n if last_dash {\n\n result.push(c.to_uppercase().next().unwrap());\n\n last_dash = false;\n\n continue\n\n }\n\n result.push(c.to_lowercase().next().unwrap());\n\n }\n\n result\n\n}\n\n\n\n\n", "file_path": "dsl/bobbin-chip/src/codegen/mod.rs", "rank": 77, "score": 154627.96378083088 }, { "content": "pub fn mkdir(path: &Path) -> Result<()> {\n\n if !path.exists() {\n\n fs::create_dir(&path)?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "dsl/bobbin-chip/src/codegen/board.rs", "rank": 78, "score": 154627.96378083088 }, { "content": "pub fn field_reg(name: &str) -> String {\n\n let name = field_name(name);\n\n if name.chars().next().unwrap().is_digit(10) {\n\n format!(\"_{}_reg\", name)\n\n } else {\n\n format!(\"{}_reg\", name)\n\n }\n\n \n\n}\n\n\n", "file_path": "dsl/bobbin-chip/src/codegen/mod.rs", "rank": 79, "score": 152524.17223023745 }, { "content": "pub fn field_mut(name: &str) -> String {\n\n let name = field_name(name);\n\n if name.chars().next().unwrap().is_digit(10) {\n\n format!(\"_{}_mut\", name)\n\n } else {\n\n format!(\"{}_mut\", name)\n\n }\n\n}\n\n\n", "file_path": "dsl/bobbin-chip/src/codegen/mod.rs", "rank": 80, "score": 152524.17223023745 }, { "content": "pub fn field_writer(name: &str) -> String {\n\n format!(\"write_{}\", field_name(name))\n\n}\n\n\n", "file_path": "dsl/bobbin-chip/src/codegen/mod.rs", "rank": 81, "score": 152524.17223023745 }, { "content": "pub fn field_ptr(name: &str) -> String {\n\n let name = field_name(name);\n\n if name.chars().next().unwrap().is_digit(10) {\n\n format!(\"_{}_ptr\", name)\n\n } else {\n\n format!(\"{}_ptr\", name)\n\n }\n\n \n\n}\n\n\n", "file_path": "dsl/bobbin-chip/src/codegen/mod.rs", "rank": 82, "score": 152524.17223023745 }, { "content": "pub fn field_name(name: &str) -> String {\n\n let mut result = String::new();\n\n for (i,c) in name.chars().enumerate() {\n\n if i == 0 && c == '_' {\n\n continue;\n\n }\n\n result.push(c.to_lowercase().next().unwrap());\n\n }\n\n result \n\n}\n\n\n", "file_path": "dsl/bobbin-chip/src/codegen/mod.rs", "rank": 83, "score": 152524.17223023745 }, { "content": "pub fn field_getter(name: &str) -> String { \n\n match name {\n\n \"AS\" | \"DO\" | \"MOD\" | \"MATCH\" | \"TYPE\" | \"RESET\" | \"LOOP\" | \"IF\" | \"IN\" | \"BREAK\" | \"REF\" | \"FN\" => {\n\n format!(\"_{}\", field_name(name))\n\n },\n\n _ => {\n\n if name.chars().next().unwrap().is_digit(10) { \n\n format!(\"_{}\", field_name(name))\n\n } else {\n\n String::from(name.to_lowercase())\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "dsl/bobbin-chip/src/codegen/mod.rs", "rank": 84, "score": 152524.17223023745 }, { "content": "pub fn field_test(name: &str) -> String {\n\n format!(\"test_{}\", field_name(name))\n\n}\n\n\n", "file_path": "dsl/bobbin-chip/src/codegen/mod.rs", "rank": 85, "score": 152524.17223023745 }, { "content": "pub fn field_reader(name: &str) -> String {\n\n format!(\"read_{}\", field_name(name))\n\n}\n\n\n", "file_path": "dsl/bobbin-chip/src/codegen/mod.rs", "rank": 86, "score": 152524.17223023745 }, { "content": "pub fn field_setter(name: &str) -> String {\n\n format!(\"set_{}\", field_name(name))\n\n}\n\n\n", "file_path": "dsl/bobbin-chip/src/codegen/mod.rs", "rank": 87, "score": 152524.17223023745 }, { "content": "fn read_opt_u64<R: std::io::Read>(r: &mut EventReader<R>) -> Result<Option<u64>, Error> {\n\n let text = try!(read_opt_text(r));\n\n if let Some(mut text) = text {\n\n text = text.to_lowercase();\n\n if text.starts_with(\"0x\") {\n\n if let Ok(v) = u64::from_str_radix(&text[2..], 16) {\n\n return Ok(Some(v))\n\n } else {\n\n return Err(Error::ParseError(format!(\"Invalid hex number: {:?}\", text)))\n\n }\n\n } \n\n if let Ok(v) = text.parse::<u64>() {\n\n Ok(Some(v))\n\n } else {\n\n Err(Error::ParseError(format!(\"Invalid number: {:?}\", text)))\n\n }\n\n } else {\n\n return Ok(None);\n\n }\n\n}\n\n\n", "file_path": "dsl/bobbin-svd/src/lib.rs", "rank": 88, "score": 151687.84465298068 }, { "content": "pub fn test_pit(tim_ch: &PitCh) {\n\n test_timer(tim_ch, 1024);\n\n test_timer_down(tim_ch, 1024);\n\n}", "file_path": "mcu/bobbin-kinetis/kinetis-common/src/ext/pit.rs", "rank": 89, "score": 151139.88466050822 }, { "content": "#[doc(hidden)]\n\npub fn write_u32(v: u32, base: u32) {\n\n with_console(|c| {\n\n c.write_u32(v, base);\n\n });\n\n}\n\n\n", "file_path": "lib/bobbin-sys/src/console.rs", "rank": 90, "score": 150993.24251846512 }, { "content": "pub fn gen_cargo_toml(path: &Path) -> Result<()> {\n\n let mut out = File::create(path)?;\n\n\n\n writeln!(out, \"[package]\")?;\n\n\n\n Ok(())\n\n \n\n}\n\n\n\n\n", "file_path": "dsl/bobbin-chip/src/codegen/crates.rs", "rank": 91, "score": 150494.92028280592 }, { "content": "pub fn add_region(addr: usize, len: usize) {\n\n MEM.with(|m| m.borrow_mut().add_region(addr, len));\n\n} \n\n\n\npub unsafe fn read_volatile<T>(addr: *const T) -> T {\n\n MEM.with(|m| m.borrow().read(addr))\n\n}\n\n\n\npub unsafe fn write_volatile<T>(addr: *mut T, value: T) {\n\n MEM.with(|m| m.borrow_mut().write(addr, value));\n\n}\n\n\n\n\n\n\n", "file_path": "lib/bobbin-mcu/src/rw.rs", "rank": 92, "score": 148889.4509678717 }, { "content": "#[inline]\n\npub fn read_u16(addr: *const u16) -> u16 {\n\n read(addr)\n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/rw.rs", "rank": 93, "score": 148889.4509678717 }, { "content": "#[inline]\n\npub fn read_u8(addr: *const u8) -> u8 {\n\n read(addr)\n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/rw.rs", "rank": 94, "score": 148889.4509678717 }, { "content": "pub fn group_name(periphs: &[Peripheral]) -> Option<&str> {\n\n for p in periphs.iter() {\n\n if let Some(ref group_name) = p.group_name {\n\n return Some(group_name)\n\n }\n\n }\n\n None\n\n}", "file_path": "dsl/bobbin-chip/src/lib.rs", "rank": 95, "score": 148889.4509678717 }, { "content": "#[inline]\n\npub fn read_u32(addr: *const u32) -> u32 {\n\n read(addr)\n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/rw.rs", "rank": 96, "score": 148889.4509678717 }, { "content": "/// Returns a new Sexp containing a Symbol token.\n\npub fn symbol<'a>(s: &'a str) -> Sexp<'a> {\n\n Sexp::Token(Token::Symbol(s))\n\n}\n\n\n", "file_path": "dsl/bobbin-sexp/sexp/src/lib.rs", "rank": 97, "score": 147658.18557359587 }, { "content": "/// Returns a new Sexp containing a String token.\n\npub fn string<'a>(s: &'a str) -> Sexp<'a> {\n\n Sexp::Token(Token::String(s))\n\n}\n\n\n", "file_path": "dsl/bobbin-sexp/sexp/src/lib.rs", "rank": 98, "score": 147658.18557359587 }, { "content": "/// Returns a new Sexp containing a Number token.\n\npub fn number<'a>(s: &'a str) -> Sexp<'a> {\n\n Sexp::Token(Token::Number(s))\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_basic() {\n\n let abc = symbol(\"abc\");\n\n let def = symbol(\"def\");\n\n let list = Sexp::from(vec![abc.clone(), def.clone()]);\n\n assert!(list.list().is_some());\n\n assert_eq!(list.list(), Some(&[abc.clone(), def.clone()][..]));\n\n assert!(list.list().unwrap()[0] == abc);\n\n assert!(list.list().unwrap()[1] == def);\n\n\n\n assert_eq!(list.first(), Some(&abc));\n\n assert_eq!(list.rest(), Some(&[def.clone()][..]));\n\n }\n\n}", "file_path": "dsl/bobbin-sexp/sexp/src/lib.rs", "rank": 99, "score": 147658.18557359587 } ]
Rust
src/validate.rs
nlehuby/transport-validator
ea4ed1efd10c7a4ae91ebff92e4114bd95db1a20
use crate::{issues, metadatas, validators}; use serde::Serialize; use std::collections::BTreeMap; use std::convert::TryFrom; use std::error::Error; fn create_unloadable_model_error(error: gtfs_structures::Error) -> issues::Issue { let msg = if let Some(inner) = error.source() { format!("{}: {}", error, inner) } else { format!("{}", error) }; let mut issue = issues::Issue::new( issues::Severity::Fatal, issues::IssueType::UnloadableModel, "A fatal error has occured while loading the model, many rules have not been checked", ) .details(&msg); if let gtfs_structures::Error::CSVError { file_name, source, line_in_error, } = error { issue.related_file = Some(issues::RelatedFile { file_name, line: source .position() .and_then(|p| line_in_error.map(|l| (p.line(), l))) .map(|(line_number, line_in_error)| issues::RelatedLine { line_number, headers: line_in_error.headers, values: line_in_error.values, }), }); } issue } #[derive(Serialize, Debug)] pub struct Response { pub metadata: Option<metadatas::Metadata>, pub validations: BTreeMap<issues::IssueType, Vec<issues::Issue>>, } pub fn validate_and_metadata(rgtfs: gtfs_structures::RawGtfs, max_issues: usize) -> Response { let mut validations = BTreeMap::new(); let mut issues: Vec<_> = validators::raw_gtfs::validate(&rgtfs) .into_iter() .chain(validators::invalid_reference::validate(&rgtfs)) .chain(validators::file_presence::validate(&rgtfs)) .collect(); let mut metadata = metadatas::extract_metadata(&rgtfs); match gtfs_structures::Gtfs::try_from(rgtfs) { Ok(ref gtfs) => { issues.extend( validators::unused_stop::validate(&gtfs) .into_iter() .chain(validators::duration_distance::validate(&gtfs)) .chain(validators::check_name::validate(&gtfs)) .chain(validators::check_id::validate(&gtfs)) .chain(validators::stops::validate(&gtfs)) .chain(validators::route_type::validate(&gtfs)) .chain(validators::shapes::validate(&gtfs)) .chain(validators::agency::validate(&gtfs)) .chain(validators::duplicate_stops::validate(&gtfs)) .chain(validators::fare_attributes::validate(&gtfs)) .chain(validators::feed_info::validate(&gtfs)) .chain(validators::stop_times::validate(&gtfs)) .chain(validators::interpolated_stoptimes::validate(&gtfs)), ); issues .iter_mut() .for_each(|issue| issue.push_related_geojson(&gtfs)); } Err(e) => { issues.push(create_unloadable_model_error(e)); } } for issue in issues { validations .entry(issue.issue_type) .or_insert_with(Vec::new) .push(issue); } for (issue_type, issues) in validations.iter_mut() { metadata.issues_count.insert(*issue_type, issues.len()); issues.truncate(max_issues); } Response { metadata: Some(metadata), validations, } } pub fn generate_validation(input: &str, max_issues: usize) -> Response { log::info!("Starting validation: {}", input); let raw_gtfs = gtfs_structures::RawGtfs::new(input); process(raw_gtfs, max_issues) } pub fn process( raw_gtfs: Result<gtfs_structures::RawGtfs, gtfs_structures::Error>, max_issues: usize, ) -> Response { match raw_gtfs { Ok(raw_gtfs) => self::validate_and_metadata(raw_gtfs, max_issues), Err(e) => { let mut validations = BTreeMap::new(); validations.insert( issues::IssueType::InvalidArchive, vec![issues::Issue::new( issues::Severity::Fatal, issues::IssueType::InvalidArchive, "", ) .details(format!("{}", e).as_ref())], ); Response { metadata: None, validations, } } } } pub fn generate_validation_from_reader<T: std::io::Read + std::io::Seek>( reader: T, max_issues: usize, ) -> Response { let g = gtfs_structures::RawGtfs::from_reader(reader); process(g, max_issues) } pub fn validate(input: &str, max_issues: usize) -> Result<String, anyhow::Error> { Ok(serde_json::to_string(&generate_validation( input, max_issues, ))?) } #[test] fn test_invalid_stop_points() { let issues = generate_validation("test_data/invalid_stop_file", 1000); let unloadable_model_errors = &issues.validations[&issues::IssueType::UnloadableModel]; assert_eq!(unloadable_model_errors.len(), 1); let unloadable_model_error = &unloadable_model_errors[0]; assert_eq!(unloadable_model_error, &issues::Issue { severity: issues::Severity::Fatal, issue_type: issues::IssueType::UnloadableModel, object_id: "A fatal error has occured while loading the model, many rules have not been checked".to_string(), object_type: None, object_name: None, related_objects: vec![], details: Some( "impossible to read csv file \'stops.txt\': CSV deserialize error: record 12 (line: 13, byte: 739): invalid float literal".to_string() ), related_file: Some(issues::RelatedFile { file_name: "stops.txt".to_owned(), line: Some(issues::RelatedLine { line_number: 13, headers: vec!["stop_id", "stop_name", "stop_desc", "stop_lat", "stop_lon", "zone_id", "stop_url", "location_type", "parent_station"].into_iter().map(|s| s.to_owned()).collect(), values: vec!["stop_with_bad_coord", "Moo", "", "baaaaaad_coord", "-116.40094", "", "", "", "1"].into_iter().map(|s| s.to_owned()).collect() }), }), geojson: None }); assert_eq!( issues.validations[&issues::IssueType::InvalidReference], vec![issues::Issue { severity: issues::Severity::Fatal, issue_type: issues::IssueType::InvalidReference, object_id: "AAMV".to_string(), object_type: Some(gtfs_structures::ObjectType::Route), object_name: None, related_file: None, related_objects: vec![issues::RelatedObject { id: "AAMV4".to_string(), object_type: Some(gtfs_structures::ObjectType::Trip), name: Some("route id: AAMV, service id: WE".to_string()) }], details: Some("The route is referenced by a trip but does not exists".to_string()), geojson: None }] ); }
use crate::{issues, metadatas, validators}; use serde::Serialize; use std::collections::BTreeMap; use std::convert::TryFrom; use std::error::Error; fn create_unloadable_model_error(error: gtfs_structures::Error) -> issues::Issue { let msg = if let Some(inner) = error.source() { format!("{}: {}", error, inner) } else { format!("{}", error) }; let mut issue = issues::Issue::new( issues::Severity::Fata
#[derive(Serialize, Debug)] pub struct Response { pub metadata: Option<metadatas::Metadata>, pub validations: BTreeMap<issues::IssueType, Vec<issues::Issue>>, } pub fn validate_and_metadata(rgtfs: gtfs_structures::RawGtfs, max_issues: usize) -> Response { let mut validations = BTreeMap::new(); let mut issues: Vec<_> = validators::raw_gtfs::validate(&rgtfs) .into_iter() .chain(validators::invalid_reference::validate(&rgtfs)) .chain(validators::file_presence::validate(&rgtfs)) .collect(); let mut metadata = metadatas::extract_metadata(&rgtfs); match gtfs_structures::Gtfs::try_from(rgtfs) { Ok(ref gtfs) => { issues.extend( validators::unused_stop::validate(&gtfs) .into_iter() .chain(validators::duration_distance::validate(&gtfs)) .chain(validators::check_name::validate(&gtfs)) .chain(validators::check_id::validate(&gtfs)) .chain(validators::stops::validate(&gtfs)) .chain(validators::route_type::validate(&gtfs)) .chain(validators::shapes::validate(&gtfs)) .chain(validators::agency::validate(&gtfs)) .chain(validators::duplicate_stops::validate(&gtfs)) .chain(validators::fare_attributes::validate(&gtfs)) .chain(validators::feed_info::validate(&gtfs)) .chain(validators::stop_times::validate(&gtfs)) .chain(validators::interpolated_stoptimes::validate(&gtfs)), ); issues .iter_mut() .for_each(|issue| issue.push_related_geojson(&gtfs)); } Err(e) => { issues.push(create_unloadable_model_error(e)); } } for issue in issues { validations .entry(issue.issue_type) .or_insert_with(Vec::new) .push(issue); } for (issue_type, issues) in validations.iter_mut() { metadata.issues_count.insert(*issue_type, issues.len()); issues.truncate(max_issues); } Response { metadata: Some(metadata), validations, } } pub fn generate_validation(input: &str, max_issues: usize) -> Response { log::info!("Starting validation: {}", input); let raw_gtfs = gtfs_structures::RawGtfs::new(input); process(raw_gtfs, max_issues) } pub fn process( raw_gtfs: Result<gtfs_structures::RawGtfs, gtfs_structures::Error>, max_issues: usize, ) -> Response { match raw_gtfs { Ok(raw_gtfs) => self::validate_and_metadata(raw_gtfs, max_issues), Err(e) => { let mut validations = BTreeMap::new(); validations.insert( issues::IssueType::InvalidArchive, vec![issues::Issue::new( issues::Severity::Fatal, issues::IssueType::InvalidArchive, "", ) .details(format!("{}", e).as_ref())], ); Response { metadata: None, validations, } } } } pub fn generate_validation_from_reader<T: std::io::Read + std::io::Seek>( reader: T, max_issues: usize, ) -> Response { let g = gtfs_structures::RawGtfs::from_reader(reader); process(g, max_issues) } pub fn validate(input: &str, max_issues: usize) -> Result<String, anyhow::Error> { Ok(serde_json::to_string(&generate_validation( input, max_issues, ))?) } #[test] fn test_invalid_stop_points() { let issues = generate_validation("test_data/invalid_stop_file", 1000); let unloadable_model_errors = &issues.validations[&issues::IssueType::UnloadableModel]; assert_eq!(unloadable_model_errors.len(), 1); let unloadable_model_error = &unloadable_model_errors[0]; assert_eq!(unloadable_model_error, &issues::Issue { severity: issues::Severity::Fatal, issue_type: issues::IssueType::UnloadableModel, object_id: "A fatal error has occured while loading the model, many rules have not been checked".to_string(), object_type: None, object_name: None, related_objects: vec![], details: Some( "impossible to read csv file \'stops.txt\': CSV deserialize error: record 12 (line: 13, byte: 739): invalid float literal".to_string() ), related_file: Some(issues::RelatedFile { file_name: "stops.txt".to_owned(), line: Some(issues::RelatedLine { line_number: 13, headers: vec!["stop_id", "stop_name", "stop_desc", "stop_lat", "stop_lon", "zone_id", "stop_url", "location_type", "parent_station"].into_iter().map(|s| s.to_owned()).collect(), values: vec!["stop_with_bad_coord", "Moo", "", "baaaaaad_coord", "-116.40094", "", "", "", "1"].into_iter().map(|s| s.to_owned()).collect() }), }), geojson: None }); assert_eq!( issues.validations[&issues::IssueType::InvalidReference], vec![issues::Issue { severity: issues::Severity::Fatal, issue_type: issues::IssueType::InvalidReference, object_id: "AAMV".to_string(), object_type: Some(gtfs_structures::ObjectType::Route), object_name: None, related_file: None, related_objects: vec![issues::RelatedObject { id: "AAMV4".to_string(), object_type: Some(gtfs_structures::ObjectType::Trip), name: Some("route id: AAMV, service id: WE".to_string()) }], details: Some("The route is referenced by a trip but does not exists".to_string()), geojson: None }] ); }
l, issues::IssueType::UnloadableModel, "A fatal error has occured while loading the model, many rules have not been checked", ) .details(&msg); if let gtfs_structures::Error::CSVError { file_name, source, line_in_error, } = error { issue.related_file = Some(issues::RelatedFile { file_name, line: source .position() .and_then(|p| line_in_error.map(|l| (p.line(), l))) .map(|(line_number, line_in_error)| issues::RelatedLine { line_number, headers: line_in_error.headers, values: line_in_error.values, }), }); } issue }
function_block-function_prefixed
[ { "content": "fn validate_speeds(gtfs: &gtfs_structures::Gtfs) -> Result<Vec<Issue>, gtfs_structures::Error> {\n\n let mut issues_by_stops_and_type = std::collections::HashMap::new();\n\n\n\n for trip in gtfs.trips.values() {\n\n let route = gtfs.get_route(&trip.route_id)?;\n\n for (departure, arrival) in trip.stop_times.iter().tuple_windows() {\n\n if let Some((distance, duration)) = distance_and_duration(departure, arrival) {\n\n let issue_kind = if distance < 10.0 {\n\n Some((\n\n Severity::Information,\n\n IssueType::CloseStops,\n\n format!(\"distance between the stops is {:.0} meter(s)\", distance),\n\n ))\n\n // Some timetable are rounded to the minute. For short distances this can result in a null duration\n\n // If stops are more than 500m appart, they should need at least a minute\n\n } else if duration == 0.0 && distance > 500.0 {\n\n Some((\n\n Severity::Warning,\n\n IssueType::NullDuration,\n\n format!(\n", "file_path": "src/validators/duration_distance.rs", "rank": 2, "score": 127663.24863961089 }, { "content": "pub fn extract_metadata(gtfs: &gtfs_structures::RawGtfs) -> Metadata {\n\n use gtfs_structures::PickupDropOffType;\n\n use gtfs_structures::RouteType::*;\n\n\n\n let start_end = gtfs\n\n .calendar\n\n .as_ref()\n\n .and_then(|c| c.as_ref().ok())\n\n .unwrap_or(&vec![])\n\n .iter()\n\n .flat_map(|c| vec![c.start_date, c.end_date].into_iter())\n\n .chain(\n\n gtfs.calendar_dates\n\n .as_ref()\n\n .and_then(|c| c.as_ref().ok())\n\n .unwrap_or(&vec![])\n\n .iter()\n\n .filter(|cd| cd.exception_type == gtfs_structures::Exception::Added)\n\n .map(|c| c.date),\n\n )\n", "file_path": "src/metadatas.rs", "rank": 4, "score": 102127.44584991815 }, { "content": "fn make_issue(\n\n feed: &gtfs_structures::FeedInfo,\n\n severity: Severity,\n\n issue_type: IssueType,\n\n) -> Issue {\n\n Issue::new(severity, issue_type, \"\").name(&format!(\"{}\", feed))\n\n}\n\n\n", "file_path": "src/validators/feed_info.rs", "rank": 5, "score": 98190.0752708827 }, { "content": "fn make_duplicate_stops_issue<\n\n T: gtfs_structures::Id + gtfs_structures::Type + std::fmt::Display,\n\n>(\n\n o: &T,\n\n) -> Issue {\n\n Issue::new_with_obj(Severity::Information, IssueType::DuplicateStops, o)\n\n}\n\n\n", "file_path": "src/validators/duplicate_stops.rs", "rank": 6, "score": 94171.15213936756 }, { "content": "pub fn validate(gtfs: &Gtfs) -> Vec<Issue> {\n\n gtfs.routes\n\n .iter()\n\n .filter_map(|(_, route)| get_non_standard_route_type(route))\n\n .map(|(route, route_type)| {\n\n Issue::new_with_obj(Severity::Information, IssueType::InvalidRouteType, route).details(\n\n &format!(\n\n \"The route type '{}' is not part of the main GTFS specification\",\n\n route_type\n\n ),\n\n )\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/validators/route_type.rs", "rank": 7, "score": 91917.11451753673 }, { "content": "fn validate_coord(gtfs: &gtfs_structures::Gtfs) -> Vec<Issue> {\n\n let missing_coord = gtfs.stops.values().filter_map(|stop| check_coord(stop));\n\n let valid = gtfs\n\n .stops\n\n .values()\n\n .filter(|stop| !valid_coord(stop))\n\n .map(|stop| make_invalid_coord_issue(&**stop));\n\n missing_coord.chain(valid).collect()\n\n}\n\n\n", "file_path": "src/validators/stops.rs", "rank": 8, "score": 90306.28602782855 }, { "content": "fn make_issue<T: gtfs_structures::Id>(o: &T, issue_type: IssueType) -> Issue {\n\n Issue::new(Severity::Error, issue_type, o.id()).object_type(gtfs_structures::ObjectType::Fare)\n\n}\n\n\n", "file_path": "src/validators/fare_attributes.rs", "rank": 9, "score": 90226.61215761185 }, { "content": "pub fn validate(gtfs: &gtfs_structures::Gtfs) -> Vec<Issue> {\n\n let missing_coord = gtfs\n\n .shapes\n\n .iter()\n\n .filter(|(_id, shapes)| !shapes.iter().all(has_coord))\n\n .map(|(id, _shapes)| {\n\n Issue::new(Severity::Warning, IssueType::MissingCoordinates, id)\n\n .object_type(gtfs_structures::ObjectType::Shape)\n\n });\n\n let valid = gtfs\n\n .shapes\n\n .iter()\n\n .filter(|(_id, shapes)| !shapes.iter().all(valid_coord))\n\n .map(|(id, _shapes)| {\n\n Issue::new(Severity::Error, IssueType::InvalidCoordinates, id)\n\n .object_type(gtfs_structures::ObjectType::Shape)\n\n });\n\n missing_coord.chain(valid).collect()\n\n}\n\n\n", "file_path": "src/validators/shapes.rs", "rank": 11, "score": 89634.27615684507 }, { "content": "pub fn validate(gtfs: &gtfs_structures::Gtfs) -> Vec<Issue> {\n\n let missing_url = gtfs\n\n .agencies\n\n .iter()\n\n .filter(|agency| !has_url(agency))\n\n .map(|agency| Issue::new_with_obj(Severity::Warning, IssueType::MissingUrl, agency));\n\n let invalid_url = gtfs\n\n .agencies\n\n .iter()\n\n .filter(|agency| !valid_url(agency))\n\n .map(|agency| {\n\n Issue::new_with_obj(Severity::Warning, IssueType::InvalidUrl, agency)\n\n .details(&format!(\"Publisher url {} is invalid\", agency.url))\n\n });\n\n let invalid_tz = gtfs\n\n .agencies\n\n .iter()\n\n .filter(|agency| !valid_timezone(agency))\n\n .map(|agency| Issue::new_with_obj(Severity::Error, IssueType::InvalidTimezone, agency));\n\n missing_url.chain(invalid_url).chain(invalid_tz).collect()\n\n}\n\n\n", "file_path": "src/validators/agency.rs", "rank": 12, "score": 89634.27615684507 }, { "content": "pub fn validate(gtfs: &gtfs_structures::Gtfs) -> Vec<Issue> {\n\n validate_coord(gtfs)\n\n .into_iter()\n\n .chain(validate_parent_id(gtfs))\n\n .collect()\n\n}\n\n\n", "file_path": "src/validators/stops.rs", "rank": 13, "score": 89634.27615684507 }, { "content": "// Check if the parent of the stop is correct\n\n// Note: we don't check if the parent exists, because it is checked by the `InvalidReference` issue\n\nfn validate_parent_id(gtfs: &gtfs_structures::Gtfs) -> Vec<Issue> {\n\n let stops_by_id: std::collections::HashMap<_, _> = gtfs\n\n .stops\n\n .iter()\n\n .map(|(_, stop)| (stop.id.clone(), stop.clone()))\n\n .collect();\n\n\n\n gtfs.stops\n\n .iter()\n\n .filter_map(|(_, stop)| {\n\n let parent = stop\n\n .parent_station\n\n .as_ref()\n\n .and_then(|parent| stops_by_id.get(parent));\n\n let details = match stop.location_type {\n\n LocationType::StopArea => {\n\n // a stop area is forbidden to have a parent station\n\n stop.parent_station\n\n .as_ref()\n\n .map(|_p| \"it's not valid for a stop area to have a parent station\")\n", "file_path": "src/validators/stops.rs", "rank": 14, "score": 88765.54587046898 }, { "content": "pub fn validate(gtfs: &gtfs_structures::Gtfs) -> Vec<Issue> {\n\n gtfs.trips\n\n .values()\n\n .filter_map(impossible_to_interpolate_st)\n\n .collect()\n\n}\n\n\n", "file_path": "src/validators/interpolated_stoptimes.rs", "rank": 15, "score": 88091.11978895795 }, { "content": "pub fn validate(gtfs: &gtfs_structures::Gtfs) -> Vec<Issue> {\n\n let route_issues = gtfs\n\n .routes\n\n .iter()\n\n .filter(|&(_, route)| !has_name(route))\n\n .map(|(_, route)| make_missing_name_issue(route));\n\n let stop_issues = gtfs\n\n .stops\n\n .values()\n\n .filter(|&stop| !has_name(&**stop))\n\n .map(|stop| make_missing_name_issue(&**stop));\n\n let agency_issues = gtfs\n\n .agencies\n\n .iter()\n\n .filter(|&agency| !has_name(&*agency))\n\n .map(|agency| make_missing_name_issue(agency));\n\n let feed_info_issues = gtfs\n\n .feed_info\n\n .iter()\n\n .filter(|&feed_info| !has_name(&*feed_info))\n\n .map(|_feed_info| Issue::new(Severity::Warning, IssueType::MissingName, \"\"));\n\n route_issues\n\n .chain(stop_issues)\n\n .chain(agency_issues)\n\n .chain(feed_info_issues)\n\n .collect()\n\n}\n\n\n", "file_path": "src/validators/check_name.rs", "rank": 16, "score": 88091.11978895795 }, { "content": "pub fn validate(gtfs: &gtfs_structures::Gtfs) -> Vec<Issue> {\n\n let mut used_stops = HashSet::new();\n\n\n\n // A stop can be used for a stop time\n\n for trip in gtfs.trips.values() {\n\n for stop_time in &trip.stop_times {\n\n used_stops.insert(stop_time.stop.id.to_owned());\n\n }\n\n }\n\n\n\n // A stop can be the parent station\n\n for stop in gtfs.stops.values() {\n\n for parent in &stop.parent_station {\n\n if used_stops.contains(&stop.id) {\n\n used_stops.insert(parent.to_owned());\n\n }\n\n }\n\n }\n\n\n\n gtfs.stops\n\n .iter()\n\n .filter(|&(_, stop)| !used_stops.contains(&stop.id))\n\n .map(|(_, stop)| make_unused_stop_issue(&**stop))\n\n .collect()\n\n}\n\n\n", "file_path": "src/validators/unused_stop.rs", "rank": 17, "score": 88091.11978895795 }, { "content": "pub fn validate(gtfs: &gtfs_structures::Gtfs) -> Vec<Issue> {\n\n validate_speeds(gtfs).unwrap_or_else(|e| {\n\n vec![Issue::new(\n\n Severity::Fatal,\n\n IssueType::InvalidReference,\n\n &format!(\"{}\", e),\n\n )]\n\n })\n\n}\n\n\n", "file_path": "src/validators/duration_distance.rs", "rank": 18, "score": 88091.11978895795 }, { "content": "pub fn validate(gtfs: &gtfs_structures::Gtfs) -> Vec<Issue> {\n\n let missing_price = gtfs\n\n .fare_attributes\n\n .values()\n\n .filter(|fare_attributes| !has_price(*fare_attributes))\n\n .map(|fare_attributes| make_issue(fare_attributes, IssueType::MissingPrice));\n\n let invalid_currency = gtfs\n\n .fare_attributes\n\n .values()\n\n .filter(|fare_attributes| !valid_currency(*fare_attributes))\n\n .map(|fare_attributes| make_issue(fare_attributes, IssueType::InvalidCurrency));\n\n let invalid_transfers = gtfs\n\n .fare_attributes\n\n .values()\n\n .filter(|fare_attributes| !valid_transfers(*fare_attributes))\n\n .map(|fare_attributes| make_issue(fare_attributes, IssueType::InvalidTransfers));\n\n let invalid_duration = gtfs\n\n .fare_attributes\n\n .values()\n\n .filter(|fare_attributes| !valid_duration(*fare_attributes))\n\n .map(|fare_attributes| make_issue(fare_attributes, IssueType::InvalidTransferDuration));\n\n missing_price\n\n .chain(invalid_currency)\n\n .chain(invalid_transfers)\n\n .chain(invalid_duration)\n\n .collect()\n\n}\n\n\n", "file_path": "src/validators/fare_attributes.rs", "rank": 19, "score": 88091.11978895795 }, { "content": "pub fn validate(gtfs: &gtfs_structures::Gtfs) -> Vec<Issue> {\n\n let mut issues = vec![];\n\n\n\n for (stop_a, stop_b) in gtfs\n\n .stops\n\n .values()\n\n .filter(|stop| stop.location_type != gtfs_structures::LocationType::StationEntrance)\n\n .tuple_combinations()\n\n {\n\n if duplicate_stops(stop_a, stop_b) {\n\n issues.push(make_duplicate_stops_issue(&**stop_a).add_related_object(&**stop_b));\n\n }\n\n }\n\n issues\n\n}\n\n\n", "file_path": "src/validators/duplicate_stops.rs", "rank": 20, "score": 88091.11978895795 }, { "content": "pub fn validate(gtfs: &gtfs_structures::Gtfs) -> Vec<Issue> {\n\n let mut wrong_stops = HashMap::new();\n\n gtfs.trips.values().for_each(|trip| {\n\n trip.stop_times.iter().for_each(|st| {\n\n if st.stop.location_type != LocationType::StopPoint {\n\n let issue = wrong_stops.entry(st.stop.id.clone()).or_insert_with(|| {\n\n Issue::new_with_obj(\n\n Severity::Warning,\n\n IssueType::InvalidStopLocationTypeInTrip,\n\n &*st.stop,\n\n )\n\n .details(&format!(\n\n \"A {:?} cannot be referenced by a stop time\",\n\n st.stop.location_type\n\n ))\n\n });\n\n\n\n if issue.related_objects.len() < MAX_TRIPS {\n\n // we do not add more than 20 trip as related object\n\n issue.push_related_object(trip);\n\n }\n\n }\n\n })\n\n });\n\n\n\n // dbg!(wrong_stops);\n\n\n\n wrong_stops.into_iter().map(|(_, v)| v).collect()\n\n}\n\n\n", "file_path": "src/validators/stop_times.rs", "rank": 21, "score": 88091.11978895795 }, { "content": "pub fn validate(gtfs: &gtfs_structures::Gtfs) -> Vec<Issue> {\n\n let r = gtfs\n\n .routes\n\n .values()\n\n .filter(|route| !has_id(*route))\n\n .map(|route| make_missing_id_issue(route));\n\n let t = gtfs\n\n .trips\n\n .values()\n\n .filter(|trip| !has_id(*trip))\n\n .map(|trip| make_missing_id_issue(trip));\n\n let c = gtfs\n\n .calendar\n\n .values()\n\n .filter(|calendar| !has_id(*calendar))\n\n .map(|calendar| make_missing_id_issue(calendar));\n\n let st = gtfs\n\n .stops\n\n .values()\n\n .filter(|&stop| !has_id(&**stop))\n", "file_path": "src/validators/check_id.rs", "rank": 22, "score": 88091.11978895795 }, { "content": "pub fn validate(gtfs: &gtfs_structures::Gtfs) -> Vec<Issue> {\n\n let missing_url = gtfs\n\n .feed_info\n\n .iter()\n\n .filter(|feed_info| !has_url(feed_info))\n\n .map(|feed_info| make_issue(feed_info, Severity::Warning, IssueType::MissingUrl));\n\n let invalid_url = gtfs\n\n .feed_info\n\n .iter()\n\n .filter(|feed_info| !valid_url(feed_info))\n\n .map(|feed_info| {\n\n make_issue(feed_info, Severity::Warning, IssueType::InvalidUrl)\n\n .details(&format!(\"Publisher url {} is invalid\", feed_info.url))\n\n });\n\n let missing_lang = gtfs\n\n .feed_info\n\n .iter()\n\n .filter(|feed_info| !has_lang(feed_info))\n\n .map(|feed_info| make_issue(feed_info, Severity::Warning, IssueType::MissingLanguage));\n\n let invalid_lang = gtfs\n", "file_path": "src/validators/feed_info.rs", "rank": 23, "score": 88091.11978895795 }, { "content": "/// Check that the links in the GTFS are valid\n\n///\n\n/// There are not that many link in the gtfs, we check:\n\n/// * the stop times's stops and trips\n\n/// * the trips routes and calendar\n\npub fn validate(raw_gtfs: &gtfs_structures::RawGtfs) -> Vec<Issue> {\n\n let id_container = Ids::new(raw_gtfs);\n\n\n\n id_container\n\n .check_stop_times(&raw_gtfs.stop_times)\n\n .into_iter()\n\n .chain(id_container.check_trips(&raw_gtfs.trips))\n\n .chain(id_container.check_routes(&raw_gtfs.routes))\n\n .chain(id_container.check_stops(&raw_gtfs.stops))\n\n .collect()\n\n}\n\n\n", "file_path": "src/validators/invalid_reference.rs", "rank": 24, "score": 85193.67904957708 }, { "content": "pub fn validate(raw_gtfs: &gtfs_structures::RawGtfs) -> Vec<Issue> {\n\n missing_files(raw_gtfs)\n\n .into_iter()\n\n .chain(extra_files(raw_gtfs).into_iter())\n\n .collect()\n\n}\n\n\n", "file_path": "src/validators/file_presence.rs", "rank": 25, "score": 85191.04640929853 }, { "content": "pub fn validate(raw_gtfs: &gtfs_structures::RawGtfs) -> Vec<Issue> {\n\n check_duplicates(&raw_gtfs.stops, Severity::Warning)\n\n .into_iter()\n\n .chain(check_duplicates(&raw_gtfs.routes, Severity::Warning).into_iter())\n\n .chain(check_duplicates(&raw_gtfs.trips, Severity::Warning).into_iter())\n\n .chain(\n\n check_duplicates(\n\n &raw_gtfs.calendar.as_ref().unwrap_or(&Ok(vec![])),\n\n Severity::Error,\n\n )\n\n .into_iter(),\n\n )\n\n .chain(\n\n check_duplicates(\n\n &raw_gtfs.fare_attributes.as_ref().unwrap_or(&Ok(vec![])),\n\n Severity::Warning,\n\n )\n\n .into_iter(),\n\n )\n\n .collect()\n\n}\n\n\n", "file_path": "src/validators/raw_gtfs.rs", "rank": 26, "score": 85191.04640929853 }, { "content": "fn check_coord(stop: &gtfs_structures::Stop) -> Option<Issue> {\n\n if stop.location_type != LocationType::GenericNode\n\n && stop.location_type != LocationType::BoardingArea\n\n && !has_coord(stop)\n\n {\n\n // the coordinates are optional for generic nodes and boarding area\n\n Some(\n\n make_missing_coord_issue(stop).details(match (stop.longitude, stop.latitude) {\n\n (None, None) => \"Latitude and longitude are missing\",\n\n (Some(lon), Some(lat)) if lon == 0.0 && lat == 0.0 => {\n\n \"Latitude and longitude are missing\"\n\n }\n\n (Some(lon), _) if lon == 0.0 => \"Longitude is missing\",\n\n (_, Some(lat)) if lat == 0.0 => \"Latitude is missing\",\n\n _ => \"Coordinates are ok\",\n\n }),\n\n )\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/validators/stops.rs", "rank": 27, "score": 84165.41721361373 }, { "content": "fn make_missing_name_issue<T: gtfs_structures::Id + gtfs_structures::Type>(o: &T) -> Issue {\n\n Issue::new(Severity::Warning, IssueType::MissingName, o.id()).object_type(o.object_type())\n\n}\n\n\n", "file_path": "src/validators/check_name.rs", "rank": 28, "score": 82081.60122577938 }, { "content": "fn impossible_to_interpolate_st(trip: &gtfs_structures::Trip) -> Option<Issue> {\n\n let first_st = trip.stop_times.first();\n\n let last_st = trip.stop_times.last();\n\n\n\n if let (Some(first_st), Some(last_st)) = (first_st, last_st) {\n\n if first_st.departure_time.is_none()\n\n || first_st.arrival_time.is_none()\n\n || last_st.departure_time.is_none()\n\n || last_st.arrival_time.is_none()\n\n {\n\n Some(\n\n Issue::new_with_obj(\n\n Severity::Error,\n\n IssueType::ImpossibleToInterpolateStopTimes,\n\n trip,\n\n )\n\n .details(\"The first and last stop time of a trip cannot have empty departure/arrivals as they cannot be interpolated\"),\n\n )\n\n } else {\n\n None\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/validators/interpolated_stoptimes.rs", "rank": 29, "score": 81018.7731911447 }, { "content": "fn has_on_demand_dropoff(\n\n stop_time: &gtfs_structures::RawStopTime,\n\n pickup_dropoff_type: gtfs_structures::PickupDropOffType,\n\n) -> bool {\n\n stop_time\n\n .drop_off_type\n\n .map_or(false, |drop_off_type| drop_off_type == pickup_dropoff_type)\n\n}\n\n\n", "file_path": "src/metadatas.rs", "rank": 30, "score": 79880.07993859441 }, { "content": "fn has_on_demand_pickup(\n\n stop_time: &gtfs_structures::RawStopTime,\n\n pickup_dropoff_type: gtfs_structures::PickupDropOffType,\n\n) -> bool {\n\n stop_time\n\n .pickup_type\n\n .map_or(false, |pickup_time| pickup_time == pickup_dropoff_type)\n\n}\n\n\n", "file_path": "src/metadatas.rs", "rank": 31, "score": 79880.07993859441 }, { "content": "#[test]\n\nfn test_has_fares() {\n\n let raw_gtfs =\n\n gtfs_structures::RawGtfs::new(\"test_data/fare_attributes\").expect(\"Failed to load data\");\n\n let metadatas = extract_metadata(&raw_gtfs);\n\n assert!(metadatas.has_fares);\n\n}\n\n\n", "file_path": "src/metadatas.rs", "rank": 32, "score": 79880.07993859441 }, { "content": "#[test]\n\nfn test_has_shapes() {\n\n let raw_gtfs = gtfs_structures::RawGtfs::new(\"test_data/shapes\").expect(\"Failed to load data\");\n\n let metadatas = extract_metadata(&raw_gtfs);\n\n assert!(metadatas.has_shapes);\n\n}\n\n\n", "file_path": "src/metadatas.rs", "rank": 33, "score": 79880.07993859441 }, { "content": "fn extra_files(raw_gtfs: &gtfs_structures::RawGtfs) -> Vec<Issue> {\n\n raw_gtfs\n\n .files\n\n .iter()\n\n .filter(|f| {\n\n !MANDATORY_FILES.iter().any(|m| f.ends_with(m))\n\n && !OPTIONAL_FILES.iter().any(|o| f.ends_with(o))\n\n })\n\n .map(|f| {\n\n Issue::new(Severity::Information, IssueType::ExtraFile, f)\n\n .details(\"This file shouldn’t be in the archive\")\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/validators/file_presence.rs", "rank": 34, "score": 79549.31086800696 }, { "content": "fn missing_files(raw_gtfs: &gtfs_structures::RawGtfs) -> Vec<Issue> {\n\n MANDATORY_FILES\n\n .iter()\n\n .filter(|m| !raw_gtfs.files.iter().any(|f| f.ends_with(*m)))\n\n .map(|m| {\n\n Issue::new(Severity::Fatal, IssueType::MissingMandatoryFile, m)\n\n .details(\"The mandatory file was not found\")\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/validators/file_presence.rs", "rank": 35, "score": 79549.31086800696 }, { "content": "#[test]\n\nfn test_no_fares_no_shapes() {\n\n let raw_gtfs =\n\n gtfs_structures::RawGtfs::new(\"test_data/no_fares_no_shapes\").expect(\"Failed to load data\");\n\n let metadatas = extract_metadata(&raw_gtfs);\n\n assert!(!metadatas.has_fares);\n\n assert!(!metadatas.has_shapes);\n\n}\n\n\n", "file_path": "src/metadatas.rs", "rank": 36, "score": 78207.60308459819 }, { "content": "fn has_on_demand_pickup_dropoff(\n\n stop_time: &gtfs_structures::RawStopTime,\n\n pickup_dropoff_type: gtfs_structures::PickupDropOffType,\n\n) -> bool {\n\n has_on_demand_pickup(stop_time, pickup_dropoff_type)\n\n || has_on_demand_dropoff(stop_time, pickup_dropoff_type)\n\n}\n\n\n", "file_path": "src/metadatas.rs", "rank": 37, "score": 78207.60308459819 }, { "content": "fn get_related_stop_ids(issue: &issues::Issue) -> Vec<String> {\n\n let related_objects = &issue.related_objects;\n\n related_objects\n\n .iter()\n\n .filter(|o| o.object_type == Some(ObjectType::Stop))\n\n .map(|s| s.id.clone())\n\n .collect()\n\n}\n\n\n", "file_path": "src/visualization.rs", "rank": 38, "score": 75920.47235896728 }, { "content": "#[test]\n\nfn test_stop_need_phone_agency() {\n\n let raw_gtfs = gtfs_structures::RawGtfs::new(\"test_data/arrange_by_phone_stops\")\n\n .expect(\"Failed to load data\");\n\n let metadatas = extract_metadata(&raw_gtfs);\n\n assert!(metadatas.some_stops_need_phone_agency);\n\n assert!(!metadatas.some_stops_need_phone_driver);\n\n}\n\n\n", "file_path": "src/metadatas.rs", "rank": 39, "score": 75123.66391768766 }, { "content": "#[test]\n\nfn test_stop_need_phone_driver() {\n\n let raw_gtfs = gtfs_structures::RawGtfs::new(\"test_data/coordinate_with_driver_stops\")\n\n .expect(\"Failed to load data\");\n\n let metadatas = extract_metadata(&raw_gtfs);\n\n assert!(!metadatas.some_stops_need_phone_agency);\n\n assert!(metadatas.some_stops_need_phone_driver);\n\n}\n", "file_path": "src/metadatas.rs", "rank": 40, "score": 75123.66391768766 }, { "content": "#[test]\n\nfn test_valid() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/stops\").unwrap();\n\n let issues = validate(&gtfs);\n\n let invalid_coord_issue: Vec<_> = issues\n\n .iter()\n\n .filter(|issue| issue.issue_type == IssueType::InvalidCoordinates)\n\n .collect();\n\n\n\n assert_eq!(1, invalid_coord_issue.len());\n\n assert_eq!(\"PARENT\", invalid_coord_issue[0].object_id);\n\n assert_eq!(\n\n IssueType::InvalidCoordinates,\n\n invalid_coord_issue[0].issue_type\n\n );\n\n}\n\n\n", "file_path": "src/validators/stops.rs", "rank": 41, "score": 75011.19712260446 }, { "content": "#[test]\n\nfn test_valid() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/shapes\").unwrap();\n\n let issues = validate(&gtfs);\n\n let invalid_coord_issue: Vec<_> = issues\n\n .iter()\n\n .filter(|issue| issue.issue_type == IssueType::InvalidCoordinates)\n\n .collect();\n\n\n\n assert_eq!(1, invalid_coord_issue.len());\n\n assert_eq!(\"A_shp\", invalid_coord_issue[0].object_id);\n\n assert_eq!(\n\n IssueType::InvalidCoordinates,\n\n invalid_coord_issue[0].issue_type\n\n );\n\n}\n", "file_path": "src/validators/shapes.rs", "rank": 42, "score": 75011.19712260446 }, { "content": "pub fn generate_issue_visualization(\n\n issue: &issues::Issue,\n\n gtfs: &Gtfs,\n\n) -> Option<FeatureCollection> {\n\n match issue.object_type {\n\n Some(ObjectType::Stop) => {\n\n let stop_id = issue.object_id.clone();\n\n let related_stop_ids = get_related_stop_ids(issue);\n\n\n\n // a vec containing the stop_id and the related stop ids features\n\n let stop_features: Vec<_> = [stop_id.clone()]\n\n .iter()\n\n .chain(related_stop_ids.iter())\n\n .map(|stop_id| geojson_feature_point(&stop_id, gtfs))\n\n .flatten()\n\n .collect();\n\n\n\n let line_string_features: Vec<_> = related_stop_ids\n\n .iter()\n\n .map(|related_stop| {\n", "file_path": "src/visualization.rs", "rank": 43, "score": 74130.28182122562 }, { "content": "#[test]\n\nfn test_valid_timezone() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/agency\").unwrap();\n\n let issues = validate(&gtfs);\n\n let invalid_tz_issue: Vec<_> = issues\n\n .iter()\n\n .filter(|issue| issue.issue_type == IssueType::InvalidTimezone)\n\n .collect();\n\n\n\n assert_eq!(1, invalid_tz_issue.len());\n\n assert_eq!(\"BIBUS\", invalid_tz_issue[0].object_name.as_ref().unwrap());\n\n assert_eq!(IssueType::InvalidTimezone, invalid_tz_issue[0].issue_type);\n\n}\n\n\n", "file_path": "src/validators/agency.rs", "rank": 44, "score": 73805.39481550941 }, { "content": "#[test]\n\nfn test_valid_url() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/agency\").unwrap();\n\n let issues = validate(&gtfs);\n\n let invalid_url_issue: Vec<_> = issues\n\n .iter()\n\n .filter(|issue| issue.issue_type == IssueType::InvalidUrl)\n\n .filter(|issue| issue.object_name == Some(\"Ter\".to_string()))\n\n .collect();\n\n\n\n assert_eq!(1, invalid_url_issue.len());\n\n assert_eq!(\"2\", invalid_url_issue[0].object_id);\n\n assert_eq!(IssueType::InvalidUrl, invalid_url_issue[0].issue_type);\n\n}\n", "file_path": "src/validators/agency.rs", "rank": 45, "score": 73805.39481550941 }, { "content": "#[test]\n\nfn test_valid() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/route_type_invalid\").unwrap();\n\n let issues = validate(&gtfs);\n\n\n\n assert_eq!(1, issues.len());\n\n assert_eq!(\"CITY\", issues[0].object_id);\n\n assert_eq!(IssueType::InvalidRouteType, issues[0].issue_type);\n\n}\n\n\n", "file_path": "src/validators/route_type.rs", "rank": 46, "score": 73805.39481550941 }, { "content": "fn make_invalid_parent_issue<T: gtfs_structures::Id + gtfs_structures::Type + std::fmt::Display>(\n\n o: &T,\n\n) -> Issue {\n\n Issue::new_with_obj(Severity::Warning, IssueType::InvalidStopParent, o)\n\n}\n\n\n", "file_path": "src/validators/stops.rs", "rank": 47, "score": 73662.770588051 }, { "content": "fn make_missing_coord_issue<T: gtfs_structures::Id + gtfs_structures::Type + std::fmt::Display>(\n\n o: &T,\n\n) -> Issue {\n\n Issue::new_with_obj(Severity::Warning, IssueType::MissingCoordinates, o)\n\n}\n", "file_path": "src/validators/stops.rs", "rank": 48, "score": 73662.770588051 }, { "content": "fn make_invalid_coord_issue<T: gtfs_structures::Id + gtfs_structures::Type + std::fmt::Display>(\n\n o: &T,\n\n) -> Issue {\n\n Issue::new_with_obj(Severity::Error, IssueType::InvalidCoordinates, o)\n\n}\n\n\n", "file_path": "src/validators/stops.rs", "rank": 49, "score": 73662.770588051 }, { "content": "#[test]\n\nfn test_valid_currency() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/fare_attributes\").unwrap();\n\n let issues = validate(&gtfs);\n\n let invalid_currency_issue: Vec<_> = issues\n\n .iter()\n\n .filter(|issue| issue.issue_type == IssueType::InvalidCurrency)\n\n .collect();\n\n\n\n assert_eq!(1, invalid_currency_issue.len());\n\n assert_eq!(\"61\", invalid_currency_issue[0].object_id);\n\n assert_eq!(\n\n IssueType::InvalidCurrency,\n\n invalid_currency_issue[0].issue_type\n\n );\n\n}\n\n\n", "file_path": "src/validators/fare_attributes.rs", "rank": 50, "score": 72653.0106143294 }, { "content": "#[test]\n\nfn test_valid_lang() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/feed_info\").unwrap();\n\n let issues = validate(&gtfs);\n\n let invalid_lang_issue: Vec<_> = issues\n\n .iter()\n\n .filter(|issue| issue.issue_type == IssueType::InvalidLanguage)\n\n .filter(|issue| issue.object_name == Some(\"SNCF\".to_string()))\n\n .collect();\n\n assert_eq!(1, invalid_lang_issue.len());\n\n assert_eq!(IssueType::InvalidLanguage, invalid_lang_issue[0].issue_type);\n\n}\n\n\n", "file_path": "src/validators/feed_info.rs", "rank": 51, "score": 72653.0106143294 }, { "content": "#[test]\n\nfn test_valid_url() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/feed_info\").unwrap();\n\n let issues = validate(&gtfs);\n\n let invalid_url_issue: Vec<_> = issues\n\n .iter()\n\n .filter(|issue| issue.issue_type == IssueType::InvalidUrl)\n\n .filter(|issue| issue.object_name == Some(\"BIBUS\".to_string()))\n\n .collect();\n\n\n\n assert_eq!(1, invalid_url_issue.len());\n\n assert_eq!(IssueType::InvalidUrl, invalid_url_issue[0].issue_type);\n\n}\n\n\n", "file_path": "src/validators/feed_info.rs", "rank": 52, "score": 72653.0106143294 }, { "content": "#[test]\n\nfn test_valid_transfers() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/fare_attributes\").unwrap();\n\n let issues = validate(&gtfs);\n\n let invalid_transfers_issue: Vec<_> = issues\n\n .iter()\n\n .filter(|issue| issue.issue_type == IssueType::InvalidTransfers)\n\n .collect();\n\n\n\n assert_eq!(1, invalid_transfers_issue.len());\n\n assert_eq!(\"61\", invalid_transfers_issue[0].object_id);\n\n assert_eq!(\n\n IssueType::InvalidTransfers,\n\n invalid_transfers_issue[0].issue_type\n\n );\n\n}\n", "file_path": "src/validators/fare_attributes.rs", "rank": 53, "score": 72653.0106143294 }, { "content": "fn make_missing_id_issue<T: gtfs_structures::Id + gtfs_structures::Type + std::fmt::Display>(\n\n o: &T,\n\n) -> Issue {\n\n Issue::new_with_obj(Severity::Error, IssueType::MissingId, o)\n\n}\n\n\n", "file_path": "src/validators/check_id.rs", "rank": 54, "score": 72423.69704774255 }, { "content": "fn make_unused_stop_issue<T: gtfs_structures::Id + gtfs_structures::Type + std::fmt::Display>(\n\n o: &T,\n\n) -> Issue {\n\n Issue::new_with_obj(Severity::Information, IssueType::UnusedStop, o)\n\n}\n\n\n", "file_path": "src/validators/unused_stop.rs", "rank": 55, "score": 72423.69704774255 }, { "content": "#[test]\n\nfn test_valid_lang_upper() {\n\n assert!(valid_lang(&gtfs_structures::FeedInfo {\n\n name: \"bob\".to_owned(),\n\n url: \"http://bob.com\".to_owned(),\n\n lang: \"FR\".to_owned(),\n\n start_date: None,\n\n end_date: None,\n\n version: None,\n\n contact_email: None,\n\n contact_url: None,\n\n default_lang: None\n\n }));\n\n}\n", "file_path": "src/validators/feed_info.rs", "rank": 56, "score": 71550.4202184144 }, { "content": "#[test]\n\nfn test() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/check_id\").unwrap();\n\n let issues = validate(&gtfs);\n\n let stop_id_issues: Vec<_> = issues\n\n .iter()\n\n .filter(|issue| issue.object_name == Some(\"Null Island\".to_string()))\n\n .collect();\n\n\n\n assert_eq!(1, stop_id_issues.len());\n\n assert_eq!(\n\n \"Null Island\",\n\n stop_id_issues[0].object_name.as_ref().unwrap()\n\n );\n\n assert_eq!(IssueType::MissingId, stop_id_issues[0].issue_type);\n\n}\n", "file_path": "src/validators/check_id.rs", "rank": 58, "score": 69521.08661240696 }, { "content": "#[test]\n\nfn test() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/stop_times_location_type\").unwrap();\n\n let issues = dbg!(validate(&gtfs));\n\n\n\n assert_eq!(1, issues.len());\n\n let first_issue = &issues[0];\n\n assert_eq!(\n\n IssueType::InvalidStopLocationTypeInTrip,\n\n first_issue.issue_type\n\n );\n\n assert_eq!(\"STOP_AREA\", first_issue.object_id);\n\n // 2 trips are linked to the stop 'STOP_AREA'\n\n assert_eq!(2, first_issue.related_objects.len());\n\n}\n", "file_path": "src/validators/stop_times.rs", "rank": 59, "score": 69521.08661240696 }, { "content": "#[test]\n\nfn test() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/duration_distance\").unwrap();\n\n let mut issues = validate(&gtfs);\n\n issues.sort_by(|a, b| a.issue_type.cmp(&b.issue_type));\n\n\n\n assert_eq!(5, issues.len());\n\n\n\n assert_eq!(IssueType::Slow, issues[0].issue_type);\n\n assert_eq!(\"near1\", issues[0].object_id);\n\n assert_eq!(String::from(\"near2\"), issues[0].related_objects[0].id);\n\n assert_eq!(Some(String::from(\"Near1\")), issues[0].object_name);\n\n\n\n assert_eq!(IssueType::ExcessiveSpeed, issues[1].issue_type);\n\n assert_eq!(\"near1\", issues[1].object_id);\n\n assert_eq!(String::from(\"null\"), issues[1].related_objects[0].id);\n\n assert_eq!(Some(String::from(\"Near1\")), issues[1].object_name);\n\n\n\n assert_eq!(IssueType::NegativeTravelTime, issues[2].issue_type);\n\n assert_eq!(\"near1\", issues[2].object_id);\n\n assert_eq!(String::from(\"near2\"), issues[2].related_objects[0].id);\n", "file_path": "src/validators/duration_distance.rs", "rank": 60, "score": 69521.08661240696 }, { "content": "#[test]\n\nfn test_missing() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/stops\").unwrap();\n\n let issues = validate(&gtfs);\n\n let missing_coord_issue: Vec<_> = issues\n\n .iter()\n\n .filter(|issue| issue.issue_type == IssueType::MissingCoordinates)\n\n .collect();\n\n\n\n assert_eq!(1, missing_coord_issue.len());\n\n assert_eq!(\"AMV\", missing_coord_issue[0].object_id);\n\n assert_eq!(\n\n IssueType::MissingCoordinates,\n\n missing_coord_issue[0].issue_type\n\n );\n\n}\n\n\n", "file_path": "src/validators/stops.rs", "rank": 61, "score": 69521.08661240696 }, { "content": "#[test]\n\nfn test() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/unused_stop\").unwrap();\n\n let issues = validate(&gtfs);\n\n\n\n assert_eq!(1, issues.len());\n\n assert_eq!(\"BOGUS\", issues[0].object_id);\n\n}\n", "file_path": "src/validators/unused_stop.rs", "rank": 62, "score": 69521.08661240696 }, { "content": "#[test]\n\nfn test() {\n\n use crate::issues::RelatedObject;\n\n let gtfs = gtfs_structures::RawGtfs::new(\"test_data/invalid_references\").unwrap();\n\n let issues = validate(&gtfs);\n\n\n\n assert_eq!(issues.len(), 6);\n\n\n\n let unknown_stop_issue = issues\n\n .iter()\n\n .find(|i| i.object_id == \"unknown_stop\")\n\n .expect(\"impossible to find the issue\");\n\n\n\n assert_eq!(unknown_stop_issue.issue_type, IssueType::InvalidReference);\n\n assert_eq!(unknown_stop_issue.object_type, Some(ObjectType::Stop));\n\n assert_eq!(\n\n unknown_stop_issue.details,\n\n Some(\"The stop is referenced by a stop time but does not exists\".to_owned())\n\n );\n\n\n\n let unknown_trip_issue = issues\n", "file_path": "src/validators/invalid_reference.rs", "rank": 63, "score": 69521.08661240696 }, { "content": "#[test]\n\nfn test_stop_parent() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/stops\").unwrap();\n\n let issues = validate(&gtfs);\n\n let invalid_coord_issue: Vec<_> = dbg!(issues\n\n .iter()\n\n .filter(|issue| issue.issue_type == IssueType::InvalidStopParent)\n\n .collect());\n\n\n\n assert_eq!(6, invalid_coord_issue.len());\n\n let stop_area_issue = invalid_coord_issue\n\n .iter()\n\n .find(|i| i.object_id == \"BEATTY_AIRPORT\")\n\n .expect(\"impossible to find the BEATTY_AIRPORT issue\");\n\n assert_eq!(\n\n Some(\"it's not valid for a stop area to have a parent station\".to_owned()),\n\n stop_area_issue.details\n\n );\n\n assert_eq!(\n\n vec![crate::issues::RelatedObject {\n\n id: \"BULLFROG\".to_owned(),\n", "file_path": "src/validators/stops.rs", "rank": 64, "score": 68169.9150542312 }, { "content": "fn distance_and_duration(\n\n departure: &gtfs_structures::StopTime,\n\n arrival: &gtfs_structures::StopTime,\n\n) -> Option<(f64, f64)> {\n\n match (\n\n arrival.arrival_time,\n\n departure.departure_time,\n\n departure.stop.longitude,\n\n departure.stop.latitude,\n\n arrival.stop.longitude,\n\n arrival.stop.latitude,\n\n ) {\n\n (Some(arrival), Some(departure), Some(d_lon), Some(d_lat), Some(a_lon), Some(a_lat)) => {\n\n let dep_point = geo::Point::new(d_lon, d_lat);\n\n let arr_point = geo::Point::new(a_lon, a_lat);\n\n let duration = f64::from(arrival) - f64::from(departure);\n\n let distance = dep_point.haversine_distance(&arr_point);\n\n\n\n Some((distance, duration))\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "src/validators/duration_distance.rs", "rank": 65, "score": 68169.9150542312 }, { "content": "#[test]\n\nfn test_duplicates() {\n\n // in the dataset, every last line has been duplicated\n\n let gtfs = gtfs_structures::RawGtfs::new(\"test_data/duplicates\").unwrap();\n\n let issues = validate(&gtfs);\n\n assert_eq!(5, issues.len());\n\n assert_eq!(\"stop5\", issues[0].object_id);\n\n assert_eq!(IssueType::DuplicateObjectId, issues[0].issue_type);\n\n assert_eq!(\n\n Some(gtfs_structures::ObjectType::Stop),\n\n issues[0].object_type\n\n );\n\n\n\n assert_eq!(\"CITY\", issues[1].object_id);\n\n assert_eq!(IssueType::DuplicateObjectId, issues[1].issue_type);\n\n assert_eq!(\n\n Some(gtfs_structures::ObjectType::Route),\n\n issues[1].object_type\n\n );\n\n\n\n assert_eq!(\"AAMV4\", issues[2].object_id);\n", "file_path": "src/validators/raw_gtfs.rs", "rank": 66, "score": 68169.9150542312 }, { "content": "#[test]\n\nfn test_stops() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/check_name\").unwrap();\n\n let issues = validate(&gtfs);\n\n let stop_name_issues: Vec<_> = issues\n\n .iter()\n\n .filter(|issue| issue.object_id == \"close1\".to_string())\n\n .collect();\n\n\n\n assert_eq!(1, stop_name_issues.len());\n\n assert_eq!(\"close1\", stop_name_issues[0].object_id);\n\n assert_eq!(IssueType::MissingName, stop_name_issues[0].issue_type);\n\n}\n\n\n", "file_path": "src/validators/check_name.rs", "rank": 67, "score": 68169.9150542312 }, { "content": "#[test]\n\nfn test_missing_coord() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/shapes\").unwrap();\n\n let issues = validate(&gtfs);\n\n let missing_coord_issue: Vec<_> = issues\n\n .iter()\n\n .filter(|issue| issue.issue_type == IssueType::MissingCoordinates)\n\n .collect();\n\n\n\n assert_eq!(1, missing_coord_issue.len());\n\n assert_eq!(\"A_shp\", missing_coord_issue[0].object_id);\n\n assert_eq!(\n\n IssueType::MissingCoordinates,\n\n missing_coord_issue[0].issue_type\n\n );\n\n}\n\n\n", "file_path": "src/validators/shapes.rs", "rank": 68, "score": 68169.9150542312 }, { "content": "#[test]\n\nfn test_missing() {\n\n let raw = gtfs_structures::RawGtfs::new(\"test_data/missing_mandatory_files\").unwrap();\n\n let validations = missing_files(&raw);\n\n assert_eq!(1, validations.len());\n\n assert_eq!(IssueType::MissingMandatoryFile, validations[0].issue_type);\n\n assert_eq!(\"stop_times.txt\", validations[0].object_id);\n\n assert_eq!(Severity::Fatal, validations[0].severity);\n\n}\n\n\n", "file_path": "src/validators/file_presence.rs", "rank": 69, "score": 68169.9150542312 }, { "content": "#[test]\n\nfn test_routes() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/check_name\").unwrap();\n\n let issues = validate(&gtfs);\n\n let route_name_issues: Vec<_> = issues\n\n .iter()\n\n .filter(|issue| issue.object_id == \"35\".to_string())\n\n .collect();\n\n\n\n assert_eq!(1, route_name_issues.len());\n\n assert_eq!(\"35\", route_name_issues[0].object_id);\n\n assert_eq!(IssueType::MissingName, route_name_issues[0].issue_type);\n\n}\n\n\n", "file_path": "src/validators/check_name.rs", "rank": 70, "score": 68169.9150542312 }, { "content": "#[test]\n\nfn test_extra() {\n\n let raw = gtfs_structures::RawGtfs::new(\"test_data/missing_mandatory_files\").unwrap();\n\n let validations = extra_files(&raw);\n\n assert_eq!(1, validations.len());\n\n assert_eq!(IssueType::ExtraFile, validations[0].issue_type);\n\n assert_eq!(Severity::Information, validations[0].severity);\n\n}\n", "file_path": "src/validators/file_presence.rs", "rank": 71, "score": 68169.9150542312 }, { "content": "#[test]\n\nfn test_agencies() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/check_name\").unwrap();\n\n let issues = validate(&gtfs);\n\n let agency_name_issues: Vec<_> = issues\n\n .iter()\n\n .filter(|issue| issue.object_id == \"1\".to_string())\n\n .collect();\n\n\n\n assert_eq!(1, agency_name_issues.len());\n\n assert_eq!(\"1\", agency_name_issues[0].object_id);\n\n assert_eq!(IssueType::MissingName, agency_name_issues[0].issue_type);\n\n}\n\n\n", "file_path": "src/validators/check_name.rs", "rank": 72, "score": 68169.9150542312 }, { "content": "#[test]\n\nfn test_missing_url() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/agency\").unwrap();\n\n let issues = validate(&gtfs);\n\n let missing_url_issue: Vec<_> = issues\n\n .iter()\n\n .filter(|issue| issue.issue_type == IssueType::MissingUrl)\n\n .collect();\n\n\n\n assert_eq!(1, missing_url_issue.len());\n\n assert_eq!(\"BIBUS\", missing_url_issue[0].object_name.as_ref().unwrap());\n\n assert_eq!(IssueType::MissingUrl, missing_url_issue[0].issue_type);\n\n}\n\n\n", "file_path": "src/validators/agency.rs", "rank": 73, "score": 68169.9150542312 }, { "content": "#[test]\n\nfn test_missing() {\n\n let validations =\n\n crate::validate::generate_validation(\"test_data/route_type_missing\", 10).validations;\n\n let invalid_archive_validations = validations.get(&IssueType::UnloadableModel).unwrap();\n\n\n\n assert_eq!(1, invalid_archive_validations.len());\n\n assert_eq!(Severity::Fatal, invalid_archive_validations[0].severity);\n\n assert_eq!(\n\n IssueType::UnloadableModel,\n\n invalid_archive_validations[0].issue_type\n\n );\n\n}\n", "file_path": "src/validators/route_type.rs", "rank": 75, "score": 68169.9150542312 }, { "content": "#[test]\n\nfn test_stop_points() {\n\n // in the `interpolated_stop_times` GTFS, there are 2 trips\n\n // Trip 1 has a stop time without departure/arrival, but it's neither the first nor the last, so it's no problem\n\n // Trip 2 has its first stop time without departure/arrival, so we create and issue\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/interpolated_stop_times\").unwrap();\n\n let issues = validate(&gtfs);\n\n\n\n assert_eq!(1, issues.len());\n\n let issue = &issues[0];\n\n\n\n assert_eq!(\n\n issue.issue_type,\n\n IssueType::ImpossibleToInterpolateStopTimes\n\n );\n\n assert_eq!(\"trip2\", issue.object_id);\n\n}\n", "file_path": "src/validators/interpolated_stoptimes.rs", "rank": 76, "score": 66889.03354189124 }, { "content": "#[test]\n\nfn test_missing_lang() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/feed_info\").unwrap();\n\n let issues = validate(&gtfs);\n\n let missing_lang_issue: Vec<_> = issues\n\n .iter()\n\n .filter(|issue| issue.issue_type == IssueType::MissingLanguage)\n\n .filter(|issue| issue.object_name == Some(\"BIBUS\".to_string()))\n\n .collect();\n\n\n\n assert_eq!(1, missing_lang_issue.len());\n\n assert_eq!(IssueType::MissingLanguage, missing_lang_issue[0].issue_type);\n\n}\n\n\n", "file_path": "src/validators/feed_info.rs", "rank": 77, "score": 66889.03354189124 }, { "content": "#[test]\n\nfn test_missing_url() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/feed_info\").unwrap();\n\n let issues = validate(&gtfs);\n\n let missing_url_issue: Vec<_> = issues\n\n .iter()\n\n .filter(|issue| issue.issue_type == IssueType::MissingUrl)\n\n .collect();\n\n\n\n assert_eq!(1, missing_url_issue.len());\n\n assert_eq!(\"SNCF\", missing_url_issue[0].object_name.as_ref().unwrap());\n\n assert_eq!(IssueType::MissingUrl, missing_url_issue[0].issue_type);\n\n}\n\n\n", "file_path": "src/validators/feed_info.rs", "rank": 78, "score": 66889.03354189124 }, { "content": "#[test]\n\nfn test_stop_entrances() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/duplicate_stops\").unwrap();\n\n let issues = validate(&gtfs);\n\n let entrance_issues: Vec<_> = issues\n\n .iter()\n\n .filter(|issue| issue.issue_type == IssueType::DuplicateStops)\n\n .filter(|issue| issue.object_name == Some(\"Stop Entrance\".to_string()))\n\n .collect();\n\n\n\n assert_eq!(0, entrance_issues.len());\n\n}\n", "file_path": "src/validators/duplicate_stops.rs", "rank": 79, "score": 66889.03354189124 }, { "content": "#[test]\n\nfn test_stop_areas() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/duplicate_stops\").unwrap();\n\n let issues = validate(&gtfs);\n\n let duplicate_stops_issues: Vec<_> = issues\n\n .iter()\n\n .filter(|issue| issue.issue_type == IssueType::DuplicateStops)\n\n .filter(|issue| issue.object_name == Some(\"Stop Area\".to_string()))\n\n .collect();\n\n\n\n assert_eq!(1, duplicate_stops_issues.len());\n\n assert_eq!(\n\n \"Stop Area\",\n\n duplicate_stops_issues[0].object_name.as_ref().unwrap()\n\n );\n\n}\n\n\n", "file_path": "src/validators/duplicate_stops.rs", "rank": 80, "score": 66889.03354189124 }, { "content": "#[test]\n\nfn test_feed_info() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/check_name\").unwrap();\n\n let issues = validate(&gtfs);\n\n let publisher_name_issues: Vec<_> = issues\n\n .iter()\n\n .filter(|issue| issue.object_id == \"\".to_string())\n\n .collect();\n\n\n\n assert_eq!(1, publisher_name_issues.len());\n\n assert_eq!(\"\", publisher_name_issues[0].object_id);\n\n assert_eq!(IssueType::MissingName, publisher_name_issues[0].issue_type);\n\n}\n", "file_path": "src/validators/check_name.rs", "rank": 81, "score": 66889.03354189124 }, { "content": "#[test]\n\nfn test_stop_points() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/duplicate_stops\").unwrap();\n\n let issues = validate(&gtfs);\n\n let duplicate_stops_issues: Vec<_> = issues\n\n .iter()\n\n .filter(|issue| issue.issue_type == IssueType::DuplicateStops)\n\n .filter(|issue| issue.object_name == Some(\"Stop Point\".to_string()))\n\n .collect();\n\n\n\n assert_eq!(1, duplicate_stops_issues.len());\n\n assert_eq!(\n\n \"Stop Point\",\n\n duplicate_stops_issues[0].object_name.as_ref().unwrap()\n\n );\n\n}\n\n\n", "file_path": "src/validators/duplicate_stops.rs", "rank": 82, "score": 66889.03354189124 }, { "content": "#[test]\n\nfn test_missing_price() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/fare_attributes\").unwrap();\n\n let issues = validate(&gtfs);\n\n let missing_price_issue: Vec<_> = issues\n\n .iter()\n\n .filter(|issue| issue.issue_type == IssueType::MissingPrice)\n\n .collect();\n\n\n\n assert_eq!(1, missing_price_issue.len());\n\n assert_eq!(\"50\", missing_price_issue[0].object_id);\n\n assert_eq!(IssueType::MissingPrice, missing_price_issue[0].issue_type);\n\n}\n\n\n", "file_path": "src/validators/fare_attributes.rs", "rank": 83, "score": 66889.03354189124 }, { "content": "#[test]\n\nfn test_optimisation_route_trips() {\n\n let gtfs = gtfs_structures::Gtfs::new(\"test_data/optimisation_route_trips\").unwrap();\n\n let mut issues = validate(&gtfs);\n\n\n\n assert_eq!(1, issues.len());\n\n // irrelevant to the test, but this acts as a guard in case someone modifies the fixtures\n\n assert_eq!(IssueType::CloseStops, issues[0].issue_type);\n\n\n\n // the routes order (for objects with index 1 and 2) is apparently non deterministic, for some\n\n // reason, so we sort the array to get a stable order and avoid random test failures\n\n issues[0].related_objects.sort_by(|a, b| a.id.cmp(&b.id));\n\n\n\n assert_eq!(3, issues[0].related_objects.len());\n\n\n\n // we would normally find N trips here, but we optimised the payload by\n\n // referring only to the parent route, and making sure each route appears only once.\n\n assert_eq!(\"route1\", issues[0].related_objects[0].id);\n\n assert_eq!(\"route2\", issues[0].related_objects[1].id);\n\n assert_eq!(\"stop002\", issues[0].related_objects[2].id);\n\n}\n", "file_path": "src/validators/duration_distance.rs", "rank": 84, "score": 65673.09622876265 }, { "content": "fn valid_url(agency: &gtfs_structures::Agency) -> bool {\n\n match url::Url::parse(agency.url.as_ref()) {\n\n Ok(url) => vec![\"https\", \"http\", \"ftp\"].contains(&url.scheme()),\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/validators/agency.rs", "rank": 85, "score": 65536.3182020334 }, { "content": "fn valid_coord(stop: &gtfs_structures::Stop) -> bool {\n\n match (stop.longitude, stop.latitude) {\n\n (Some(lon), Some(lat)) => (-180.0..=180.0).contains(&lon) && (-90.0..=90.0).contains(&lat),\n\n _ => false, // there is already an issue if the coord is missing\n\n }\n\n}\n\n\n", "file_path": "src/validators/stops.rs", "rank": 86, "score": 65536.3182020334 }, { "content": "fn valid_coord(shape: &gtfs_structures::Shape) -> bool {\n\n ((shape.longitude <= 180.0) && (shape.longitude >= -180.0))\n\n && ((shape.latitude <= 90.0) && (shape.latitude >= -90.0))\n\n}\n\n\n", "file_path": "src/validators/shapes.rs", "rank": 87, "score": 65536.3182020334 }, { "content": "fn valid_timezone(agency: &gtfs_structures::Agency) -> bool {\n\n let tz: Result<chrono_tz::Tz, _> = agency.timezone.parse();\n\n tz.is_ok()\n\n}\n\n\n", "file_path": "src/validators/agency.rs", "rank": 88, "score": 65536.3182020334 }, { "content": "fn valid_url(feed: &gtfs_structures::FeedInfo) -> bool {\n\n url::Url::parse(feed.url.as_ref())\n\n .map(|url| vec![\"https\", \"http\", \"ftp\"].contains(&url.scheme()))\n\n .unwrap_or(false)\n\n}\n\n\n", "file_path": "src/validators/feed_info.rs", "rank": 89, "score": 63630.92246993824 }, { "content": "fn valid_lang(feed: &gtfs_structures::FeedInfo) -> bool {\n\n let lang = feed.lang.to_lowercase();\n\n let len = lang.len();\n\n match len {\n\n 2 => isolang::Language::from_639_1(&lang).is_some(),\n\n 3 => isolang::Language::from_639_3(&lang).is_some(),\n\n 4..=11 => isolang::Language::from_locale(&lang).is_some(),\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/validators/feed_info.rs", "rank": 90, "score": 63630.92246993824 }, { "content": "fn valid_transfers(fare_attributes: &gtfs_structures::FareAttribute) -> bool {\n\n !matches!(\n\n fare_attributes.transfers,\n\n gtfs_structures::Transfers::Other(_)\n\n )\n\n}\n\n\n", "file_path": "src/validators/fare_attributes.rs", "rank": 91, "score": 62733.30954057904 }, { "content": "fn valid_currency(fare_attributes: &gtfs_structures::FareAttribute) -> bool {\n\n iso4217::alpha3(&fare_attributes.currency).is_some()\n\n}\n\n\n", "file_path": "src/validators/fare_attributes.rs", "rank": 92, "score": 62733.30954057904 }, { "content": "fn valid_duration(fare_attributes: &gtfs_structures::FareAttribute) -> bool {\n\n fare_attributes.transfer_duration.is_none() || fare_attributes.transfer_duration >= Some(0)\n\n}\n\n\n", "file_path": "src/validators/fare_attributes.rs", "rank": 93, "score": 62733.30954057904 }, { "content": "fn has_coord(stop: &gtfs_structures::Stop) -> bool {\n\n match (stop.latitude, stop.longitude) {\n\n (Some(lon), Some(lat)) => lon != 0.0 && lat != 0.0,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/validators/stops.rs", "rank": 94, "score": 60571.86176532704 }, { "content": "fn has_coord(shape: &gtfs_structures::Shape) -> bool {\n\n shape.latitude != 0.0 && shape.longitude != 0.0\n\n}\n\n\n", "file_path": "src/validators/shapes.rs", "rank": 95, "score": 60571.86176532704 }, { "content": "fn has_url(agency: &gtfs_structures::Agency) -> bool {\n\n !agency.url.is_empty()\n\n}\n\n\n", "file_path": "src/validators/agency.rs", "rank": 96, "score": 60571.86176532704 }, { "content": "fn has_lang(feed: &gtfs_structures::FeedInfo) -> bool {\n\n !feed.lang.is_empty()\n\n}\n\n\n", "file_path": "src/validators/feed_info.rs", "rank": 98, "score": 58423.623570429256 }, { "content": "fn has_url(feed: &gtfs_structures::FeedInfo) -> bool {\n\n !feed.url.is_empty()\n\n}\n\n\n", "file_path": "src/validators/feed_info.rs", "rank": 99, "score": 58423.623570429256 } ]
Rust
src/conversion.rs
vilaureu/kml
6a17f7ba075e489aa6c301f9b5a3c2963e0d5b48
use std::convert::TryFrom; use crate::errors::Error; use crate::types::{ Coord, CoordType, Geometry, Kml, LineString, LinearRing, MultiGeometry, Point, Polygon, }; #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::Coordinate<T>> for Coord<T> where T: CoordType, { fn from(val: geo_types::Coordinate<T>) -> Coord<T> { Coord::from((val.x, val.y)) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<Coord<T>> for geo_types::Coordinate<T> where T: CoordType, { fn from(val: Coord<T>) -> geo_types::Coordinate<T> { geo_types::Coordinate::from((val.x, val.y)) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::Point<T>> for Point<T> where T: CoordType + Default, { fn from(val: geo_types::Point<T>) -> Point<T> { Point::from(Coord::from(val.0)) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<Point<T>> for geo_types::Point<T> where T: CoordType, { fn from(val: Point<T>) -> geo_types::Point<T> { geo_types::Point::from(geo_types::Coordinate::from(val.coord)) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::Line<T>> for LineString<T> where T: CoordType + Default, { fn from(val: geo_types::Line<T>) -> LineString<T> { LineString::from(vec![Coord::from(val.start), Coord::from(val.end)]) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::LineString<T>> for LineString<T> where T: CoordType + Default, { fn from(val: geo_types::LineString<T>) -> LineString<T> { LineString::from( val.0 .into_iter() .map(Coord::from) .collect::<Vec<Coord<T>>>(), ) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<LineString<T>> for geo_types::LineString<T> where T: CoordType, { fn from(val: LineString<T>) -> geo_types::LineString<T> { geo_types::LineString( val.coords .into_iter() .map(geo_types::Coordinate::from) .collect(), ) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::LineString<T>> for LinearRing<T> where T: CoordType + Default, { fn from(val: geo_types::LineString<T>) -> LinearRing<T> { LinearRing::from( val.0 .into_iter() .map(Coord::from) .collect::<Vec<Coord<T>>>(), ) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<LinearRing<T>> for geo_types::LineString<T> where T: CoordType, { fn from(val: LinearRing<T>) -> geo_types::LineString<T> { geo_types::LineString( val.coords .into_iter() .map(geo_types::Coordinate::from) .collect(), ) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::Polygon<T>> for Polygon<T> where T: CoordType + Default, { fn from(val: geo_types::Polygon<T>) -> Polygon<T> { let (outer, inner) = val.into_inner(); Polygon::new( LinearRing::from(outer), inner .into_iter() .map(LinearRing::from) .collect::<Vec<LinearRing<T>>>(), ) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::Rect<T>> for Polygon<T> where T: CoordType + Default, { fn from(val: geo_types::Rect<T>) -> Polygon<T> { Polygon::from(val.to_polygon()) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::Triangle<T>> for Polygon<T> where T: CoordType + Default, { fn from(val: geo_types::Triangle<T>) -> Polygon<T> { Polygon::from(val.to_polygon()) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<Polygon<T>> for geo_types::Polygon<T> where T: CoordType, { fn from(val: Polygon<T>) -> geo_types::Polygon<T> { geo_types::Polygon::new( geo_types::LineString::from(val.outer), val.inner .into_iter() .map(geo_types::LineString::from) .collect::<Vec<geo_types::LineString<T>>>(), ) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::MultiPoint<T>> for MultiGeometry<T> where T: CoordType + Default, { fn from(val: geo_types::MultiPoint<T>) -> MultiGeometry<T> { MultiGeometry::new( val.into_iter() .map(|p| Geometry::Point(Point::from(p))) .collect::<Vec<Geometry<T>>>(), ) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::MultiLineString<T>> for MultiGeometry<T> where T: CoordType + Default, { fn from(val: geo_types::MultiLineString<T>) -> MultiGeometry<T> { MultiGeometry::new( val.into_iter() .map(|l| Geometry::LineString(LineString::from(l))) .collect::<Vec<Geometry<T>>>(), ) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::MultiPolygon<T>> for MultiGeometry<T> where T: CoordType + Default, { fn from(val: geo_types::MultiPolygon<T>) -> MultiGeometry<T> { MultiGeometry::new( val.into_iter() .map(|p| Geometry::Polygon(Polygon::from(p))) .collect::<Vec<Geometry<T>>>(), ) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::GeometryCollection<T>> for MultiGeometry<T> where T: CoordType + Default, { fn from(val: geo_types::GeometryCollection<T>) -> MultiGeometry<T> { MultiGeometry::new( val.into_iter() .map(Geometry::from) .collect::<Vec<Geometry<T>>>(), ) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> TryFrom<MultiGeometry<T>> for geo_types::GeometryCollection<T> where T: CoordType, { type Error = Error; fn try_from(val: MultiGeometry<T>) -> Result<geo_types::GeometryCollection<T>, Self::Error> { Ok(geo_types::GeometryCollection( val.geometries .into_iter() .map(geo_types::Geometry::try_from) .collect::<Result<Vec<geo_types::Geometry<T>>, _>>()?, )) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::Geometry<T>> for Geometry<T> where T: CoordType + Default, { fn from(val: geo_types::Geometry<T>) -> Geometry<T> { match val { geo_types::Geometry::Point(p) => Geometry::Point(Point::from(p)), geo_types::Geometry::Line(l) => Geometry::LineString(LineString::from(l)), geo_types::Geometry::LineString(l) => Geometry::LineString(LineString::from(l)), geo_types::Geometry::Polygon(p) => Geometry::Polygon(Polygon::from(p)), geo_types::Geometry::MultiPoint(p) => Geometry::MultiGeometry(MultiGeometry::from(p)), geo_types::Geometry::MultiLineString(l) => { Geometry::MultiGeometry(MultiGeometry::from(l)) } geo_types::Geometry::MultiPolygon(p) => Geometry::MultiGeometry(MultiGeometry::from(p)), geo_types::Geometry::GeometryCollection(g) => { Geometry::MultiGeometry(MultiGeometry::from(g)) } geo_types::Geometry::Rect(r) => Geometry::Polygon(Polygon::from(r)), geo_types::Geometry::Triangle(t) => Geometry::Polygon(Polygon::from(t)), } } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> TryFrom<Geometry<T>> for geo_types::Geometry<T> where T: CoordType, { type Error = Error; fn try_from(val: Geometry<T>) -> Result<geo_types::Geometry<T>, Self::Error> { match val { Geometry::Point(p) => Ok(geo_types::Geometry::Point(geo_types::Point::from(p))), Geometry::LineString(l) => Ok(geo_types::Geometry::LineString( geo_types::LineString::from(l), )), Geometry::LinearRing(l) => Ok(geo_types::Geometry::LineString( geo_types::LineString::from(l), )), Geometry::Polygon(p) => Ok(geo_types::Geometry::Polygon(geo_types::Polygon::from(p))), Geometry::MultiGeometry(g) => Ok(geo_types::Geometry::GeometryCollection( geo_types::GeometryCollection::try_from(g)?, )), _ => Err(Error::InvalidGeometry("Can't convert geometry".to_string())), } } } fn process_kml<T>(k: Kml<T>) -> Result<Vec<geo_types::Geometry<T>>, Error> where T: CoordType, { match k { Kml::KmlDocument(d) => Ok(d .elements .into_iter() .flat_map(process_kml) .flatten() .collect()), Kml::Point(p) => Ok(vec![ geo_types::Geometry::Point(geo_types::Point::from(p)); 1 ]), Kml::LineString(l) => Ok(vec![ geo_types::Geometry::LineString( geo_types::LineString::from(l), ); 1 ]), Kml::LinearRing(l) => Ok(vec![ geo_types::Geometry::LineString( geo_types::LineString::from(l), ); 1 ]), Kml::Polygon(p) => Ok(vec![ geo_types::Geometry::Polygon(geo_types::Polygon::from( p )); 1 ]), Kml::MultiGeometry(g) => Ok(geo_types::GeometryCollection::try_from(g)?.0), Kml::Placemark(p) => Ok(if let Some(g) = p.geometry { vec![geo_types::Geometry::try_from(g)?; 1] } else { vec![] }), Kml::Document { elements, .. } => Ok(elements .into_iter() .flat_map(process_kml) .flatten() .collect()), Kml::Folder { elements, .. } => Ok(elements .into_iter() .flat_map(process_kml) .flatten() .collect()), _ => Ok(vec![]), } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] pub fn quick_collection<T>(k: Kml<T>) -> Result<geo_types::GeometryCollection<T>, Error> where T: CoordType, { Ok(geo_types::GeometryCollection(process_kml(k)?)) } #[cfg(test)] mod tests { use super::*; use crate::KmlDocument; use std::collections::HashMap; #[test] fn test_quick_collection() { let k = KmlDocument { elements: vec![ Kml::Point(Point::from(Coord::from((1., 1.)))), Kml::Folder { attrs: HashMap::new(), elements: vec![ Kml::LineString(LineString::from(vec![ Coord::from((1., 1.)), Coord::from((2., 2.)), ])), Kml::Point(Point::from(Coord::from((3., 3.)))), ], }, ], ..Default::default() }; let gc = geo_types::GeometryCollection(vec![ geo_types::Geometry::Point(geo_types::Point::from((1., 1.))), geo_types::Geometry::LineString(geo_types::LineString::from(vec![(1., 1.), (2., 2.)])), geo_types::Geometry::Point(geo_types::Point::from((3., 3.))), ]); assert_eq!(quick_collection(Kml::KmlDocument(k)).unwrap(), gc); } }
use std::convert::TryFrom; use crate::errors::Error; use crate::types::{ Coord, CoordType, Geometry, Kml, LineString, LinearRing, MultiGeometry, Point, Polygon, }; #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::Coordinate<T>> for Coord<T> where T: CoordType, { fn from(val: geo_types::Coordinate<T>) -> Coord<T> { Coord::from((val.x, val.y)) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<Coord<T>> for geo_types::Coordinate<T> where T: CoordType, { fn from(val: Coord<T>) -> geo_types::Coordinate<T> { geo_types::Coordinate::from((val.x, val.y)) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::Point<T>> for Point<T> where T: CoordType + Default, { fn from(val: geo_types::Point<T>) -> Point<T> { Point::from(Coord::from(val.0)) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<Point<T>> for geo_types::Point<T> where T: CoordType, { fn from(val: Point<T>) -> geo_types::Point<T> { geo_types::Point::from(geo_types::Coordinate::from(val.coord)) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::Line<T>> for LineString<T> where T: CoordType + Default, { fn from(val: geo_types::Line<T>) -> LineString<T> { LineString::from(vec![Coord::from(val.start), Coord::from(val.end)]) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::LineString<T>> for LineString<T> where T: CoordType + Default, { fn from(val: geo_types::LineString<T>) -> LineString<T> { LineString::from( val.0 .into_iter() .map(Coord::from) .collect::<Vec<Coord<T>>>(), ) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<LineString<T>> for geo_types::LineString<T> where T: CoordType, { fn from(val: LineString<T>) -> geo_types::LineString<T> { geo_types::LineString( val.coords .into_iter() .map(geo_types::Coordinate::from) .collect(), ) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::LineString<T>> for LinearRing<T> where T: CoordType + Default, { fn from(val: geo_types::LineString<T>) -> LinearRing<T> { LinearRing::from( val.0 .into_iter() .map(Coord::from) .collect::<Vec<Coord<T>>>(), ) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<LinearRing<T>> for geo_types::LineString<T> where T: CoordType, { fn from(val: LinearRing<T>) -> geo_types::LineString<T> { geo_types::LineString( val.coords .into_iter() .map(geo_types::Coordinate::from) .collect(), ) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::Polygon<T>> for Polygon<T> where T: CoordType + Default, { fn from(val: geo_types::Polygon<T>) -> Polygon<T> { let (outer, inner) = val.into_inner(); Polygon::new( LinearRing::from(outer), inner .into_iter() .map(LinearRing::from) .collect::<Vec<LinearRing<T>>>(), ) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::Rect<T>> for Polygon<T> where T: CoordType + Default, { fn from(val: geo_types::Rect<T>) -> Polygon<T> { Polygon::from(val.to_polygon()) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::Triangle<T>> for Polygon<T> where T: CoordType + Default, { fn from(val: geo_types::Triangle<T>) -> Polygon<T> { Polygon::from(val.to_polygon()) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<Polygon<T>> for geo_types::Polygon<T> where T: CoordType, { fn from(val: Polygon<T>) -> geo_types::Polygon<T> { geo_types::Polygon::new( geo_types::LineString::from(val.outer), val.inner .into_iter() .map(geo_types::LineString::from) .collect::<Vec<geo_types::LineString<T>>>(), ) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::MultiPoint<T>> for MultiGeometry<T> where T: CoordType + Default, { fn from(val: geo_types::MultiPoint<T>) -> MultiGeometry<T> { MultiGeometry::new( val.into_iter() .map(|p| Geometry::Point(Point::from(p))) .collect::<Vec<Geometry<T>>>(), ) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::MultiLineString<T>> for MultiGeometry<T> where T: CoordType + Default, { fn from(val: geo_types::MultiLineString<T>) -> MultiGeometry<T> { MultiGeometry::new( val.into_iter() .map(|l| Geometry::LineString(LineString::from(l))) .collect::<Vec<Geometry<T>>>(), ) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::MultiPolygon<T>> for MultiGeometry<T> where T: CoordType + Default, { fn from(val: geo_types::MultiPolygon<T>) -> MultiGeometry<T> { MultiGeometry::new( val.into_iter() .map(|p| Geometry::Polygon(Polygon::from(p))) .collect::<Vec<Geometry<T>>>(), ) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::GeometryCollection<T>> for MultiGeometry<T> where T: CoordType + Default, { fn from(val: geo_types::GeometryCollection<T>) -> MultiGeometry<T> { MultiGeometry::new( val.into_iter() .map(Geometry::from) .collect::<Vec<Geometry<T>>>(), ) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> TryFrom<MultiGeometry<T>> for geo_types::GeometryCollection<T> where T: CoordType, { type Error = Error; fn try_from(val: MultiGeometry<T>) -> Result<geo_types::GeometryCollection<T>, Self::Error> { Ok(geo_types::GeometryCollection( val.geometries .into_iter() .map(geo_types::Geometry::try_from) .collect::<Result<Vec<geo_types::Geometry<T>>, _>>()?, )) } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> From<geo_types::Geometry<T>> for Geometry<T> where T: CoordType + Default, { fn from(val: geo_types::Geometry<T>) -> Geometry<T> { match val { geo_types::Geometry::Point(p) => Geometry::Point(Point::from(p)), geo_types::Geometry::Line(l) => Geometry::LineString(LineString::from(l)), geo_types::Geometry::LineString(l) => Geometry::LineString(LineString::from(l)), geo_types::Geometry::Polygon(p) => Geometry::Polygon(Polygon::from(p)), geo_types::Geometry::MultiPoint(p) => Geometry::MultiGeometry(MultiGeometry::from(p)), geo_types::Geometry::MultiLineString(l) => { Geometry::MultiGeometry(MultiGeometry::from(l)) } geo_types::Geometry::MultiPolygon(p) => Geometry::MultiGeometry(MultiGeometry::from(p)), geo_types::Geometry::GeometryCollection(g) => { Geometry::MultiGeometry(MultiGeometry::from(g)) } geo_types::Geometry::Rect(r) => Geometry::Polygon(Polygon::from(r)), geo_types::Geometry::Triangle(t) => Geometry::Polygon(Polygon::from(t)), } } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] impl<T> TryFrom<Geometry<T>> for geo_types::Geometry<T> where T: CoordType, { type Error = Error; fn try_from(val: Geometry<T>) -> Result<geo_types::Geometry<T>, Self::Error> { match val { Geometry::Point(p) => Ok(geo_types::Geometry::Point(geo_types::Point::from(p))), Geometry::LineString(l) => Ok(geo_types::Geometry::LineString( geo_types::LineString::from(l), )), Geometry::LinearRing(l) => Ok(geo_types::Geometry::LineString( geo_types::LineString::from(l), )), Geometry::Polygon(p) => Ok(geo_types::Geometry::Polygon(geo_types::Polygon::from(p))), Geometry::MultiGeometry(g) => Ok(geo_types::Geometry::GeometryCollection( geo_types::GeometryCollection::try_from(g)?, )), _ => Err(Error::InvalidGeometry("Can't convert geometry".to_string())), } } } fn process_kml<T>(k: Kml<T>) -> Result<Vec<geo_types::Geometry<T>>, Error> where T: CoordType, { match k { Kml::KmlDocument(d) => Ok(d .elements .into_iter() .flat_map(process_kml) .flatten() .collect()), Kml::Point(p) => Ok(vec![ geo_types::Geometry::Point(geo_types::Point::from(p)); 1 ]), Kml::LineString(l) => Ok(vec![ geo_types::Geometry::LineString( geo_types::LineString::from(l), ); 1 ]), Kml::LinearRing(l) => Ok(vec![ geo_types::Geometry::LineString( geo_types::LineString::from(l), ); 1 ]), Kml::Polygon(p) => Ok(vec![ geo_types::Geometry::Polygon(geo_types::Polygon::from( p )); 1 ]), Kml::MultiGeometry(g) => Ok(geo_types::GeometryCollection::try_from(g)?.0), Kml::Placemark(p) => Ok(if let Some(g) = p.geometry { vec![geo_types::Geometry::try_from(g)?; 1] } else { vec![] }), Kml::Document { elements, .. } => Ok(elements .into_iter() .flat_map(process_kml) .flatten() .collect()), Kml::Folder { elements, .. } => Ok(elements .into_iter() .flat_map(process_kml) .flatten() .collect()), _ => Ok(vec![]), } } #[cfg_attr(docsrs, doc(cfg(feature = "geo-types")))] pub fn quick_collection<T>(k: Kml<T>) -> Result<geo_types::GeometryCollection<T>, Error> where T: CoordType, { Ok(geo_types::GeometryCollection(process_kml(k)?)) } #[cfg(test)] mod tests { use super::*; use crate::KmlDocument; use std::collections::HashMap; #[test] fn test_quick_collection() { let k = KmlDocument { elements: vec![ Kml::Point(Point::from(Coord::from((1., 1.)))), Kml::Folder { attrs: HashMap::new(), elements: vec![ Kml::LineString(LineString::from(vec![ Coord::from((1., 1.)), Coord::from((2., 2.)), ])), Kml::Point(Point::from(Coord::from((3., 3.)))), ], }, ], ..Default::default() };
assert_eq!(quick_collection(Kml::KmlDocument(k)).unwrap(), gc); } }
let gc = geo_types::GeometryCollection(vec![ geo_types::Geometry::Point(geo_types::Point::from((1., 1.))), geo_types::Geometry::LineString(geo_types::LineString::from(vec![(1., 1.), (2., 2.)])), geo_types::Geometry::Point(geo_types::Point::from((3., 3.))), ]);
assignment_statement
[ { "content": "/// Utility method for parsing multiple coordinates according to the spec\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use kml::types::{Coord, coords_from_str};\n\n///\n\n/// let coords_str = \"1,1,0\\n\\n1,2,0 2,2,0\";\n\n/// let coords: Vec<Coord> = coords_from_str(coords_str).unwrap();\n\n/// ```\n\npub fn coords_from_str<T: CoordType + FromStr>(s: &str) -> Result<Vec<Coord<T>>, Error> {\n\n s.split_whitespace().map(Coord::from_str).collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{coords_from_str, Coord};\n\n use std::str::FromStr;\n\n\n\n #[test]\n\n fn test_coord_from_str() {\n\n assert_eq!(\n\n Coord::from_str(\" 1.0,2.0,3 \").unwrap(),\n\n Coord {\n\n x: 1.,\n\n y: 2.,\n\n z: Some(3.)\n\n }\n\n );\n\n assert_eq!(\n", "file_path": "src/types/coord.rs", "rank": 1, "score": 156743.69676585656 }, { "content": "/// Coordinate type compatible with `geo-types`\n\npub trait CoordType: Float + Debug {}\n\nimpl<T: Float + Debug> CoordType for T {}\n\n\n\n/// KML coordinates described by `kml:coordinatesType`, [16.10](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#1212)\n\n/// in the KML specification\n\n///\n\n/// Coordinates are tuples with the third Z value for altitude being optional. Coordinate tuples are\n\n/// separated by any whitespace character\n\n#[derive(Copy, Clone, Default, Debug, PartialEq)]\n\npub struct Coord<T: CoordType = f64> {\n\n pub x: T,\n\n pub y: T,\n\n pub z: Option<T>,\n\n}\n\n\n\nimpl<T> Coord<T>\n\nwhere\n\n T: CoordType,\n\n{\n\n pub fn new(x: T, y: T, z: Option<T>) -> Self {\n", "file_path": "src/types/coord.rs", "rank": 3, "score": 94627.8883856605 }, { "content": "use std::collections::HashMap;\n\n\n\n/// Generic type used for supporting elements that are extensions or not currently implemented\n\n#[derive(Clone, Debug, Default, PartialEq)]\n\npub struct Element {\n\n pub name: String,\n\n pub attrs: HashMap<String, String>,\n\n pub content: Option<String>,\n\n pub children: Vec<Element>,\n\n}\n", "file_path": "src/types/element.rs", "rank": 4, "score": 57357.692416121055 }, { "content": "use std::collections::HashMap;\n\n\n\nuse crate::types::altitude_mode::AltitudeMode;\n\nuse crate::types::coord::CoordType;\n\nuse crate::types::linear_ring::LinearRing;\n\n\n\n/// `kml:Polygon`, [10.8](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#505) in the KML\n\n/// specification\n\n#[derive(Clone, Debug, Default, PartialEq)]\n\npub struct Polygon<T: CoordType = f64> {\n\n pub outer: LinearRing<T>,\n\n pub inner: Vec<LinearRing<T>>,\n\n pub extrude: bool,\n\n pub tessellate: bool,\n\n pub altitude_mode: AltitudeMode,\n\n pub attrs: HashMap<String, String>,\n\n}\n\n\n\nimpl<T> Polygon<T>\n\nwhere\n", "file_path": "src/types/polygon.rs", "rank": 5, "score": 57334.30401997143 }, { "content": "//! Module containing types for KML elements\n\nmod altitude_mode;\n\nmod coord;\n\n\n\npub use altitude_mode::AltitudeMode;\n\npub use coord::{coords_from_str, Coord, CoordType};\n\n\n\nmod line_string;\n\nmod linear_ring;\n\nmod location;\n\nmod multi_geometry;\n\nmod orientation;\n\nmod point;\n\nmod polygon;\n\nmod scale;\n\nmod vec2;\n\n\n\npub use line_string::LineString;\n\npub use linear_ring::LinearRing;\n\npub use location::Location;\n", "file_path": "src/types/mod.rs", "rank": 6, "score": 57332.56246252731 }, { "content": " T: CoordType + Default,\n\n{\n\n pub fn new(outer: LinearRing<T>, inner: Vec<LinearRing<T>>) -> Self {\n\n Polygon {\n\n outer,\n\n inner,\n\n ..Default::default()\n\n }\n\n }\n\n}\n", "file_path": "src/types/polygon.rs", "rank": 7, "score": 57330.02188764108 }, { "content": "pub use multi_geometry::MultiGeometry;\n\npub use orientation::Orientation;\n\npub use point::Point;\n\npub use polygon::Polygon;\n\npub use scale::Scale;\n\npub use vec2::{Units, Vec2};\n\n\n\nmod element;\n\npub(crate) mod geom_props;\n\nmod placemark;\n\n\n\npub use element::Element;\n\npub use placemark::Placemark;\n\n\n\nmod geometry;\n\n\n\npub use geometry::Geometry;\n\n\n\nmod style;\n\n\n\npub use style::{\n\n BalloonStyle, ColorMode, Icon, IconStyle, LabelStyle, LineStyle, ListStyle, Pair, PolyStyle,\n\n Style, StyleMap,\n\n};\n\n\n\nmod kml;\n\n\n\npub use self::kml::{Kml, KmlDocument, KmlVersion};\n", "file_path": "src/types/mod.rs", "rank": 8, "score": 57328.42469350635 }, { "content": "use std::collections::HashMap;\n\n\n\nuse crate::types::altitude_mode::AltitudeMode;\n\nuse crate::types::coord::{Coord, CoordType};\n\n\n\n/// `kml:Point`, [10.2](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#446) in the KML\n\n/// specification\n\n///\n\n/// Coord is required as of https://docs.opengeospatial.org/ts/14-068r2/14-068r2.html#atc-114\n\n#[derive(Clone, Default, Debug, PartialEq)]\n\npub struct Point<T: CoordType = f64> {\n\n pub coord: Coord<T>,\n\n pub extrude: bool,\n\n pub altitude_mode: AltitudeMode,\n\n pub attrs: HashMap<String, String>,\n\n}\n\n\n\nimpl<T> From<Coord<T>> for Point<T>\n\nwhere\n\n T: CoordType + Default,\n", "file_path": "src/types/point.rs", "rank": 9, "score": 57326.362467447056 }, { "content": "{\n\n fn from(coord: Coord<T>) -> Self {\n\n Point {\n\n coord,\n\n ..Default::default()\n\n }\n\n }\n\n}\n\n\n\nimpl<T> Point<T>\n\nwhere\n\n T: CoordType + Default,\n\n{\n\n pub fn new(x: T, y: T, z: Option<T>) -> Self {\n\n Point::from(Coord::new(x, y, z))\n\n }\n\n}\n", "file_path": "src/types/point.rs", "rank": 10, "score": 57317.51661858593 }, { "content": "use crate::types::coord::CoordType;\n\nuse crate::types::element::Element;\n\nuse crate::types::line_string::LineString;\n\nuse crate::types::linear_ring::LinearRing;\n\nuse crate::types::multi_geometry::MultiGeometry;\n\nuse crate::types::point::Point;\n\nuse crate::types::polygon::Polygon;\n\n\n\n/// Enum for elements in `kml:AbstractGeometryGroup`, [10.1](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#432)\n\n/// in the KML specification\n\n///\n\n/// `kml:Model` is currently represented by a placeholder element\n\n#[non_exhaustive]\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum Geometry<T: CoordType = f64> {\n\n Point(Point<T>),\n\n LineString(LineString<T>),\n\n LinearRing(LinearRing<T>),\n\n Polygon(Polygon<T>),\n\n MultiGeometry(MultiGeometry<T>),\n\n Element(Element), // Currently just a stand-in for Model\n\n}\n", "file_path": "src/types/geometry.rs", "rank": 11, "score": 57264.36781553357 }, { "content": " Coord::from_str(\"1,1\").unwrap(),\n\n Coord {\n\n x: 1.,\n\n y: 1.,\n\n z: None\n\n }\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_coords_from_str() {\n\n assert_eq!(\n\n coords_from_str(\"1,1\\n\\n 2,2 \").unwrap(),\n\n vec![\n\n Coord {\n\n x: 1.,\n\n y: 1.,\n\n z: None\n\n },\n\n Coord {\n\n x: 2.,\n\n y: 2.,\n\n z: None\n\n }\n\n ]\n\n )\n\n }\n\n}\n", "file_path": "src/types/coord.rs", "rank": 12, "score": 57111.646281989175 }, { "content": "impl<T> From<[T; 3]> for Coord<T>\n\nwhere\n\n T: CoordType,\n\n{\n\n fn from(coord: [T; 3]) -> Self {\n\n Coord::new(coord[0], coord[1], Some(coord[2]))\n\n }\n\n}\n\n\n\nimpl<T> FromStr for Coord<T>\n\nwhere\n\n T: CoordType + FromStr,\n\n{\n\n type Err = Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let mut parts = s.trim().split(',');\n\n let x_str = parts.next().ok_or(Error::CoordEmpty)?;\n\n let x: T = x_str\n\n .parse()\n", "file_path": "src/types/coord.rs", "rank": 13, "score": 57110.94586857935 }, { "content": " .map_err(|_| Error::NumParse(x_str.to_string()))?;\n\n let y_str = parts.next().ok_or(Error::CoordEmpty)?;\n\n let y: T = y_str\n\n .parse()\n\n .map_err(|_| Error::NumParse(y_str.to_string()))?;\n\n let z = if let Some(z) = parts.next() {\n\n Some(z.parse::<T>().map_err(|_| Error::NumParse(z.to_string()))?)\n\n } else {\n\n None\n\n };\n\n Ok(Coord { x, y, z })\n\n }\n\n}\n\n\n\nimpl<T> fmt::Display for Coord<T>\n\nwhere\n\n T: fmt::Display + CoordType,\n\n{\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n if let Some(z) = self.z {\n\n write!(f, \"{},{},{}\", self.x, self.y, z)\n\n } else {\n\n write!(f, \"{},{}\", self.x, self.y)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/types/coord.rs", "rank": 14, "score": 57109.87598658132 }, { "content": "use std::fmt::{self, Debug};\n\nuse std::str::FromStr;\n\n\n\nuse num_traits::Float;\n\n\n\nuse crate::errors::Error;\n\n\n\n/// Coordinate type compatible with `geo-types`\n", "file_path": "src/types/coord.rs", "rank": 15, "score": 57108.38227498508 }, { "content": " Coord { x, y, z }\n\n }\n\n}\n\n\n\nimpl<T> From<(T, T)> for Coord<T>\n\nwhere\n\n T: CoordType,\n\n{\n\n fn from(coord: (T, T)) -> Self {\n\n Coord::new(coord.0, coord.1, None)\n\n }\n\n}\n\n\n\nimpl<T> From<[T; 2]> for Coord<T>\n\nwhere\n\n T: CoordType,\n\n{\n\n fn from(coord: [T; 2]) -> Self {\n\n Coord::new(coord[0], coord[1], None)\n\n }\n", "file_path": "src/types/coord.rs", "rank": 16, "score": 57108.31890000345 }, { "content": "}\n\n\n\nimpl<T> From<(T, T, Option<T>)> for Coord<T>\n\nwhere\n\n T: CoordType,\n\n{\n\n fn from(coord: (T, T, Option<T>)) -> Self {\n\n Coord::new(coord.0, coord.1, coord.2)\n\n }\n\n}\n\n\n\nimpl<T> From<(T, T, T)> for Coord<T>\n\nwhere\n\n T: CoordType,\n\n{\n\n fn from(coord: (T, T, T)) -> Self {\n\n Coord::new(coord.0, coord.1, Some(coord.2))\n\n }\n\n}\n\n\n", "file_path": "src/types/coord.rs", "rank": 17, "score": 57108.176096029456 }, { "content": "use std::collections::HashMap;\n\nuse std::str::FromStr;\n\n\n\nuse crate::errors::Error;\n\nuse crate::types::{\n\n BalloonStyle, CoordType, Element, Icon, IconStyle, LabelStyle, LineString, LineStyle,\n\n LinearRing, ListStyle, Location, MultiGeometry, Orientation, Pair, Placemark, Point, PolyStyle,\n\n Polygon, Scale, Style, StyleMap,\n\n};\n\n\n\n/// Enum for representing the KML version being parsed\n\n///\n\n/// According to http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#7 namespace for 2.3\n\n/// is unchanged since it should be backwards-compatible\n\n#[derive(Clone, Debug, PartialEq)]\n\n#[non_exhaustive]\n\npub enum KmlVersion {\n\n Unknown,\n\n V22,\n\n V23,\n", "file_path": "src/types/kml.rs", "rank": 18, "score": 57092.68961869367 }, { "content": " }\n\n}\n\n\n\n/// Container for KML root element\n\n#[derive(Clone, Default, PartialEq, Debug)]\n\npub struct KmlDocument<T: CoordType = f64> {\n\n pub version: KmlVersion,\n\n pub attrs: HashMap<String, String>,\n\n pub elements: Vec<Kml<T>>,\n\n}\n\n\n\n/// Enum for representing any KML element\n\n#[allow(clippy::large_enum_variant)]\n\n#[derive(Clone, Debug, PartialEq)]\n\n#[non_exhaustive]\n\npub enum Kml<T: CoordType = f64> {\n\n KmlDocument(KmlDocument<T>),\n\n Scale(Scale<T>),\n\n Orientation(Orientation<T>),\n\n Point(Point<T>),\n", "file_path": "src/types/kml.rs", "rank": 19, "score": 57092.320989838045 }, { "content": " Location(Location<T>),\n\n LineString(LineString<T>),\n\n LinearRing(LinearRing<T>),\n\n Polygon(Polygon<T>),\n\n MultiGeometry(MultiGeometry<T>),\n\n Placemark(Placemark<T>),\n\n Document {\n\n attrs: HashMap<String, String>,\n\n elements: Vec<Kml<T>>,\n\n },\n\n Folder {\n\n attrs: HashMap<String, String>,\n\n elements: Vec<Kml<T>>,\n\n },\n\n Style(Style),\n\n StyleMap(StyleMap),\n\n Pair(Pair),\n\n BalloonStyle(BalloonStyle),\n\n IconStyle(IconStyle),\n\n Icon(Icon),\n\n LabelStyle(LabelStyle),\n\n LineStyle(LineStyle),\n\n PolyStyle(PolyStyle),\n\n ListStyle(ListStyle),\n\n Element(Element),\n\n}\n", "file_path": "src/types/kml.rs", "rank": 20, "score": 57086.49367074234 }, { "content": "}\n\n\n\nimpl Default for KmlVersion {\n\n fn default() -> KmlVersion {\n\n KmlVersion::Unknown\n\n }\n\n}\n\n\n\n// TODO: According to http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#7 namespace for 2.3\n\n// is unchanged since it should be backwards-compatible\n\nimpl FromStr for KmlVersion {\n\n type Err = Error;\n\n\n\n // TODO: Support different Google Earth implementations? Only check end?\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s {\n\n \"http://www.opengis.net/kml/2.2\" => Ok(Self::V22),\n\n \"http://www.opengis.net/kml/2.3\" => Ok(Self::V23),\n\n v => Err(Error::InvalidKmlVersion(v.to_string())),\n\n }\n", "file_path": "src/types/kml.rs", "rank": 21, "score": 57077.70477171362 }, { "content": "use std::collections::HashMap;\n\n\n\nuse crate::types::altitude_mode::AltitudeMode;\n\nuse crate::types::coord::{Coord, CoordType};\n\n\n\n/// `kml:LinearRing`, [10.5](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#465) in the\n\n/// KML specification\n\n#[derive(Clone, Debug, Default, PartialEq)]\n\npub struct LinearRing<T: CoordType = f64> {\n\n pub coords: Vec<Coord<T>>,\n\n pub extrude: bool,\n\n pub tessellate: bool,\n\n pub altitude_mode: AltitudeMode,\n\n pub attrs: HashMap<String, String>,\n\n}\n\n\n\nimpl<T> From<Vec<Coord<T>>> for LinearRing<T>\n\nwhere\n\n T: CoordType + Default,\n\n{\n\n fn from(coords: Vec<Coord<T>>) -> Self {\n\n LinearRing {\n\n coords,\n\n ..Default::default()\n\n }\n\n }\n\n}\n", "file_path": "src/types/linear_ring.rs", "rank": 22, "score": 54537.97252939181 }, { "content": "use std::collections::HashMap;\n\n\n\nuse crate::types::coord::CoordType;\n\nuse crate::types::geometry::Geometry;\n\n\n\n/// `kml:MultiGeometry`, [10.2](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#438) in the\n\n/// KML specification\n\n#[derive(Clone, Default, PartialEq, Debug)]\n\npub struct MultiGeometry<T: CoordType = f64> {\n\n pub geometries: Vec<Geometry<T>>,\n\n pub attrs: HashMap<String, String>,\n\n}\n\n\n\nimpl<T> MultiGeometry<T>\n\nwhere\n\n T: CoordType + Default,\n\n{\n\n pub fn new(geometries: Vec<Geometry<T>>) -> Self {\n\n MultiGeometry {\n\n geometries,\n\n ..Default::default()\n\n }\n\n }\n\n}\n", "file_path": "src/types/multi_geometry.rs", "rank": 23, "score": 54440.55440733613 }, { "content": "#[cfg(test)]\n\nmod roundtrip_tests {\n\n use kml::Kml;\n\n use std::fs::File;\n\n use std::io::prelude::*;\n\n\n\n // Based on roundtrip tests in georust/geojson\n\n macro_rules! roundtrip_test {\n\n ($name:ident : $file_name:expr) => {\n\n #[test]\n\n fn $name() {\n\n let fixture_dir_path = \"tests/fixtures/\";\n\n let mut file_path = fixture_dir_path.to_owned();\n\n file_path.push_str($file_name.to_owned().as_str());\n\n\n\n test_round_trip(&file_path);\n\n }\n\n };\n\n }\n\n\n", "file_path": "tests/test_roundtrip.rs", "rank": 24, "score": 37590.36865007069 }, { "content": " macro_rules! roundtrip_tests {\n\n ( $($name:ident: $file_name:expr,)* ) => {\n\n $(\n\n roundtrip_test!($name: $file_name);\n\n )*\n\n }\n\n }\n\n\n\n roundtrip_tests! {\n\n test_polygon: \"polygon.kml\",\n\n test_sample: \"sample.kml\",\n\n test_countries: \"countries.kml\",\n\n }\n\n\n\n // Confirms that parsing from KML and writing back doesn't drop any currently tracked data\n\n fn test_round_trip(file_path: &str) {\n\n let mut file = File::open(&file_path).unwrap();\n\n let mut file_contents = String::new();\n\n let _ = file.read_to_string(&mut file_contents);\n\n\n", "file_path": "tests/test_roundtrip.rs", "rank": 25, "score": 37588.161414258604 }, { "content": " // Read and parse the KML from the file's contents\n\n let original_kml = file_contents.parse::<Kml>().expect(\"unable to parse\");\n\n\n\n // Convert to a string and re-parse to make sure nothing we're watching was lost\n\n let kml_str = original_kml.to_string();\n\n\n\n let roundtrip_kml: Kml = kml_str.parse().unwrap();\n\n\n\n assert_eq!(original_kml, roundtrip_kml)\n\n }\n\n}\n", "file_path": "tests/test_roundtrip.rs", "rank": 26, "score": 37584.288144121325 }, { "content": "fn parse_benchmark(c: &mut Criterion) {\n\n c.bench_function(\"parse (countries.kml)\", |bencher| {\n\n let kml_str = include_str!(\"../tests/fixtures/countries.kml\");\n\n bencher.iter(|| {\n\n let _ = Kml::<f64>::from_str(kml_str).unwrap();\n\n });\n\n });\n\n\n\n c.bench_function(\"parse (sample.kml)\", |bencher| {\n\n let kml_str = include_str!(\"../tests/fixtures/sample.kml\");\n\n bencher.iter(|| {\n\n let _ = Kml::<f64>::from_str(kml_str).unwrap();\n\n });\n\n });\n\n}\n\n\n\ncriterion_group!(benches, parse_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "benches/parse.rs", "rank": 27, "score": 33565.85285734234 }, { "content": "//! Module for all KML-related errors\n\nuse thiserror::Error;\n\n\n\n/// Errors for KML reading and writing\n\n#[derive(Error, Debug)]\n\npub enum Error {\n\n #[error(\"Invalid input supplied for XML\")]\n\n InvalidInput,\n\n #[error(\"Encountered malformed XML: {0}\")]\n\n MalformedXml(#[from] quick_xml::Error),\n\n #[error(\"Invalid XML event: {0}\")]\n\n InvalidXmlEvent(String),\n\n #[error(\"Coordinate empty\")]\n\n CoordEmpty,\n\n #[error(\"No KML elements found\")]\n\n NoElements,\n\n #[error(\"Error parsing number from: {0}\")]\n\n NumParse(String),\n\n #[error(\"Invalid KML version: {0}\")]\n\n InvalidKmlVersion(String),\n", "file_path": "src/errors.rs", "rank": 28, "score": 31053.1025437832 }, { "content": " #[error(\"Invalid KML element: {0}\")]\n\n InvalidKmlElement(String),\n\n #[error(\"Geometry is invalid: {0}\")]\n\n InvalidGeometry(String),\n\n #[error(\"Invalid altitude mode: {0}\")]\n\n InvalidAltitudeMode(String),\n\n #[error(\"Invalid color mode: {0}\")]\n\n InvalidColorMode(String),\n\n #[error(\"Invalid list item type: {0}\")]\n\n InvalidListItemType(String),\n\n #[error(\"IO error: {0}\")]\n\n IoError(#[from] std::io::Error),\n\n #[cfg(feature = \"zip\")]\n\n #[error(\"ZIP error: {0}\")]\n\n ZipError(#[from] zip::result::ZipError),\n\n #[error(\"Invalid units: {0}\")]\n\n InvalidUnits(String),\n\n}\n", "file_path": "src/errors.rs", "rank": 29, "score": 31051.06628628444 }, { "content": "use std::collections::HashMap;\n\n\n\nuse crate::types::coord::CoordType;\n\nuse crate::types::element::Element;\n\nuse crate::types::geometry::Geometry;\n\n\n\n/// `kml:Placemark`, [9.14](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#249) in the KML\n\n/// specification\n\n///\n\n/// Placemark not inside of kml:Update (unused) requires a Geometry according to [ATC-226](https://docs.opengeospatial.org/ts/14-068r2/14-068r2.html#atc-226),\n\n/// but Google's reference says it's optional [Google Placemark reference](https://developers.google.com/kml/documentation/kmlreference#placemark).\n\n///\n\n/// Currently leaving optional.\n\n#[derive(Clone, Default, Debug, PartialEq)]\n\npub struct Placemark<T: CoordType = f64> {\n\n pub name: Option<String>,\n\n pub description: Option<String>,\n\n pub geometry: Option<Geometry<T>>,\n\n pub attrs: HashMap<String, String>,\n\n pub children: Vec<Element>,\n\n}\n", "file_path": "src/types/placemark.rs", "rank": 30, "score": 27802.07952390768 }, { "content": "use std::collections::HashMap;\n\n\n\nuse crate::types::coord::CoordType;\n\n\n\n/// `kml:Orientation`, [10.11](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#558) in the KML\n\n#[derive(Clone, Default, Debug, PartialEq)]\n\npub struct Orientation<T: CoordType = f64> {\n\n pub roll: T,\n\n pub tilt: T,\n\n pub heading: T,\n\n pub attrs: HashMap<String, String>,\n\n}\n\n\n\nimpl<T> Orientation<T>\n\nwhere\n\n T: CoordType + Default,\n\n{\n\n pub fn new(roll: T, tilt: T, heading: T) -> Self {\n\n Orientation {\n\n roll,\n\n tilt,\n\n heading,\n\n ..Default::default()\n\n }\n\n }\n\n}\n", "file_path": "src/types/orientation.rs", "rank": 31, "score": 27797.00417865721 }, { "content": "use std::collections::HashMap;\n\n\n\nuse crate::types::coord::CoordType;\n\n\n\n/// `kml:Location`, [10.10](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#542) in the KML\n\n#[derive(Clone, Default, Debug, PartialEq)]\n\npub struct Location<T: CoordType = f64> {\n\n pub latitude: T,\n\n pub longitude: T,\n\n pub altitude: T,\n\n pub attrs: HashMap<String, String>,\n\n}\n\n\n\nimpl<T> Location<T>\n\nwhere\n\n T: CoordType + Default,\n\n{\n\n pub fn new(latitude: T, longitude: T, altitude: T) -> Self {\n\n Location {\n\n latitude,\n\n longitude,\n\n altitude,\n\n ..Default::default()\n\n }\n\n }\n\n}\n", "file_path": "src/types/location.rs", "rank": 32, "score": 27797.00417865721 }, { "content": "use std::collections::HashMap;\n\n\n\nuse crate::types::coord::CoordType;\n\nuse num_traits::One;\n\n\n\n/// `kml:Scale`, [10.12](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#575) in the KML\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Scale<T: CoordType = f64> {\n\n pub x: T,\n\n pub y: T,\n\n pub z: T,\n\n pub attrs: HashMap<String, String>,\n\n}\n\n\n\nimpl<T> Scale<T>\n\nwhere\n\n T: CoordType,\n\n{\n\n pub fn new(x: T, y: T, z: T) -> Self {\n\n Scale {\n", "file_path": "src/types/scale.rs", "rank": 33, "score": 27795.056144548795 }, { "content": "use std::collections::HashMap;\n\nuse std::fmt;\n\nuse std::str::FromStr;\n\n\n\nuse crate::errors::Error;\n\n\n\nuse crate::types::Vec2;\n\n\n\n/// `kml:Style`, [12.2](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#798) in the KML\n\n/// specification\n\n#[derive(Clone, Default, Debug, PartialEq)]\n\npub struct Style {\n\n pub id: String,\n\n pub balloon: Option<BalloonStyle>,\n\n pub icon: Option<IconStyle>,\n\n pub label: Option<LabelStyle>,\n\n pub line: Option<LineStyle>,\n\n pub poly: Option<PolyStyle>,\n\n pub list: Option<ListStyle>,\n\n}\n", "file_path": "src/types/style.rs", "rank": 34, "score": 27790.973954203513 }, { "content": "\n\n/// `kml:StyleMap`, [12.3](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#811) in the KML\n\n/// specification\n\n#[derive(Clone, Default, Debug, PartialEq)]\n\npub struct StyleMap {\n\n pub id: String,\n\n pub pairs: Vec<Pair>,\n\n}\n\n\n\n/// `kml:Pair`, [12.4](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#819) in the KML\n\n/// specification\n\n#[derive(Clone, Default, Debug, PartialEq)]\n\npub struct Pair {\n\n pub key: String,\n\n pub style_url: String,\n\n pub attrs: HashMap<String, String>,\n\n}\n\n\n\n/// `kml:BalloonStyle`, [12.7](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#841) in the\n\n/// KML specification\n", "file_path": "src/types/style.rs", "rank": 35, "score": 27788.035405286886 }, { "content": "\n\n/// `kml:listItemType`, [12.18](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#955) in the\n\n/// KML specification.\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum ListItemType {\n\n Check,\n\n CheckOffOnly,\n\n CheckHideChildren,\n\n RadioFolder,\n\n}\n\n\n\nimpl Default for ListItemType {\n\n fn default() -> ListItemType {\n\n ListItemType::Check\n\n }\n\n}\n\n\n\nimpl FromStr for ListItemType {\n\n type Err = Error;\n\n\n", "file_path": "src/types/style.rs", "rank": 36, "score": 27787.581918443255 }, { "content": "\n\n/// `kml:colorMode`, [12.11](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#879) in the\n\n/// KML specification\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub enum ColorMode {\n\n Default,\n\n Random,\n\n}\n\n\n\nimpl Default for ColorMode {\n\n fn default() -> ColorMode {\n\n ColorMode::Default\n\n }\n\n}\n\n\n\nimpl FromStr for ColorMode {\n\n type Err = Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s {\n", "file_path": "src/types/style.rs", "rank": 37, "score": 27787.35650625421 }, { "content": "use core::fmt;\n\nuse std::str::FromStr;\n\n\n\nuse crate::Error;\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Vec2 {\n\n pub x: f64,\n\n pub y: f64,\n\n pub xunits: Units,\n\n pub yunits: Units,\n\n}\n\n\n\nimpl Default for Vec2 {\n\n fn default() -> Self {\n\n Self {\n\n x: 1.,\n\n y: 1.,\n\n xunits: Units::default(),\n\n yunits: Units::default(),\n", "file_path": "src/types/vec2.rs", "rank": 38, "score": 27786.55862919259 }, { "content": " Self::RadioFolder => \"radioFolder\",\n\n }\n\n )\n\n }\n\n}\n\n\n\n/// `kml:ListStyle`, [12.17](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#940) in the\n\n/// KML specification.\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct ListStyle {\n\n pub id: String,\n\n pub bg_color: String,\n\n pub max_snippet_lines: u32,\n\n pub list_item_type: ListItemType,\n\n}\n\n\n\nimpl Default for ListStyle {\n\n fn default() -> ListStyle {\n\n ListStyle {\n\n id: \"\".to_string(),\n\n bg_color: \"ffffffff\".to_string(),\n\n max_snippet_lines: 2,\n\n list_item_type: ListItemType::default(),\n\n }\n\n }\n\n}\n", "file_path": "src/types/style.rs", "rank": 39, "score": 27785.77453358136 }, { "content": " icon: Icon::default(),\n\n color: \"ffffffff\".to_string(),\n\n color_mode: ColorMode::default(),\n\n }\n\n }\n\n}\n\n\n\n/// `kml:Icon`, [12.13](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#900) in the KML\n\n/// specification.\n\n///\n\n/// Implements on `kml:BasicLinkType`\n\n#[derive(Clone, Debug, Default, PartialEq)]\n\npub struct Icon {\n\n pub href: String,\n\n}\n\n\n\n/// `kml:LabelStyle`, [12.14](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#909) in the\n\n/// KML specification.\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct LabelStyle {\n", "file_path": "src/types/style.rs", "rank": 40, "score": 27785.16276410414 }, { "content": "pub struct LineStyle {\n\n pub id: String,\n\n pub color: String,\n\n pub color_mode: ColorMode,\n\n pub width: f64,\n\n}\n\n\n\nimpl Default for LineStyle {\n\n fn default() -> LineStyle {\n\n LineStyle {\n\n id: \"\".to_string(),\n\n color: \"ffffffff\".to_string(),\n\n color_mode: ColorMode::default(),\n\n width: 1.0,\n\n }\n\n }\n\n}\n\n\n\n/// `kml:PolyStyle`, [12.16](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#927) in the\n\n/// KML specification.\n", "file_path": "src/types/style.rs", "rank": 41, "score": 27783.061671592157 }, { "content": "/// `kml:IconStyle`, [12.12](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#883) in the\n\n/// KML specification\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct IconStyle {\n\n pub id: String,\n\n pub scale: f64,\n\n pub heading: f64,\n\n pub hot_spot: Option<Vec2>,\n\n pub icon: Icon,\n\n pub color: String,\n\n pub color_mode: ColorMode,\n\n}\n\n\n\nimpl Default for IconStyle {\n\n fn default() -> IconStyle {\n\n IconStyle {\n\n id: \"\".to_string(),\n\n scale: 1.0,\n\n heading: 0.0,\n\n hot_spot: None,\n", "file_path": "src/types/style.rs", "rank": 42, "score": 27782.896403445036 }, { "content": " pub id: String,\n\n pub color: String,\n\n pub color_mode: ColorMode,\n\n pub scale: f64,\n\n}\n\n\n\nimpl Default for LabelStyle {\n\n fn default() -> LabelStyle {\n\n LabelStyle {\n\n id: \"\".to_string(),\n\n color: \"ffffffff\".to_string(),\n\n color_mode: ColorMode::default(),\n\n scale: 1.0,\n\n }\n\n }\n\n}\n\n\n\n/// `kml:LineStyle`, [12.15](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#917) in the\n\n/// KML specification.\n\n#[derive(Clone, Debug, PartialEq)]\n", "file_path": "src/types/style.rs", "rank": 43, "score": 27782.79707614785 }, { "content": " \"default\" => Ok(Self::Default),\n\n \"random\" => Ok(Self::Random),\n\n v => Err(Error::InvalidColorMode(v.to_string())),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for ColorMode {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(\n\n f,\n\n \"{}\",\n\n match self {\n\n Self::Default => \"default\",\n\n Self::Random => \"random\",\n\n }\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/types/style.rs", "rank": 44, "score": 27782.419875464537 }, { "content": " }\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum Units {\n\n Fraction,\n\n Pixels,\n\n InsetPixels,\n\n}\n\n\n\nimpl Default for Units {\n\n fn default() -> Self {\n\n Self::Fraction\n\n }\n\n}\n\n\n\nimpl FromStr for Units {\n\n type Err = Error;\n\n\n", "file_path": "src/types/vec2.rs", "rank": 45, "score": 27782.40314495355 }, { "content": " fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s {\n\n \"check\" => Ok(Self::Check),\n\n \"checkOffOnly\" => Ok(Self::CheckOffOnly),\n\n \"checkHideChildren\" => Ok(Self::CheckHideChildren),\n\n \"radioFolder\" => Ok(Self::RadioFolder),\n\n v => Err(Error::InvalidListItemType(v.to_string())),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for ListItemType {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(\n\n f,\n\n \"{}\",\n\n match self {\n\n Self::Check => \"check\",\n\n Self::CheckOffOnly => \"checkOffOnly\",\n\n Self::CheckHideChildren => \"checkHideChildren\",\n", "file_path": "src/types/style.rs", "rank": 46, "score": 27781.691607092376 }, { "content": " x,\n\n y,\n\n z,\n\n attrs: HashMap::new(),\n\n }\n\n }\n\n}\n\n\n\nimpl Default for Scale {\n\n fn default() -> Scale {\n\n Scale {\n\n x: One::one(),\n\n y: One::one(),\n\n z: One::one(),\n\n attrs: HashMap::new(),\n\n }\n\n }\n\n}\n", "file_path": "src/types/scale.rs", "rank": 47, "score": 27780.372487692028 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\npub struct PolyStyle {\n\n pub id: String,\n\n pub color: String,\n\n pub color_mode: ColorMode,\n\n pub fill: bool,\n\n pub outline: bool,\n\n}\n\n\n\nimpl Default for PolyStyle {\n\n fn default() -> PolyStyle {\n\n PolyStyle {\n\n id: \"\".to_string(),\n\n color: \"ffffffff\".to_string(),\n\n color_mode: ColorMode::default(),\n\n fill: true,\n\n outline: true,\n\n }\n\n }\n\n}\n", "file_path": "src/types/style.rs", "rank": 48, "score": 27780.32576379757 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\npub struct BalloonStyle {\n\n pub id: String,\n\n pub bg_color: Option<String>,\n\n pub text_color: String,\n\n pub text: Option<String>,\n\n pub display: bool,\n\n}\n\n\n\nimpl Default for BalloonStyle {\n\n fn default() -> BalloonStyle {\n\n BalloonStyle {\n\n id: \"\".to_string(),\n\n bg_color: None,\n\n text_color: \"ffffffff\".to_string(),\n\n text: None,\n\n display: true,\n\n }\n\n }\n\n}\n", "file_path": "src/types/style.rs", "rank": 49, "score": 27780.29833105629 }, { "content": " fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s {\n\n \"fraction\" => Ok(Self::Fraction),\n\n \"pixels\" => Ok(Self::Pixels),\n\n \"insetPixels\" => Ok(Self::InsetPixels),\n\n v => Err(Error::InvalidUnits(v.to_string())),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Units {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(\n\n f,\n\n \"{}\",\n\n match self {\n\n Self::Fraction => \"fraction\",\n\n Self::Pixels => \"pixels\",\n\n Self::InsetPixels => \"insetPixels\",\n\n }\n\n )\n\n }\n\n}\n", "file_path": "src/types/vec2.rs", "rank": 50, "score": 27779.27887536083 }, { "content": "use std::collections::HashMap;\n\n\n\nuse crate::types::altitude_mode::AltitudeMode;\n\nuse crate::types::coord::{Coord, CoordType};\n\n\n\n/// `kml:LineString`, [10.7](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#488) in the\n\n/// KML specification\n\n#[derive(Clone, Debug, Default, PartialEq)]\n\npub struct LineString<T: CoordType = f64> {\n\n pub coords: Vec<Coord<T>>,\n\n pub extrude: bool,\n\n pub tessellate: bool,\n\n pub altitude_mode: AltitudeMode,\n\n pub attrs: HashMap<String, String>,\n\n}\n\n\n\nimpl<T> From<Vec<Coord<T>>> for LineString<T>\n\nwhere\n\n T: CoordType + Default,\n\n{\n\n fn from(coords: Vec<Coord<T>>) -> Self {\n\n LineString {\n\n coords,\n\n ..Default::default()\n\n }\n\n }\n\n}\n", "file_path": "src/types/line_string.rs", "rank": 51, "score": 26429.9823747691 }, { "content": "use std::str::FromStr;\n\n\n\nuse crate::types::altitude_mode::AltitudeMode;\n\nuse crate::types::coord::{Coord, CoordType};\n\n\n\n// TODO: Should this be an attribute of geometries? Only complication is Point doesn't include\n\n// tessellate, not sure how to represent that\n\n// TODO: Implement validity check based on ATC-112 https://docs.opengeospatial.org/ts/14-068r2/14-068r2.html#atc-112\n\n// where if extrude is true, altitudeMode can't be clampToGround, as well as ATC-113 where if\n\n// tessellate is true, altitudeMode must be clampToGround\n\npub(crate) struct GeomProps<T: CoordType + FromStr + Default = f64> {\n\n pub coords: Vec<Coord<T>>,\n\n pub altitude_mode: AltitudeMode,\n\n pub extrude: bool,\n\n pub tessellate: bool,\n\n}\n", "file_path": "src/types/geom_props.rs", "rank": 52, "score": 26424.34115071786 }, { "content": "use std::fmt;\n\nuse std::str::FromStr;\n\n\n\nuse crate::errors::Error;\n\n\n\n/// `kml:altitudeMode`, [9.20](http://docs.opengeospatial.org/is/12-007r2/12-007r2.html#322) in the\n\n/// KML specification\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub enum AltitudeMode {\n\n ClampToGround,\n\n RelativeToGround,\n\n Absolute,\n\n}\n\n\n\nimpl Default for AltitudeMode {\n\n fn default() -> AltitudeMode {\n\n AltitudeMode::ClampToGround\n\n }\n\n}\n\n\n", "file_path": "src/types/altitude_mode.rs", "rank": 53, "score": 26418.12139334267 }, { "content": "impl FromStr for AltitudeMode {\n\n type Err = Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s {\n\n \"clampToGround\" => Ok(Self::ClampToGround),\n\n \"relativeToGround\" => Ok(Self::RelativeToGround),\n\n \"absolute\" => Ok(Self::Absolute),\n\n v => Err(Error::InvalidAltitudeMode(v.to_string())),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for AltitudeMode {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(\n\n f,\n\n \"{}\",\n\n match self {\n\n Self::ClampToGround => \"clampToGround\",\n\n Self::RelativeToGround => \"relativeToGround\",\n\n Self::Absolute => \"absolute\",\n\n }\n\n )\n\n }\n\n}\n", "file_path": "src/types/altitude_mode.rs", "rank": 54, "score": 26409.830357430197 }, { "content": "# kml\n\n\n\n[![crates.io](https://img.shields.io/crates/v/kml.svg)](https://crates.io/crates/kml)\n\n[![Build status](https://github.com/georust/kml/workflows/CI/badge.svg)](https://github.com/georust/kml/actions?query=workflow%3ACI)\n\n[![Documentation](https://docs.rs/kml/badge.svg)](https://docs.rs/kml)\n\n\n\nRust support for reading and writing KML with a focus on conversion to [`geo-types`](https://github.com/georust/geo) primitives.\n\n\n\n## Examples\n\n\n\n### Reading\n\n\n\n```rust\n\nuse std::path::Path;\n\nuse kml::{Kml, KmlReader};\n\n\n\nlet kml_str = r#\"\n\n<Polygon>\n\n <outerBoundaryIs>\n\n <LinearRing>\n\n <tessellate>1</tessellate>\n\n <coordinates>\n\n -1,2,0\n\n -1.5,3,0\n\n -1.5,2,0\n\n -1,2,0\n\n </coordinates>\n\n </LinearRing>\n\n </outerBoundaryIs>\n\n</Polygon>\n\n\"#;\n\n\n\n// Parse from a string\n\nlet kml: Kml = kml_str.parse().unwrap();\n\n\n\n// Read from a file path\n\nlet kml_path = Path::new(env!(\"CARGO_MANIFEST_DIR\"))\n\n .join(\"tests\")\n\n .join(\"fixtures\")\n\n .join(\"polygon.kml\");\n\nlet mut kml_reader = KmlReader::<_, f64>::from_path(kml_path).unwrap();\n\nlet kml_data = kml_reader.read().unwrap();\n\n\n\n// Read KMZ files with the `zip` feature or default features enabled\n\nlet kmz_path = Path::new(env!(\"CARGO_MANIFEST_DIR\"))\n\n .join(\"tests\")\n\n .join(\"fixtures\")\n\n .join(\"polygon.kmz\");\n\nlet mut kmz_reader = KmlReader::<_, f64>::from_kmz_path(kmz_path).unwrap();\n\nlet kmz_data = kmz_reader.read().unwrap();\n\n```\n\n\n\n### Writing\n\n\n\n```rust\n\nuse std::str;\n\nuse quick_xml;\n\nuse kml::{Kml, KmlWriter, types::{AltitudeMode, Coord, Point}};\n\n\n\nlet kml = Kml::Point(Point::new(1., 1., None));\n\n\n\nlet mut buf = Vec::new();\n\nlet mut writer = KmlWriter::from_writer(&mut buf);\n\nwriter.write(&kml).unwrap();\n\n```\n\n\n", "file_path": "README.md", "rank": 55, "score": 19993.73254972771 }, { "content": "### Conversion\n\n\n\n```rust\n\nuse geo_types::{self, GeometryCollection};\n\nuse kml::{quick_collection, Kml, types::Point};\n\n\n\nlet kml_point = Point::new(1., 1., None);\n\n// Convert into geo_types primitives\n\nlet geo_point = geo_types::Point::from(kml_point);\n\n// Convert back into kml::types structs\n\nlet kml_point = Point::from(geo_point);\n\n\n\nlet kml_folder_str = r#\"\n\n<Folder>\n\n <Point>\n\n <coordinates>1,1,1</coordinates>\n\n <altitudeMode>relativeToGround</altitudeMode>\n\n </Point>\n\n <LineString>\n\n <coordinates>1,1 2,1 3,1</coordinates>\n\n <altitudeMode>relativeToGround</altitudeMode>\n\n </LineString>\n\n</Folder>\"#;\n\nlet kml_folder: Kml<f64> = kml_folder_str.parse().unwrap();\n\n\n\n// Use the quick_collection helper to convert Kml to a geo_types::GeometryCollection\n\nlet geom_coll: GeometryCollection<f64> = quick_collection(kml_folder).unwrap();\n\n```\n\n\n\n## Code of Conduct\n\n\n\nAll contributors are expected to follow the [GeoRust Code of Conduct](https://github.com/georust/.github/blob/main/CODE_OF_CONDUCT.md)\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n- Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)\n\n- MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\n### Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions.\n", "file_path": "README.md", "rank": 56, "score": 19993.679577600928 }, { "content": "# Changelog\n\n\n\n## Unreleased\n\n\n\n## [v0.4.1](https://github.com/georust/kml/releases/tag/v0.4.1)\n\n\n\n- Fix issue with sibling `kml:Folder` or `kml:Document` elements nesting ([#19](https://github.com/georust/kml/pull/19))\n\n\n\n## [v0.4.0](https://github.com/georust/kml/releases/tag/v0.4.0)\n\n\n\n- Clippy cleanup ([#3](https://github.com/georust/kml/pull/3))\n\n- Add support for `kml:Location` ([#7](https://github.com/georust/kml/pull/7)) from [@Nadhum](https://github.com/Nadhum)\n\n- Add support for `kml:Scale` ([#8](https://github.com/georust/kml/pull/8)) from [@Nadhum](https://github.com/Nadhum)\n\n- Add support for `kml:Orientation` ([#8](https://github.com/georust/kml/pull/9)) from [@Nadhum](https://github.com/Nadhum)\n\n- Require clippy and rustfmt in CI ([#14](https://github.com/georust/kml/pull/14))\n\n- Add support for `hotSpot` element within `kml:IconStyle`, including a new `Units` enum and `Vec2` struct ([#13](https://github.com/georust/kml/pull/13)) from [@ardouglas](https://github.com/ardouglas)\n\n\n\n## [v0.3.1](https://github.com/georust/kml/releases/tag/v0.3.1)\n\n\n\n- Handle UTF-8 decoding issues without a panic, fixing [#1](https://github.com/georust/kml/issues/1)\n\n\n\n## [v0.3.0](https://github.com/georust/kml/releases/tag/v0.3.0)\n\n\n\n- Cleaned up method names (i.e. \"parse*\" to \"read*\")\n\n- Added `KmlWriter::from_writer`\n\n- Update license to MIT/Apache-2.0\n\n- Transfer to georust\n\n\n\n## [v0.2.0](https://github.com/georust/kml/releases/tag/v0.2.0)\n\n\n\n- Initial functionality for reading, writing, and conversion\n", "file_path": "CHANGELOG.md", "rank": 57, "score": 19974.81169904768 }, { "content": "//! </Point>\n\n//! <LineString>\n\n//! <coordinates>1,1 2,1 3,1</coordinates>\n\n//! <altitudeMode>relativeToGround</altitudeMode>\n\n//! </LineString>\n\n//! </Folder>\"#;\n\n//! let kml_folder: Kml<f64> = kml_folder_str.parse().unwrap();\n\n//!\n\n//! // Use the quick_collection helper to convert Kml to a geo_types::GeometryCollection\n\n//! let geom_coll: GeometryCollection<f64> = quick_collection(kml_folder).unwrap();\n\n//! ```\n\n\n\n#![cfg_attr(docsrs, feature(doc_cfg))]\n\n\n\npub mod types;\n\n\n\npub use crate::types::{Kml, KmlDocument, KmlVersion};\n\n\n\nmod errors;\n\npub use crate::errors::Error;\n", "file_path": "src/lib.rs", "rank": 59, "score": 31.539596351169358 }, { "content": "//! Module for writing KML types\n\nuse std::collections::HashMap;\n\nuse std::fmt;\n\nuse std::io::Write;\n\nuse std::marker::PhantomData;\n\nuse std::str;\n\nuse std::str::FromStr;\n\n\n\nuse quick_xml::events::{BytesEnd, BytesStart, BytesText, Event};\n\n\n\nuse crate::errors::Error;\n\nuse crate::types::geom_props::GeomProps;\n\nuse crate::types::{\n\n BalloonStyle, Coord, CoordType, Element, Geometry, Icon, IconStyle, Kml, LabelStyle,\n\n LineString, LineStyle, LinearRing, ListStyle, Location, MultiGeometry, Orientation, Pair,\n\n Placemark, Point, PolyStyle, Polygon, Scale, Style, StyleMap,\n\n};\n\n\n\n/// Struct for managing writing KML\n\npub struct KmlWriter<W: Write, T: CoordType + FromStr + Default = f64> {\n", "file_path": "src/writer.rs", "rank": 63, "score": 28.743475292875452 }, { "content": " T: CoordType + FromStr + Default,\n\n{\n\n type Err = Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n KmlReader::<&[u8], T>::from_string(s).read()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_parse_point() {\n\n let kml_str = \"<Point><coordinates>1,1,1</coordinates><altitudeMode>relativeToGround</altitudeMode></Point>\";\n\n let p: Kml = kml_str.parse().unwrap();\n\n assert_eq!(\n\n p,\n\n Kml::Point(Point {\n", "file_path": "src/reader.rs", "rank": 65, "score": 27.414370412242935 }, { "content": " /// ```\n\n /// use kml::{Kml, KmlReader};\n\n ///\n\n /// let point_str = \"<Point><coordinates>1,1,1</coordinates></Point>\";\n\n /// let kml_point: Kml<f64> = KmlReader::from_string(point_str).read().unwrap();\n\n /// ```\n\n pub fn read(&mut self) -> Result<Kml<T>, Error> {\n\n let mut result = self.read_elements()?;\n\n // Converts multiple items at the same level to KmlDocument\n\n match result.len().cmp(&1) {\n\n Ordering::Greater => Ok(Kml::KmlDocument(KmlDocument {\n\n elements: result,\n\n ..Default::default()\n\n })),\n\n Ordering::Equal => Ok(result.remove(0)),\n\n Ordering::Less => Err(Error::NoElements),\n\n }\n\n }\n\n\n\n fn read_elements(&mut self) -> Result<Vec<Kml<T>>, Error> {\n", "file_path": "src/reader.rs", "rank": 71, "score": 26.041996670036276 }, { "content": " .write(self)\n\n .map_err(|_| fmt::Error)\n\n .and_then(|_| f.write_str(str::from_utf8(&buf).unwrap()))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::types;\n\n\n\n #[test]\n\n fn test_write_point() {\n\n let kml = Kml::Point(Point {\n\n coord: Coord {\n\n x: 1.,\n\n y: 1.,\n\n z: Some(1.),\n\n },\n\n altitude_mode: types::AltitudeMode::RelativeToGround,\n", "file_path": "src/writer.rs", "rank": 72, "score": 25.635593047182603 }, { "content": " fn read_multi_geometry(\n\n &mut self,\n\n attrs: HashMap<String, String>,\n\n ) -> Result<MultiGeometry<T>, Error> {\n\n let mut geometries: Vec<Geometry<T>> = Vec::new();\n\n loop {\n\n let mut e = self.reader.read_event(&mut self.buf)?;\n\n match e {\n\n Event::Start(ref e) => {\n\n let attrs = Self::read_attrs(e.attributes());\n\n match e.local_name() {\n\n b\"Point\" => geometries.push(Geometry::Point(self.read_point(attrs)?)),\n\n b\"LineString\" => {\n\n geometries.push(Geometry::LineString(self.read_line_string(attrs)?))\n\n }\n\n b\"LinearRing\" => {\n\n geometries.push(Geometry::LinearRing(self.read_linear_ring(attrs)?))\n\n }\n\n b\"Polygon\" => geometries.push(Geometry::Polygon(self.read_polygon(attrs)?)),\n\n b\"MultiGeometry\" => geometries\n", "file_path": "src/reader.rs", "rank": 74, "score": 24.839593161853365 }, { "content": " tilt: 13.,\n\n heading: 45.07,\n\n ..Default::default()\n\n });\n\n let expected_string = \"<Orientation>\\\n\n <roll>-170.279</roll>\\\n\n <tilt>13</tilt>\\\n\n <heading>45.07</heading>\\\n\n </Orientation>\";\n\n assert_eq!(expected_string, kml.to_string());\n\n }\n\n\n\n #[test]\n\n fn test_write_polygon() {\n\n let kml = Kml::Polygon(Polygon {\n\n outer: LinearRing {\n\n coords: vec![\n\n Coord {\n\n x: -1.,\n\n y: 2.,\n", "file_path": "src/writer.rs", "rank": 75, "score": 24.585267234707327 }, { "content": " /// use quick_xml;\n\n /// use kml::{Kml, KmlWriter, types::{AltitudeMode, Coord, Point}};\n\n ///\n\n /// let kml = Kml::Point(Point::new(1., 1., None));\n\n ///\n\n /// let mut buf = Vec::new();\n\n /// let mut writer = KmlWriter::from_writer(&mut buf);\n\n /// writer.write(&kml).unwrap();\n\n /// ```\n\n pub fn write(&mut self, kml: &Kml<T>) -> Result<(), Error> {\n\n self.write_kml(kml)\n\n }\n\n\n\n fn write_kml(&mut self, k: &Kml<T>) -> Result<(), Error> {\n\n match k {\n\n Kml::KmlDocument(d) => self.write_container(b\"kml\", &d.attrs, &d.elements)?,\n\n Kml::Scale(s) => self.write_scale(s)?,\n\n Kml::Orientation(o) => self.write_orientation(o)?,\n\n Kml::Point(p) => self.write_point(p)?,\n\n Kml::Location(l) => self.write_location(l)?,\n", "file_path": "src/writer.rs", "rank": 76, "score": 24.48984537631153 }, { "content": "//! Module for reading KML sources into Rust types\n\nuse std::cmp::Ordering;\n\nuse std::collections::HashMap;\n\nuse std::fs::File;\n\nuse std::io::{BufRead, BufReader};\n\nuse std::marker::PhantomData;\n\nuse std::path::Path;\n\nuse std::str;\n\nuse std::str::FromStr;\n\n\n\nuse num_traits::{Float, One, Zero};\n\nuse quick_xml::events::attributes::Attributes;\n\nuse quick_xml::events::{BytesStart, Event};\n\n\n\nuse crate::errors::Error;\n\nuse crate::types::geom_props::GeomProps;\n\nuse crate::types::{\n\n self, coords_from_str, BalloonStyle, ColorMode, Coord, CoordType, Element, Geometry, Icon,\n\n IconStyle, Kml, KmlDocument, KmlVersion, LabelStyle, LineString, LineStyle, LinearRing,\n\n ListStyle, Location, MultiGeometry, Orientation, Pair, Placemark, Point, PolyStyle, Polygon,\n", "file_path": "src/reader.rs", "rank": 78, "score": 23.866321919294762 }, { "content": "//! writer.write(&kml).unwrap();\n\n//! ```\n\n//!\n\n//! ### Conversion\n\n//!\n\n//! ```\n\n//! use geo_types::{self, GeometryCollection};\n\n//! use kml::{quick_collection, Kml, types::Point};\n\n//!\n\n//! let kml_point = Point::new(1., 1., None);\n\n//! // Convert into geo_types primitives\n\n//! let geo_point = geo_types::Point::from(kml_point);\n\n//! // Convert back into kml::types structs\n\n//! let kml_point = Point::from(geo_point);\n\n//!\n\n//! let kml_folder_str = r#\"\n\n//! <Folder>\n\n//! <Point>\n\n//! <coordinates>1,1,1</coordinates>\n\n//! <altitudeMode>relativeToGround</altitudeMode>\n", "file_path": "src/lib.rs", "rank": 79, "score": 23.442719575027205 }, { "content": " let mut elements: Vec<Kml<T>> = Vec::new();\n\n loop {\n\n let mut e = self.reader.read_event(&mut self.buf)?;\n\n match e {\n\n Event::Start(ref mut e) => {\n\n let attrs = Self::read_attrs(e.attributes());\n\n match e.local_name() {\n\n b\"kml\" => elements.push(Kml::KmlDocument(self.read_kml_document()?)),\n\n b\"Scale\" => elements.push(Kml::Scale(self.read_scale(attrs)?)),\n\n b\"Orientation\" => {\n\n elements.push(Kml::Orientation(self.read_orientation(attrs)?))\n\n }\n\n b\"Point\" => elements.push(Kml::Point(self.read_point(attrs)?)),\n\n b\"Location\" => elements.push(Kml::Location(self.read_location(attrs)?)),\n\n b\"LineString\" => {\n\n elements.push(Kml::LineString(self.read_line_string(attrs)?))\n\n }\n\n b\"LinearRing\" => {\n\n elements.push(Kml::LinearRing(self.read_linear_ring(attrs)?))\n\n }\n", "file_path": "src/reader.rs", "rank": 81, "score": 22.933782818397383 }, { "content": "\n\n assert!(matches!(d, Kml::KmlDocument(_)));\n\n let doc: Option<KmlDocument> = match d {\n\n Kml::KmlDocument(d) => Some(d),\n\n _ => None,\n\n };\n\n\n\n assert!(doc.unwrap().elements.iter().all(|e| match e {\n\n Kml::Point(p) =>\n\n *p == Point {\n\n coord: Coord {\n\n x: 1.,\n\n y: 1.,\n\n z: Some(1.)\n\n },\n\n ..Default::default()\n\n },\n\n Kml::LineString(l) =>\n\n *l == LineString {\n\n coords: vec![\n", "file_path": "src/reader.rs", "rank": 82, "score": 22.85771588147832 }, { "content": " continue;\n\n }\n\n let mut buf = Vec::with_capacity(kml_file.size() as usize);\n\n std::io::copy(&mut kml_file, &mut buf)?;\n\n return Ok(KmlReader::from_reader(Cursor::new(buf)));\n\n }\n\n\n\n Err(Error::InvalidInput)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::types::Kml;\n\n\n\n #[test]\n\n fn test_read_kmz() {\n\n let kmz_path = Path::new(env!(\"CARGO_MANIFEST_DIR\"))\n\n .join(\"tests\")\n\n .join(\"fixtures\")\n\n .join(\"polygon.kmz\");\n\n let mut kml_reader = KmlReader::<_, f64>::from_kmz_path(kmz_path).unwrap();\n\n let kml = kml_reader.read().unwrap();\n\n\n\n assert!(matches!(kml, Kml::Polygon(_)))\n\n }\n\n}\n", "file_path": "src/kmz_reader.rs", "rank": 83, "score": 22.716988117797328 }, { "content": " },\n\n Coord {\n\n x: -1.,\n\n y: 2.,\n\n z: Some(0.)\n\n },\n\n ],\n\n tessellate: true,\n\n ..Default::default()\n\n },\n\n inner: vec![],\n\n ..Default::default()\n\n })\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_parse_kml_document_default() {\n\n let kml_str =\"<Point><coordinates>1,1,1</coordinates></Point><LineString><coordinates>1,1 2,1</coordinates></LineString>\";\n\n let d: Kml = kml_str.parse().unwrap();\n", "file_path": "src/reader.rs", "rank": 84, "score": 22.411716397100772 }, { "content": "use std::fs::File;\n\nuse std::io::Cursor;\n\nuse std::path::Path;\n\nuse std::str::FromStr;\n\n\n\nuse zip::ZipArchive;\n\n\n\nuse crate::errors::Error;\n\nuse crate::reader::KmlReader;\n\nuse crate::types::CoordType;\n\n\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"zip\")))]\n\nimpl<T> KmlReader<Cursor<Vec<u8>>, T>\n\nwhere\n\n T: CoordType + FromStr + Default,\n\n{\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"zip\")))]\n\n /// Create a [`KmlReader`](struct.KmlReader.html) from a KMZ file path\n\n ///\n\n /// # Example\n", "file_path": "src/kmz_reader.rs", "rank": 85, "score": 22.280363997326326 }, { "content": " /// use kml::KmlReader;\n\n ///\n\n /// let poly_path = Path::new(env!(\"CARGO_MANIFEST_DIR\"))\n\n /// .join(\"tests\")\n\n /// .join(\"fixtures\")\n\n /// .join(\"polygon.kml\");\n\n /// let mut kml_reader = KmlReader::<_, f64>::from_path(poly_path).unwrap();\n\n /// let kml = kml_reader.read().unwrap();\n\n /// ```\n\n pub fn from_path<P: AsRef<Path>>(path: P) -> Result<KmlReader<BufReader<File>, T>, Error> {\n\n Ok(KmlReader::<BufReader<File>, T>::from_xml_reader(\n\n quick_xml::Reader::from_file(path)?,\n\n ))\n\n }\n\n}\n\n\n\nimpl<B: BufRead, T> KmlReader<B, T>\n\nwhere\n\n T: CoordType + FromStr + Default,\n\n{\n", "file_path": "src/reader.rs", "rank": 87, "score": 21.716214981815476 }, { "content": " })\n\n }\n\n\n\n fn read_linear_ring(&mut self, attrs: HashMap<String, String>) -> Result<LinearRing<T>, Error> {\n\n let props = self.read_geom_props(b\"LinearRing\")?;\n\n Ok(LinearRing {\n\n coords: props.coords,\n\n altitude_mode: props.altitude_mode,\n\n extrude: props.extrude,\n\n tessellate: props.tessellate,\n\n attrs,\n\n })\n\n }\n\n\n\n fn read_polygon(&mut self, attrs: HashMap<String, String>) -> Result<Polygon<T>, Error> {\n\n let mut outer: LinearRing<T> = LinearRing::default();\n\n let mut inner: Vec<LinearRing<T>> = Vec::new();\n\n let mut altitude_mode = types::AltitudeMode::default();\n\n let mut extrude = false;\n\n let mut tessellate = false;\n", "file_path": "src/reader.rs", "rank": 88, "score": 21.586302342848587 }, { "content": "\n\n loop {\n\n let e = self.reader.read_event(&mut self.buf)?;\n\n match e {\n\n Event::Start(ref e) => {\n\n let attrs = Self::read_attrs(e.attributes());\n\n match e.local_name() {\n\n b\"name\" => name = Some(self.read_str()?),\n\n b\"description\" => description = Some(self.read_str()?),\n\n b\"Point\" => geometry = Some(Geometry::Point(self.read_point(attrs)?)),\n\n b\"LineString\" => {\n\n geometry = Some(Geometry::LineString(self.read_line_string(attrs)?))\n\n }\n\n b\"LinearRing\" => {\n\n geometry = Some(Geometry::LinearRing(self.read_linear_ring(attrs)?))\n\n }\n\n b\"Polygon\" => geometry = Some(Geometry::Polygon(self.read_polygon(attrs)?)),\n\n b\"MultiGeometry\" => {\n\n geometry =\n\n Some(Geometry::MultiGeometry(self.read_multi_geometry(attrs)?))\n", "file_path": "src/reader.rs", "rank": 89, "score": 21.13352917874698 }, { "content": " let p: Kml = r.read().unwrap();\n\n assert_eq!(\n\n p,\n\n Kml::Polygon(Polygon {\n\n outer: LinearRing {\n\n coords: vec![\n\n Coord {\n\n x: -1.,\n\n y: 2.,\n\n z: Some(0.)\n\n },\n\n Coord {\n\n x: -1.5,\n\n y: 3.,\n\n z: Some(0.)\n\n },\n\n Coord {\n\n x: -1.5,\n\n y: 2.,\n\n z: Some(0.)\n", "file_path": "src/reader.rs", "rank": 91, "score": 20.920843343484208 }, { "content": "\n\n fn write_polygon(&mut self, polygon: &Polygon<T>) -> Result<(), Error> {\n\n self.writer.write_event(Event::Start(\n\n BytesStart::owned_name(b\"Polygon\".to_vec())\n\n .with_attributes(self.hash_map_as_attrs(&polygon.attrs)),\n\n ))?;\n\n self.writer\n\n .write_event(Event::Start(BytesStart::owned_name(\n\n b\"outerBoundaryIs\".to_vec(),\n\n )))?;\n\n self.write_linear_ring(&polygon.outer)?;\n\n self.writer\n\n .write_event(Event::End(BytesEnd::borrowed(b\"outerBoundaryIs\")))?;\n\n\n\n if !polygon.inner.is_empty() {\n\n self.writer\n\n .write_event(Event::Start(BytesStart::owned_name(\n\n b\"innerBoundaryIs\".to_vec(),\n\n )))?;\n\n for b in &polygon.inner {\n", "file_path": "src/writer.rs", "rank": 92, "score": 20.890237567669143 }, { "content": " self.write_linear_ring(b)?;\n\n }\n\n self.writer\n\n .write_event(Event::End(BytesEnd::borrowed(b\"innerBoundaryIs\")))?;\n\n }\n\n self.write_geom_props(GeomProps {\n\n coords: Vec::new(),\n\n altitude_mode: polygon.altitude_mode,\n\n extrude: polygon.extrude,\n\n tessellate: polygon.tessellate,\n\n })?;\n\n Ok(self\n\n .writer\n\n .write_event(Event::End(BytesEnd::borrowed(b\"Polygon\")))?)\n\n }\n\n\n\n fn write_multi_geometry(&mut self, multi_geometry: &MultiGeometry<T>) -> Result<(), Error> {\n\n self.writer.write_event(Event::Start(\n\n BytesStart::owned_name(b\"MultiGeometry\".to_vec())\n\n .with_attributes(self.hash_map_as_attrs(&multi_geometry.attrs)),\n", "file_path": "src/writer.rs", "rank": 94, "score": 20.546048980647665 }, { "content": " },\n\n inner: vec![],\n\n ..Default::default()\n\n });\n\n\n\n assert_eq!(\n\n r#\"<Polygon><outerBoundaryIs><LinearRing><coordinates>-1,2,0\n\n-1.5,3,0\n\n-1.5,2,0\n\n-1,2,0</coordinates><altitudeMode>clampToGround</altitudeMode><extrude>0</extrude><tessellate>1</tessellate></LinearRing></outerBoundaryIs><altitudeMode>clampToGround</altitudeMode><extrude>0</extrude><tessellate>0</tessellate></Polygon>\"#,\n\n kml.to_string()\n\n );\n\n }\n\n}\n", "file_path": "src/writer.rs", "rank": 95, "score": 19.837020615913325 }, { "content": " self.write_text_element(\n\n b\"maxSnippetLines\",\n\n &list_style.max_snippet_lines.to_string(),\n\n )?;\n\n Ok(self\n\n .writer\n\n .write_event(Event::End(BytesEnd::borrowed(b\"ListStyle\")))?)\n\n }\n\n\n\n fn write_geometry(&mut self, geometry: &Geometry<T>) -> Result<(), Error> {\n\n match geometry {\n\n Geometry::Point(p) => self.write_point(p),\n\n Geometry::LineString(l) => self.write_line_string(l),\n\n Geometry::LinearRing(l) => self.write_linear_ring(l),\n\n Geometry::Polygon(p) => self.write_polygon(p),\n\n Geometry::MultiGeometry(g) => self.write_multi_geometry(g),\n\n _ => Ok(()),\n\n }\n\n }\n\n\n", "file_path": "src/writer.rs", "rank": 96, "score": 19.70441199164958 }, { "content": " /// use kml::{Kml, KmlReader};\n\n ///\n\n /// let point_str = \"<Point><coordinates>1,1,1</coordinates></Point>\";\n\n /// let kml_point: Kml<f64> = KmlReader::from_string(point_str).read().unwrap();\n\n /// ```\n\n pub fn from_string(s: &str) -> KmlReader<&[u8], T> {\n\n KmlReader::<&[u8], T>::from_xml_reader(quick_xml::Reader::<&[u8]>::from_str(s))\n\n }\n\n}\n\n\n\nimpl<T> KmlReader<BufReader<File>, T>\n\nwhere\n\n T: CoordType + FromStr + Default,\n\n{\n\n /// Read KML from a file path\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// use std::path::Path;\n", "file_path": "src/reader.rs", "rank": 97, "score": 18.994978618318264 }, { "content": " ///\n\n /// ```\n\n /// use std::path::Path;\n\n /// use kml::KmlReader;\n\n ///\n\n /// let kmz_path = Path::new(env!(\"CARGO_MANIFEST_DIR\"))\n\n /// .join(\"tests\")\n\n /// .join(\"fixtures\")\n\n /// .join(\"polygon.kmz\");\n\n /// let mut kml_reader = KmlReader::<_, f64>::from_kmz_path(kmz_path).unwrap();\n\n /// let kml = kml_reader.read().unwrap();\n\n /// ```\n\n pub fn from_kmz_path<P: AsRef<Path>>(path: P) -> Result<KmlReader<Cursor<Vec<u8>>, T>, Error> {\n\n let file = File::open(path)?;\n\n let mut archive = ZipArchive::new(file)?;\n\n\n\n // Should parse the first file with a KML extension\n\n for i in 0..archive.len() {\n\n let mut kml_file = archive.by_index(i).map_err(|_| Error::InvalidInput)?;\n\n if !kml_file.name().to_ascii_lowercase().ends_with(\".kml\") {\n", "file_path": "src/kmz_reader.rs", "rank": 98, "score": 18.767814188921463 } ]
Rust
src/search.rs
lachlan-smith/scryfall-rs
3c7a6d9258c9ad186a772094ebe5e84275e9c688
use url::Url; use crate::list::ListIter; use crate::Card; pub mod advanced; pub mod param; pub mod query; pub trait Search { fn write_query(&self, url: &mut Url) -> crate::Result<()>; #[cfg(test)] fn query_string(&self) -> crate::Result<String> { let mut url = Url::parse("http://localhost")?; self.write_query(&mut url)?; Ok(url.query().unwrap_or_default().to_string()) } fn search(&self) -> crate::Result<ListIter<Card>> { Card::search(self) } fn search_all(&self) -> crate::Result<Vec<Card>> { Card::search_all(self) } fn random(&self) -> crate::Result<Card> { Card::search_random(self) } } impl<T: Search + ?Sized> Search for &T { fn write_query(&self, url: &mut Url) -> crate::Result<()> { <T as Search>::write_query(*self, url) } } impl<T: Search + ?Sized> Search for &mut T { fn write_query(&self, url: &mut Url) -> crate::Result<()> { <T as Search>::write_query(*self, url) } } #[inline] fn write_query_string<S: ToString + ?Sized>(query: &S, url: &mut Url) -> crate::Result<()> { url.query_pairs_mut() .append_pair("q", query.to_string().as_str()); Ok(()) } impl Search for str { fn write_query(&self, url: &mut Url) -> crate::Result<()> { write_query_string(self, url) } } impl Search for String { fn write_query(&self, url: &mut Url) -> crate::Result<()> { write_query_string(self, url) } } pub mod prelude { pub use super::advanced::{SearchOptions, SortDirection, SortOrder, UniqueStrategy}; pub use super::param::compare::{eq, gt, gte, lt, lte, neq}; pub use super::param::criteria::{CardIs, PrintingIs}; pub use super::param::value::{ artist, artist_count, banned, block, border_color, cheapest, cmc, collector_number, color, color_count, color_identity, color_identity_count, cube, date, devotion, eur, flavor_text, format, frame, full_oracle_text, game, illustration_count, in_game, in_language, in_rarity, in_set, in_set_type, keyword, language, loyalty, mana, name, oracle_text, paper_print_count, paper_set_count, pow_tou, power, print_count, produces, rarity, restricted, set, set_count, set_type, tix, toughness, type_line, usd, usd_foil, watermark, year, Devotion, NumProperty, Regex, }; pub use super::param::{exact, Param}; pub use super::query::{not, Query}; pub use super::Search; } #[cfg(test)] mod tests { use super::prelude::*; use crate::Card; #[test] fn basic_search() { let cards = SearchOptions::new() .query(Query::And(vec![ name("lightning"), name("helix"), cmc(eq(2)), ])) .unique(UniqueStrategy::Prints) .search() .unwrap() .map(|c| c.unwrap()) .collect::<Vec<_>>(); assert!(cards.len() > 1); for card in cards { assert_eq!(card.name, "Lightning Helix") } } #[test] fn random_works_with_search_options() { assert!( SearchOptions::new() .query(keyword("storm")) .unique(UniqueStrategy::Art) .sort(SortOrder::Usd, SortDirection::Ascending) .extras(true) .multilingual(true) .variations(true) .random() .unwrap() .oracle_text .unwrap() .to_lowercase() .contains("storm") ); } #[test] fn finds_alpha_lotus() { let mut search = SearchOptions::new(); search .query(exact("Black Lotus")) .unique(UniqueStrategy::Prints) .sort(SortOrder::Released, SortDirection::Ascending); eprintln!("{}", search.query_string().unwrap()); assert_eq!( Card::search(&search) .unwrap() .next() .unwrap() .unwrap() .set .to_string(), "lea", ); } #[test] fn rarity_comparison() { use crate::card::Rarity; let cards = SearchOptions::new() .query(rarity(gt(Rarity::Mythic))) .search() .unwrap() .collect::<Vec<_>>(); assert!(cards.len() >= 9, "Couldn't find the Power Nine from VMA."); assert!( cards .into_iter() .map(|c| c.unwrap()) .all(|c| c.rarity > Rarity::Mythic) ); } #[test] fn numeric_property_comparison() { let card = Card::search_random(Query::And(vec![ power(eq(NumProperty::Toughness)), pow_tou(eq(NumProperty::Cmc)), not(CardIs::Funny), ])) .unwrap(); let power = card .power .and_then(|s| s.parse::<u32>().ok()) .unwrap_or_default(); let toughness = card .toughness .and_then(|s| s.parse::<u32>().ok()) .unwrap_or_default(); assert_eq!(power, toughness); assert_eq!(power + toughness, card.cmc as u32); let card = Card::search(pow_tou(gt(NumProperty::Year))) .unwrap() .map(|c| c.unwrap()) .collect::<Vec<_>>(); assert!(card.into_iter().any(|c| &c.name == "Infinity Elemental")); } #[test] fn query_string_sanity_check() { let query = cmc(4).and(name("Yargle")); assert_eq!( query.query_string().unwrap(), "q=%28cmc%3A4+AND+name%3A%22Yargle%22%29" ); } }
use url::Url; use crate::list::ListIter; use crate::Card; pub mod advanced; pub mod param; pub mod query; pub trait Search { fn write_query(&self, url: &mut Url) -> crate::Result<()>; #[cfg(test)] fn query_string(&self) -> crate::Result<String> { let mut url = Url::parse("http://localhost")?; self.write_query(&mut url)?; Ok(url.query().unwrap_or_default().to_string()) } fn search(&self) -> crate::Result<ListIter<Card>> { Card::search(self) } fn search_all(&self) -> crate::Result<Vec<Card>> { Card::search_all(self) } fn random(&self) -> crate::Result<Card> { Card::search_random(self) } } impl<T: Search + ?Sized> Search for &T { fn write_query(&self, url: &mut Url) -> crate::Result<()> { <T as Search>::write_query(*self, url) } } impl<T: Search + ?Sized> Search for &mut T { fn write_query(&self, url: &mut Url) -> crate::Result<()> { <T as Search>::write_query(*self, url) } } #[inline] fn write_query_string<S: ToString + ?Sized>(query: &S, url: &mut Url) -> crate::Result<()> { url.query_pairs_mut() .append_pair("q", query.to_string().as_str()); Ok(()) } impl Search for str { fn write_query(&self, url: &mut Url) -> crate::Result<()> { write_query_string(self, url) } } impl Search for String { fn write_query(&self, url: &mut Url) -> crate::Result<()> { write_query_string(self, url) } } pub mod prelude { pub use super::advanced::{SearchOptions, SortDirection, SortOrder, UniqueStrategy}; pub use super::param::compare::{eq, gt, gte, lt, lte, neq}; pub use super::param::criteria::{CardIs, PrintingIs}; pub use super::param::value::{ artist, artist_count, banned, block, border_color, cheapest, cmc, collector_number, color, color_count, color_identity, color_identity_count, cube, date, devotion, eur, flavor_text, format, frame, full_oracle_text, game, illustration_count, in_game, in_language, in_rarity, in_set, in_set_type, keyword, language, loyalty, mana, name, oracle_text, paper_print_count, paper_set_count, pow_tou, power, print_count, produces, rarity, restricted, set, set_count, set_type, tix, toughness, type_line, usd, usd_foil, watermark, year, Devotion, NumProperty, Regex, }; pub use super::param::{exact, Param}; pub use super::query::{not, Query}; pub use super::Search; } #[cfg(test)] mod tests { use super::prelude::*; use crate::Card; #[test] fn basic_search() { let cards = SearchOptions::new() .query(Query::And(vec![ nam
#[test] fn random_works_with_search_options() { assert!( SearchOptions::new() .query(keyword("storm")) .unique(UniqueStrategy::Art) .sort(SortOrder::Usd, SortDirection::Ascending) .extras(true) .multilingual(true) .variations(true) .random() .unwrap() .oracle_text .unwrap() .to_lowercase() .contains("storm") ); } #[test] fn finds_alpha_lotus() { let mut search = SearchOptions::new(); search .query(exact("Black Lotus")) .unique(UniqueStrategy::Prints) .sort(SortOrder::Released, SortDirection::Ascending); eprintln!("{}", search.query_string().unwrap()); assert_eq!( Card::search(&search) .unwrap() .next() .unwrap() .unwrap() .set .to_string(), "lea", ); } #[test] fn rarity_comparison() { use crate::card::Rarity; let cards = SearchOptions::new() .query(rarity(gt(Rarity::Mythic))) .search() .unwrap() .collect::<Vec<_>>(); assert!(cards.len() >= 9, "Couldn't find the Power Nine from VMA."); assert!( cards .into_iter() .map(|c| c.unwrap()) .all(|c| c.rarity > Rarity::Mythic) ); } #[test] fn numeric_property_comparison() { let card = Card::search_random(Query::And(vec![ power(eq(NumProperty::Toughness)), pow_tou(eq(NumProperty::Cmc)), not(CardIs::Funny), ])) .unwrap(); let power = card .power .and_then(|s| s.parse::<u32>().ok()) .unwrap_or_default(); let toughness = card .toughness .and_then(|s| s.parse::<u32>().ok()) .unwrap_or_default(); assert_eq!(power, toughness); assert_eq!(power + toughness, card.cmc as u32); let card = Card::search(pow_tou(gt(NumProperty::Year))) .unwrap() .map(|c| c.unwrap()) .collect::<Vec<_>>(); assert!(card.into_iter().any(|c| &c.name == "Infinity Elemental")); } #[test] fn query_string_sanity_check() { let query = cmc(4).and(name("Yargle")); assert_eq!( query.query_string().unwrap(), "q=%28cmc%3A4+AND+name%3A%22Yargle%22%29" ); } }
e("lightning"), name("helix"), cmc(eq(2)), ])) .unique(UniqueStrategy::Prints) .search() .unwrap() .map(|c| c.unwrap()) .collect::<Vec<_>>(); assert!(cards.len() > 1); for card in cards { assert_eq!(card.name, "Lightning Helix") } }
function_block-function_prefixed
[]
Rust
python_ext/pyo3/src/resolution.rs
hoodmane/sseq
0f19a29c95486a629b0d054c703ca0a58999ae97
use std::sync::Arc; use std::collections::hash_map::DefaultHasher; use std::hash::{Hash, Hasher}; use pyo3::prelude::*; use ext::resolution::ResolutionInner as ResolutionRust; use ext::chain_complex::{AugmentedChainComplex, ChainComplex, FiniteChainComplex, FreeChainComplex}; use python_algebra::module::{ ModuleRust, FreeModule, homomorphism::{ FreeModuleHomomorphism, ModuleHomomorphismRust } }; pub type CCRust = FiniteChainComplex<ModuleRust, ModuleHomomorphismRust>; python_utils::rc_wrapper_type!(Resolution, ResolutionRust<CCRust>); #[pymethods] impl Resolution { #[new] pub fn new(module : PyObject) -> PyResult<Self> { let chain_complex = Arc::new( FiniteChainComplex::ccdz( ModuleRust::from_py_object(module)? ) ); Ok(Resolution::box_and_wrap(ResolutionRust::new(Arc::clone(&chain_complex)))) } pub fn extended_degree(&self) -> PyResult<(u32, i32)> { Ok(self.inner()?.extended_degree()) } pub fn extend_through_degree(&self, max_s : u32, max_t : i32) -> PyResult<()> { let (old_max_s, old_max_t) = self.extended_degree()?; self.inner()?.extend_through_degree(old_max_s, max_s, old_max_t, max_t); Ok(()) } pub fn graded_dimension_string(&self, max_degree : i32 , max_hom_deg : u32) -> PyResult<String> { Ok(self.inner()?.graded_dimension_string(max_degree, max_hom_deg)) } pub fn step_resolution(&self, s : u32, t : i32) -> PyResult<()> { let self_inner = self.inner()?; let (max_s, max_t) = self_inner.extended_degree(); if max_s <= s || max_t <= t { return Err(python_utils::exception!(ValueError, "You need to run res.extend_degree(>={}, >={}) before res.step_resolution({}, {})", s,t,s,t )); } let next_t = self_inner.differential(s).next_degree(); if next_t > t { return Ok(()) } python_utils::release_gil!(self_inner.step_resolution(s, t)); Ok(()) } pub fn check_has_computed_bidegree(&self, hom_deg : u32, int_deg : i32) -> PyResult<()> { if !self.inner()?.has_computed_bidegree(hom_deg, int_deg) { Err(python_utils::exception!(IndexError, "We haven't computed out to bidegree {} yet.", python_utils::bidegree_string(hom_deg, int_deg) )) } else { Ok(()) } } pub fn check_cocycle_idx(&self, hom_deg : u32, int_deg : i32, idx : usize) -> PyResult<()> { self.check_has_computed_bidegree(hom_deg, int_deg)?; if idx >= self.inner()?.number_of_gens_in_bidegree(hom_deg, int_deg) { Err(python_utils::exception!(IndexError, "Fewer than {} generators in bidegree {}.", idx + 1, python_utils::bidegree_string(hom_deg, int_deg) )) } else { Ok(()) } } pub fn cocycle_string(&self, hom_deg : u32, int_deg : i32, idx : usize) -> PyResult<String> { self.check_cocycle_idx(hom_deg, int_deg, idx)?; Ok(self.inner()?.cocycle_string(hom_deg, int_deg, idx)) } pub fn bidegree_hash(&self, hom_deg : u32, int_deg : i32) -> PyResult<u64> { self.check_has_computed_bidegree(hom_deg, int_deg)?; let self_inner = self.inner()?; let num_gens = self_inner.number_of_gens_in_bidegree(hom_deg, int_deg); let mut hasher = DefaultHasher::new(); hom_deg.hash(&mut hasher); int_deg.hash(&mut hasher); num_gens.hash(&mut hasher); let ds = self_inner.differential(hom_deg); for idx in 0 .. num_gens { ds.output(int_deg, idx).hash(&mut hasher); } Ok(hasher.finish()) } pub fn number_of_gens_in_bidegree(&self, homological_degree : u32, internal_degree : i32) -> PyResult<usize> { self.check_has_computed_bidegree(homological_degree, internal_degree)?; Ok(self.inner()?.module(homological_degree).number_of_gens_in_degree(internal_degree)) } pub fn prime(&self) -> PyResult<u32> { Ok(*self.inner()?.complex().prime()) } pub fn module(&self, homological_degree : u32) -> PyResult<FreeModule> { Ok(FreeModule::wrap_immutable(self.inner()?.module(homological_degree))) } #[getter] pub fn get_min_degree(&self) -> PyResult<i32> { Ok(self.inner()?.min_degree()) } pub fn differential(&self, s : u32) -> PyResult<FreeModuleHomomorphism> { Ok(FreeModuleHomomorphism::wrap_to_free(self.inner()?.differential(s))) } pub fn chain_map(&self, s : u32) -> PyResult<FreeModuleHomomorphism> { Ok(FreeModuleHomomorphism::wrap_to_other(self.inner()?.chain_map(s))) } } use python_algebra::module::FreeUnstableModule; use python_algebra::algebra::{AdemAlgebra, AlgebraRust}; pub fn test() -> PyResult<()> { let a = Arc::new(AdemAlgebra::new(2, false, true, None)?); let b = a.to_arc()?.clone(); let m = FreeUnstableModule::new(AlgebraRust::into_py_object(b), "i".to_string(), 0)?; Resolution::new(ModuleRust::into_py_object(m.to_arc()?.clone()))?; Ok(()) }
use std::sync::Arc; use std::collections::hash_map::DefaultHasher; use std::hash::{Hash, Hasher}; use pyo3::prelude::*; use ext::resolution::ResolutionInner as ResolutionRust; use ext::chain_complex::{AugmentedChainComplex, ChainComplex, FiniteChainComplex, FreeChainComplex}; use python_algebra::module::{ ModuleRust, FreeModule, homomorphism::{ FreeModuleHomomorphism, ModuleHomomorphismRust } }; pub type CCRust = FiniteChainComplex<ModuleRust, ModuleHomomorphismRust>; python_utils::rc_wrapper_type!(Resolution, ResolutionRust<CCRust>); #[pymethods] impl Resolution { #[new] pub fn new(module : PyObject) -> PyResult<Self> { let chain_complex = Arc::new( FiniteChainComplex::ccdz( ModuleRust::from_py_object(module)? ) ); Ok(Resolution::box_and_wrap(ResolutionRust::new(Arc::clone(&chain_complex)))) } pub fn extended_degree(&self) -> PyResult<(u32, i32)> { Ok(self.inner()?.extended_degree()) } pub fn extend_through_degree(&self, max_s : u32, max_t : i32) -> PyResult<()> { let (old_max_s, old_max_t) = self.extended_degree()?; self.inner()?.extend_through_degree(old_max_s, max_s, old_max_t, max_t); Ok(()) } pub fn graded_dimension_string(&self, max_degree : i32 , max_hom_deg : u32) -> PyResult<String> { Ok(self.inner()?.graded_dimension_string(max_degree, max_hom_deg)) } pub fn step_resolution(&self, s : u32, t : i32) -> PyResult<()> { let self_inner = self.inner()?; let (max_s, max_t) = self_inner.extended_degree(); if max_s <= s || max_t <= t { return Err(python_utils::exception!(ValueError, "You need to run res.extend_degree(>={}, >={}) before res.step_resolution({}, {})", s,t,s,t )); } let next_t = self_inner.differential(s).next_degree(); if next_t > t { return Ok(()) } python_utils::release_gil!(self_inner.step_resolution(s, t)); Ok(()) } pub fn check_has_computed_bidegree(&self, hom_deg : u32, int_deg : i32) -> PyResult<()> { if !self.inner()?.has_computed_bidegree(hom_deg, int_deg) { Err(python_utils::exception!(IndexError, "We haven't computed out to bidegree {} yet.", python_utils::bidegree_string(hom_deg, int_deg) )) } else { Ok(()) } } pub fn check_cocycle_idx(&self, hom_deg : u32, int_deg : i32, idx : usize) -> PyResult<()> { self.check_has_computed_bidegree(hom_deg, int_deg)?; if idx >= self.inner()?.number_of_gens_in_bidegree(hom_deg, int_deg) { Err(python_utils::exception!(IndexError, "Fewer than {} generators in bidegree {}.", idx + 1, python_utils::bidegree_string(hom_deg, int_deg) )) } else { Ok(()) } } pub fn cocycle_string(&self, hom_deg : u32, int_deg : i32, idx : usize) -> PyResult<String> { self.check_cocycle_idx(hom_deg, int_deg, idx)?; Ok(self.inner()?.cocycle_string(hom_deg, int_deg, idx)) } pub fn bidegree_hash(&s
} pub fn number_of_gens_in_bidegree(&self, homological_degree : u32, internal_degree : i32) -> PyResult<usize> { self.check_has_computed_bidegree(homological_degree, internal_degree)?; Ok(self.inner()?.module(homological_degree).number_of_gens_in_degree(internal_degree)) } pub fn prime(&self) -> PyResult<u32> { Ok(*self.inner()?.complex().prime()) } pub fn module(&self, homological_degree : u32) -> PyResult<FreeModule> { Ok(FreeModule::wrap_immutable(self.inner()?.module(homological_degree))) } #[getter] pub fn get_min_degree(&self) -> PyResult<i32> { Ok(self.inner()?.min_degree()) } pub fn differential(&self, s : u32) -> PyResult<FreeModuleHomomorphism> { Ok(FreeModuleHomomorphism::wrap_to_free(self.inner()?.differential(s))) } pub fn chain_map(&self, s : u32) -> PyResult<FreeModuleHomomorphism> { Ok(FreeModuleHomomorphism::wrap_to_other(self.inner()?.chain_map(s))) } } use python_algebra::module::FreeUnstableModule; use python_algebra::algebra::{AdemAlgebra, AlgebraRust}; pub fn test() -> PyResult<()> { let a = Arc::new(AdemAlgebra::new(2, false, true, None)?); let b = a.to_arc()?.clone(); let m = FreeUnstableModule::new(AlgebraRust::into_py_object(b), "i".to_string(), 0)?; Resolution::new(ModuleRust::into_py_object(m.to_arc()?.clone()))?; Ok(()) }
elf, hom_deg : u32, int_deg : i32) -> PyResult<u64> { self.check_has_computed_bidegree(hom_deg, int_deg)?; let self_inner = self.inner()?; let num_gens = self_inner.number_of_gens_in_bidegree(hom_deg, int_deg); let mut hasher = DefaultHasher::new(); hom_deg.hash(&mut hasher); int_deg.hash(&mut hasher); num_gens.hash(&mut hasher); let ds = self_inner.differential(hom_deg); for idx in 0 .. num_gens { ds.output(int_deg, idx).hash(&mut hasher); } Ok(hasher.finish())
function_block-random_span
[ { "content": "pub fn reduce_coefficient(p : u32, c : i32) -> u32 {\n\n let p = p as i32;\n\n (((c % p) + p) % p) as u32\n\n}\n\n\n", "file_path": "python_ext/pyo3/python_utils/src/lib.rs", "rank": 0, "score": 290762.9992508795 }, { "content": "#[allow(dead_code)]\n\nfn operation_drop(algebra: &AdemAlgebra, deg: i32, idx: usize) -> i32 {\n\n if *algebra.prime() != 2 {\n\n return 1;\n\n }\n\n let elt = algebra.basis_element_from_index(deg, idx);\n\n if elt.ps.is_empty() {\n\n return 0;\n\n }\n\n\n\n let mut first = elt.ps[0];\n\n let mut drop = 1;\n\n while first & 1 == 0 {\n\n first >>= 1;\n\n drop *= 2;\n\n }\n\n deg - drop\n\n}\n\n\n", "file_path": "ext/src/yoneda.rs", "rank": 1, "score": 265852.83067380113 }, { "content": "pub fn bidegree_string(s : u32, t : i32) -> String {\n\n format!(\"(s, t) = ({}, {}) <==> (x, y) = ({}, {})\", s, t, t-s as i32, s)\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! exception {\n\n ($error_type : ident ) => { \n\n pyo3::exceptions::$error_type::py_err(\"\") \n\n };\n\n ($error_type : ident, $msg:expr) => { \n\n pyo3::exceptions::$error_type::py_err($msg)\n\n };\n\n ($error_type : ident, $msg:expr,) => { \n\n pyo3::exceptions::$error_type::py_err($msg)\n\n };\n\n ($error_type : ident, $fmt:expr, $($arg:tt)+) => { \n\n pyo3::exceptions::$error_type::py_err(format!($fmt, $($arg)*))\n\n }; \n\n}\n\n\n", "file_path": "python_ext/pyo3/python_utils/src/lib.rs", "rank": 2, "score": 265664.7225763765 }, { "content": "pub fn new_valid_prime(p: u32) -> PyResult<ValidPrime> {\n\n let result = ValidPrime::try_new(p)\n\n .ok_or(python_utils::exception!(ValueError, \n\n \"First argument {} is not a valid prime.\", p\n\n ))?;\n\n fp::vector::initialize_limb_bit_index_table(result);\n\n Ok(result)\n\n}\n", "file_path": "python_ext/pyo3/python_fp/src/prime.rs", "rank": 3, "score": 263123.1086299871 }, { "content": "fn rate_adem_operation(algebra: &AdemAlgebra, deg: i32, idx: usize) -> i32 {\n\n if *algebra.prime() != 2 {\n\n return 1;\n\n }\n\n let elt = algebra.basis_element_from_index(deg, idx);\n\n let mut pref = 0;\n\n for i in &elt.ps {\n\n let mut i = *i;\n\n while i != 0 {\n\n pref += (i & 1) as i32;\n\n i >>= 1;\n\n }\n\n }\n\n pref\n\n}\n\n\n", "file_path": "ext/src/yoneda.rs", "rank": 4, "score": 262616.37106027437 }, { "content": "pub fn inadmissible_pairs(p: ValidPrime, generic: bool, degree: i32) -> Vec<(u32, u32, u32)> {\n\n let p = *p;\n\n let degree = degree as u32;\n\n let q = if generic { 2 * p - 2 } else { 1 };\n\n // (i, b, j) means P^i P^j if b = 0, or P^i b P^j if b = 1.\n\n let mut inadmissible_pairs = Vec::new();\n\n\n\n // Since |P^i| is always a multiple of q, we have a relation only if degree = 0 or 1 mod q.\n\n // If it is 0, then there is no Bockstein. Otherwise, there is.\n\n if degree % q == 0 {\n\n let degq = degree / q;\n\n // We want P^i P^j to be inadmissible, so i < p * j. This translates to\n\n // i < p * degq /(p + 1). Since Rust automatically rounds *down*, but we want to round\n\n // up instead, we use i < (p * degq + p)/(p + 1).\n\n for i in 1..(p * degq + p) / (p + 1) {\n\n inadmissible_pairs.push((i, 0, degq - i));\n\n }\n\n } else if degree % q == 1 {\n\n let degq = degree / q; // Since we round down, this is actually (degree - 1)/q\n\n // We want P^i b P^j to be inadmissible, so i < p * j + 1. This translates to\n\n // i < (p * degq + 1)/(p + 1). Since Rust automatically rounds *down*, but we want to round\n\n // up instead, we use i < (p * degq + p + 1)/(p + 1).\n\n for i in 1..(p * degq + p + 1) / (p + 1) {\n\n inadmissible_pairs.push((i, 1, degq - i));\n\n }\n\n }\n\n inadmissible_pairs\n\n}\n\n\n", "file_path": "ext/crates/algebra/src/algebra/combinatorics.rs", "rank": 5, "score": 261563.44647780515 }, { "content": "/// Prints an element in the bidegree `(n, s)` to stdout. For example, `[0, 2, 1]` will be printed\n\n/// as `2 x_(n, s, 1) + x_(f, s, 2)`.\n\npub fn print_element(v: fp::vector::Slice, n: i32, s: u32) {\n\n let mut first = true;\n\n for (i, v) in v.iter_nonzero() {\n\n if !first {\n\n print!(\" + \");\n\n }\n\n if v != 1 {\n\n print!(\"{} \", v);\n\n }\n\n print!(\"x_({}, {}, {})\", n, s, i);\n\n first = false;\n\n }\n\n}\n\n\n\n/// Given a function f(s, t), compute it for every `s` in `[min_s, max_s]` and every `t` in\n\n/// `[min_t, max_t(s)]`. Further, we only compute `f(s, t)` when `f(s - 1, t')` has been computed\n\n/// for all `t' < t`.\n\n///\n\n/// The function `f` should return a range starting from t and ending at the largest `T` such that\n\n/// `f(s, t')` has already been computed for every `t' < T`.\n\n///\n\n/// While `iter_s_t` could have had kept track of that data, it is usually the case that `f` would\n\n/// compute something and write it to a `OnceBiVec`, and\n\n/// [`OnceBiVec::push_ooo`](once::OnceBiVec::push_ooo) would return this range for us.\n\n///\n\n/// This uses [`rayon`] under the hood, and `f` should feel free to use further rayon parallelism.\n\n///\n\n/// # Arguments:\n\n/// - `max_s`: This is exclusive\n\n/// - `max_t`: This is exclusive\n", "file_path": "ext/src/utils.rs", "rank": 6, "score": 256885.291078852 }, { "content": "/// Read the Diff.$N files in `data_dir` and produce the corresponding chain complex object.\n\nfn read_bruner_resolution(data_dir: PathBuf, max_n: i32) -> Result<(u32, FiniteChainComplex)> {\n\n let num_s: usize = data_dir.read_dir()?.count();\n\n\n\n let cc = create_chain_complex(num_s);\n\n let algebra = cc.algebra();\n\n let algebra = algebra.milnor_algebra();\n\n\n\n let mut buf = String::new();\n\n let s = num_s as u32 - 1;\n\n\n\n algebra.compute_basis(max_n + s as i32 + 1);\n\n // Handle s = 0\n\n {\n\n // TODO: actually parse file\n\n let m = cc.module(0);\n\n m.add_generators(0, 1, None);\n\n m.extend_by_zero(max_n + 1);\n\n }\n\n\n\n for s in 1..num_s as u32 {\n", "file_path": "ext/examples/bruner.rs", "rank": 7, "score": 246746.78894967888 }, { "content": "pub fn vector(prompt: &str, len: usize) -> Vec<u32> {\n\n raw(prompt, |s| {\n\n let v = s[1..s.len() - 1]\n\n .split(',')\n\n .map(|x| x.trim().parse::<u32>().map_err(|e| e.to_string()))\n\n .collect::<Result<Vec<_>, String>>()?;\n\n if v.len() != len {\n\n return Err(format!(\n\n \"Target has dimension {} but {} coordinates supplied\",\n\n len,\n\n v.len()\n\n ));\n\n }\n\n Ok(v)\n\n })\n\n}\n", "file_path": "ext/crates/query/src/lib.rs", "rank": 8, "score": 240993.07483231078 }, { "content": "/// Whether picking δ₂ = 0 gives a valid secondary refinement. This requires\n\n/// 1. The chain complex is concentrated in degree zero;\n\n/// 2. The module is finite dimensional; and\n\n/// 3. $\\mathrm{Hom}(\\mathrm{Ext}^{2, t}_A(H^*X, k), H^{t - 1} X) = 0$ for all $t$ or $\\mathrm{Hom}(\\mathrm{Ext}^{3, t}_A(H^*X, k), H^{t - 1} X) = 0$ for all $t$.\n\npub fn can_compute(res: &Resolution<CCC>) -> bool {\n\n let complex = res.target();\n\n if *complex.prime() != 2 {\n\n eprintln!(\"Prime is not 2\");\n\n return false;\n\n }\n\n if complex.max_s() != 1 {\n\n eprintln!(\"Complex is not concentrated in degree 0.\");\n\n return false;\n\n }\n\n let module = complex.module(0);\n\n let module = module.as_fd_module();\n\n if module.is_none() {\n\n eprintln!(\"Module is not finite dimensional\");\n\n return false;\n\n }\n\n let module = module.unwrap();\n\n let max_degree = module.max_degree();\n\n\n\n (0..max_degree)\n\n .all(|t| module.dimension(t) == 0 || res.number_of_gens_in_bidegree(2, t + 1) == 0)\n\n || (0..max_degree)\n\n .all(|t| module.dimension(t) == 0 || res.number_of_gens_in_bidegree(3, t + 1) == 0)\n\n}\n", "file_path": "ext/src/secondary.rs", "rank": 9, "score": 239835.0065191746 }, { "content": "#[allow(clippy::single_match)]\n\nfn rate_operation(algebra: &Arc<SteenrodAlgebra>, op_deg: i32, op_idx: usize) -> i32 {\n\n let mut pref = 0;\n\n match &**algebra {\n\n SteenrodAlgebra::AdemAlgebra(a) => pref += rate_adem_operation(a, op_deg, op_idx),\n\n _ => (),\n\n };\n\n pref\n\n}\n\n\n", "file_path": "ext/src/yoneda.rs", "rank": 11, "score": 234848.66284654732 }, { "content": "pub fn print_resolution_color<C: FreeChainComplex, S: std::hash::BuildHasher>(\n\n res: &C,\n\n max_s: u32,\n\n highlight: &std::collections::HashMap<(u32, i32), u32, S>,\n\n) {\n\n let stderr = std::io::stderr();\n\n let mut stderr = stderr.lock();\n\n for s in (0..max_s).rev() {\n\n for t in s as i32..=res.module(s).max_computed_degree() {\n\n if matches!(highlight.get(&(s, t)), None | Some(0)) {\n\n write!(\n\n stderr,\n\n \"{}{}{} \",\n\n RED_ANSI_CODE,\n\n ascii_num(res.module(s).number_of_gens_in_degree(t)),\n\n WHITE_ANSI_CODE\n\n )\n\n .unwrap();\n\n } else {\n\n write!(\n", "file_path": "ext/src/utils.rs", "rank": 12, "score": 231089.24378533254 }, { "content": "// Uses a the lookup table we initialized.\n\npub fn inverse(p: ValidPrime, k: u32) -> u32 {\n\n assert!(k > 0 && k < *p);\n\n // LLVM doesn't understand the inequality is transitive\n\n unsafe { *INVERSE_TABLE[PRIME_TO_INDEX_MAP[*p as usize]].get_unchecked(k as usize) }\n\n}\n\n\n\npub const fn minus_one_to_the_n(p: u32, i: i32) -> u32 {\n\n if i % 2 == 0 {\n\n 1\n\n } else {\n\n p - 1\n\n }\n\n}\n\n\n\n/// This uses a lookup table for n choose k when n and k are both less than p.\n\n/// Lucas's theorem reduces general binomial coefficients to this case.\n\n///\n\n/// Calling this function safely requires that `k, n < p`. These invariants are often known\n\n/// apriori because k and n are obtained by reducing mod p, so it is better to expose an unsafe\n\n/// interface that avoids these checks.\n", "file_path": "ext/crates/fp/src/prime.rs", "rank": 13, "score": 229198.2057547288 }, { "content": "pub fn bitmask_u32_from_vec(vec : &Vec<u32>) -> u32 {\n\n let mut result = 0;\n\n // TODO: make sure we get ordering correct here\n\n for (idx, b) in vec.iter().enumerate() {\n\n result |= if *b != 0 { 1 << idx } else { 0 };\n\n }\n\n result\n\n}\n\n\n", "file_path": "python_ext/pyo3/python_algebra/src/algebra/algebra_bindings.rs", "rank": 14, "score": 227674.07847043377 }, { "content": "pub fn bitmask_u32_from_py_object(obj : PyObject, argument_name : &str) -> PyResult<u32> {\n\n let gil = Python::acquire_gil();\n\n let py = gil.python();\n\n obj.extract::<u32>(py).or_else(|_err| {\n\n Ok(bitmask_u32_from_vec(&obj.extract(py)?))\n\n }).map_err(|_err : PyErr| {\n\n python_utils::exception!(TypeError,\n\n \"Argument \\\"{}\\\" expected to be either a single integer or a list of integers.\",\n\n argument_name\n\n )\n\n })\n\n}\n\n\n\n\n\n#[pymethods]\n\nimpl PVector {\n\n #[new]\n\n fn new(l : PyObject) -> PyResult<Self> {\n\n let gil = Python::acquire_gil();\n\n let py = gil.python();\n", "file_path": "python_ext/pyo3/python_algebra/src/algebra/algebra_bindings.rs", "rank": 15, "score": 224062.53374787763 }, { "content": "pub fn bitmask_u32_to_vec(mut bitmask : u32) -> Vec<u32> {\n\n let mut result = Vec::new();\n\n // TODO: make sure we get ordering correct here\n\n while bitmask != 0 {\n\n result.push(bitmask & 1);\n\n bitmask >>= 1;\n\n }\n\n result\n\n}\n\n\n\n\n", "file_path": "python_ext/pyo3/python_algebra/src/algebra/algebra_bindings.rs", "rank": 16, "score": 219833.79187109403 }, { "content": "// Compute the coefficient of the operation on x^j.\n\nfn coef_adem(algebra: &AdemAlgebra, op_deg: i32, op_idx: usize, mut j: i32) -> bool {\n\n let elt: &AdemBasisElement = algebra.basis_element_from_index(op_deg, op_idx);\n\n // Apply Sq^i to x^j and see if it is zero\n\n for i in elt.ps.iter().rev() {\n\n let c = if j >= 0 {\n\n i32::binomial(ValidPrime::new(2), j, *i as i32)\n\n } else {\n\n i32::binomial(ValidPrime::new(2), -j + (*i as i32) - 1, *i as i32)\n\n };\n\n if c == 0 {\n\n return false;\n\n }\n\n // Somehow j += 1 produces the same answer...\n\n j += *i as i32;\n\n }\n\n true\n\n}\n\n\n", "file_path": "ext/crates/algebra/src/module/rpn.rs", "rank": 17, "score": 218611.61916361368 }, { "content": "pub fn adem_relation_coefficient(p: ValidPrime, x: u32, y: u32, j: u32, e1: u32, e2: u32) -> u32 {\n\n let pi32 = *p as i32;\n\n let x = x as i32;\n\n let y = y as i32;\n\n let j = j as i32;\n\n let e1 = e1 as i32;\n\n let e2 = e2 as i32;\n\n let mut c = i32::binomial(p, (y - j) * (pi32 - 1) + e1 - 1, x - pi32 * j - e2) as u32;\n\n if c == 0 {\n\n return 0;\n\n }\n\n c *= minus_one_to_the_n(*p, (x + j) + e2);\n\n c % *p\n\n}\n\n\n", "file_path": "ext/crates/algebra/src/algebra/combinatorics.rs", "rank": 18, "score": 218097.6525980314 }, { "content": "fn coef_milnor(algebra: &MilnorAlgebra, op_deg: i32, op_idx: usize, mut mod_degree: i32) -> bool {\n\n if mod_degree == 0 {\n\n return false;\n\n }\n\n\n\n let elt: &MilnorBasisElement = algebra.basis_element_from_index(op_deg, op_idx);\n\n\n\n let sum: PPartEntry = elt.p_part.iter().sum();\n\n if mod_degree < 0 {\n\n mod_degree = sum as i32 - mod_degree - 1;\n\n } else if mod_degree < sum as i32 {\n\n return false;\n\n }\n\n\n\n let mod_degree = mod_degree as PPartEntry;\n\n\n\n let mut list = Vec::with_capacity(elt.p_part.len() + 1);\n\n list.push(mod_degree - sum);\n\n list.extend_from_slice(&elt.p_part);\n\n\n", "file_path": "ext/crates/algebra/src/module/rpn.rs", "rank": 19, "score": 213504.12267856582 }, { "content": "pub fn handle_index(dimension : usize, index : isize, dim_or_len : &str, type_to_index : &str) -> PyResult<usize> {\n\n let result = \n\n if index < 0 {\n\n dimension as isize + index\n\n } else {\n\n index\n\n };\n\n check_index(dimension, result, dim_or_len, type_to_index)?;\n\n Ok(result as usize)\n\n}\n\n\n", "file_path": "python_ext/pyo3/python_utils/src/lib.rs", "rank": 20, "score": 213210.90817220288 }, { "content": "pub fn tau_degrees(p: ValidPrime) -> &'static [i32] {\n\n &TAU_DEGREES[PRIME_TO_INDEX_MAP[*p as usize]]\n\n}\n\n\n", "file_path": "ext/crates/algebra/src/algebra/combinatorics.rs", "rank": 21, "score": 204478.89730574965 }, { "content": "pub fn xi_degrees(p: ValidPrime) -> &'static [i32] {\n\n &XI_DEGREES[PRIME_TO_INDEX_MAP[*p as usize]]\n\n}\n\n\n\npub struct TruncatedPolynomialMonomialBasis {\n\n p: ValidPrime,\n\n /// degree => (first_index, number_of_gens)\n\n pub gens: OnceVec<(usize, usize)>,\n\n /// index ==> degree\n\n pub gen_degrees: OnceVec<i32>,\n\n /// degree => max_part => list of partitions with maximum part max_part\n\n parts_by_max: OnceVec<Vec<Vec<FpVector>>>,\n\n pub parts: OnceVec<Vec<FpVector>>,\n\n}\n\n\n\nimpl TruncatedPolynomialMonomialBasis {\n\n pub fn new(p: ValidPrime) -> Self {\n\n let gens = OnceVec::new();\n\n gens.push((0, 0));\n\n let parts_by_max = OnceVec::new();\n", "file_path": "ext/crates/algebra/src/algebra/combinatorics.rs", "rank": 22, "score": 204478.89730574965 }, { "content": "pub fn check_index(dimension : usize, index : isize, dim_or_len : &str, type_to_index : &str) -> PyResult<()> {\n\n if index >= dimension as isize {\n\n Err(exception!(IndexError,\n\n \"Index {} is greater than or equal to {} {} of {}.\", index, dim_or_len, dimension, type_to_index\n\n ))\n\n } else if index < 0 {\n\n Err(exception!(IndexError,\n\n \"Index {} is greater than {} {} of {}.\", index - dimension as isize, dim_or_len, dimension, type_to_index\n\n ))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n\nuse std::sync::{Arc, Weak};\n", "file_path": "python_ext/pyo3/python_utils/src/lib.rs", "rank": 23, "score": 204138.93288547717 }, { "content": "pub fn vecu32_from_py_object(obj : PyObject, argument_name : &str) -> PyResult<Vec<u32>> {\n\n let gil = Python::acquire_gil();\n\n let py = gil.python();\n\n obj.extract(py).or_else(|_err| {\n\n let result : PVector = obj.extract(py)?;\n\n Ok(result.inner()?.clone())\n\n }).map_err(|_err : PyErr| {\n\n python_utils::exception!(ValueError,\n\n \"Argument \\\"{}\\\" expected to be either a list of integers or a PVector.\",\n\n argument_name\n\n )\n\n })\n\n}\n\n\n", "file_path": "python_ext/pyo3/python_algebra/src/algebra/algebra_bindings.rs", "rank": 24, "score": 203297.32072961476 }, { "content": "pub fn ascii_num(n: usize) -> char {\n\n match n {\n\n 0 => ' ',\n\n 1 => '·',\n\n 2 => ':',\n\n 3 => '∴',\n\n 4 => '⁘',\n\n 5 => '⁙',\n\n 6 => '⠿',\n\n 7 => '⡿',\n\n 8 => '⣿',\n\n 9 => '9',\n\n _ => '*',\n\n }\n\n}\n\n\n", "file_path": "ext/src/utils.rs", "rank": 25, "score": 201091.31501983837 }, { "content": "fn first_n_primes(n: usize) -> Vec<u32> {\n\n let mut acc = vec![];\n\n let mut i = 2;\n\n while acc.len() < n {\n\n if is_prime(i) {\n\n acc.push(i);\n\n }\n\n i += 1;\n\n }\n\n acc\n\n}\n\n\n", "file_path": "ext/crates/fp/build.rs", "rank": 26, "score": 198217.6089376591 }, { "content": "fn vec_from_pyobj(p : u32, l : PyObject) -> PyResult<Vec<u32>> {\n\n let gil = Python::acquire_gil();\n\n let py = gil.python();\n\n let vec : Vec<i32> = l.extract(py)?;\n\n let mut result : Vec<u32> = Vec::with_capacity(vec.len());\n\n for i in 0..vec.len() {\n\n result.push(python_utils::reduce_coefficient(p, vec[i]));\n\n }\n\n Ok(result)\n\n}\n\n\n\n#[pymethods]\n\nimpl FpVector {\n\n #[staticmethod]\n\n pub fn from_list(p : u32, l : PyObject) -> PyResult<Self> {\n\n let vec = vec_from_pyobj(p, l)?;\n\n Ok(FpVector::box_and_wrap(FpVectorRust::from_vec(new_valid_prime(p)?, &vec)))\n\n }\n\n\n\n \n", "file_path": "python_ext/pyo3/python_fp/src/vector.rs", "rank": 27, "score": 186890.50027964538 }, { "content": "pub fn get_profile_from_kwargs(p : u32, kwargs : Option<&PyDict>) -> PyResult<MilnorProfileRust> {\n\n let truncated = get_from_kwargs(kwargs, \"truncated\", false)?;\n\n let mut q_part = !0;\n\n let p_part : Vec<u32>;\n\n if p == 2 {\n\n p_part = get_from_kwargs(kwargs, \"profile\", vec![])?;\n\n } else if let Some(x) = \n\n kwargs.and_then(|dict| dict.get_item(\"profile\"))\n\n .map(|value| PyAny::extract::<Vec<Vec<u32>>>(value)) \n\n {\n\n let profile = x?;\n\n if profile.len() != 2 {\n\n return Err(python_utils::exception!(ValueError,\n\n \"For generic MilnorAlgebra profile argument should be a pair of lists [p_part, q_part].\"\n\n ));\n\n }\n\n p_part = profile[0].clone();\n\n q_part = algebra_bindings::bitmask_u32_from_vec(&profile[1]);\n\n } else {\n\n p_part = vec![];\n", "file_path": "python_ext/pyo3/python_algebra/src/algebra/milnor_algebra.rs", "rank": 28, "score": 186046.35620021174 }, { "content": "#[cfg(feature = \"concurrent\")]\n\npub fn iter_s_t(\n\n f: &(impl Fn(u32, i32) -> std::ops::Range<i32> + Sync),\n\n min_s: u32,\n\n min_t: i32,\n\n max_s: u32,\n\n max_t: &(impl Fn(u32) -> i32 + Sync),\n\n) {\n\n use rayon::prelude::*;\n\n\n\n rayon::scope(|scope| {\n\n // Rust does not support recursive closures, so we have to pass everything along as\n\n // arguments.\n\n fn run<'a>(\n\n scope: &rayon::Scope<'a>,\n\n f: &'a (impl Fn(u32, i32) -> std::ops::Range<i32> + Sync + 'a),\n\n max_s: u32,\n\n max_t: &'a (impl Fn(u32) -> i32 + Sync + 'a),\n\n s: u32,\n\n t: i32,\n\n ) {\n", "file_path": "ext/src/utils.rs", "rank": 29, "score": 176742.01775557082 }, { "content": "pub fn rename_submodule(module : &PyModule, name : &str, new_name : &str) -> PyResult<()> {\n\n let submodule = module.get(name)?;\n\n submodule.setattr(\"__name__\", new_name)?;\n\n module.add(new_name, submodule)?;\n\n module.delattr(name)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "python_ext/pyo3/python_utils/src/lib.rs", "rank": 30, "score": 174588.6941523265 }, { "content": "pub fn query_module_only(\n\n prompt: &str,\n\n algebra: Option<AlgebraType>,\n\n) -> anyhow::Result<Resolution<CCC>> {\n\n let module: Config = query::with_default(prompt, \"S_2\", |s| match algebra {\n\n Some(algebra) => (s, algebra).try_into(),\n\n None => s.try_into(),\n\n });\n\n\n\n let save_dir = query::optional(&format!(\"{prompt} save directory\"), |x| {\n\n core::result::Result::<PathBuf, std::convert::Infallible>::Ok(PathBuf::from(x))\n\n });\n\n\n\n construct(module, save_dir).context(\"Failed to load module from save file\")\n\n}\n\n\n\npub enum LoadQuasiInverseOption {\n\n /// Always load quasi inverses\n\n Yes,\n\n /// Load quasi inverses if there is no save file (so that `apply_quasi_inverse` always works)\n", "file_path": "ext/src/utils.rs", "rank": 31, "score": 174427.11202115918 }, { "content": "pub fn query_module(\n\n algebra: Option<AlgebraType>,\n\n load_quasi_inverse: impl Into<LoadQuasiInverseOption>,\n\n) -> anyhow::Result<Resolution<CCC>> {\n\n let mut resolution = query_module_only(\"Module\", algebra)?;\n\n resolution.load_quasi_inverse = match load_quasi_inverse.into() {\n\n LoadQuasiInverseOption::Yes => true,\n\n LoadQuasiInverseOption::No => false,\n\n LoadQuasiInverseOption::IfNoSave => resolution.save_dir().is_none(),\n\n };\n\n\n\n let max_n: i32 = query::with_default(\"Max n\", \"30\", str::parse);\n\n let max_s: u32 = query::with_default(\"Max s\", \"7\", str::parse);\n\n\n\n resolution.compute_through_stem(max_s, max_n);\n\n\n\n Ok(resolution)\n\n}\n\n\n", "file_path": "ext/src/utils.rs", "rank": 32, "score": 174427.11202115918 }, { "content": "pub fn adem_q(\n\n adem_algebra: &AdemAlgebra,\n\n milnor_algebra: &MilnorAlgebra,\n\n result: &mut FpVector,\n\n coeff: u32,\n\n qi: u32,\n\n) {\n\n let p = adem_algebra.prime();\n\n let degree = crate::algebra::combinatorics::tau_degrees(p)[qi as usize];\n\n let mbe = if adem_algebra.generic {\n\n MilnorBasisElement {\n\n degree,\n\n q_part: 1 << qi,\n\n p_part: vec![],\n\n }\n\n } else {\n\n let mut p_part = vec![0; qi as usize + 1];\n\n p_part[qi as usize] = 1;\n\n MilnorBasisElement {\n\n degree,\n\n q_part: 0,\n\n p_part,\n\n }\n\n };\n\n let idx = milnor_algebra.basis_element_to_index(&mbe);\n\n milnor_to_adem_on_basis(adem_algebra, milnor_algebra, result, coeff, degree, idx);\n\n}\n\n\n", "file_path": "ext/crates/algebra/src/change_of_basis.rs", "rank": 33, "score": 170080.8777076493 }, { "content": "pub fn interactive_module_define_fdmodule(\n\n output_json: &mut Value,\n\n p: ValidPrime,\n\n generic: bool,\n\n name: String,\n\n) -> anyhow::Result<()> {\n\n let algebra = Arc::new(SteenrodAlgebra::AdemAlgebra(AdemAlgebra::new(\n\n p, generic, false, false,\n\n )));\n\n\n\n let gens = get_gens()?;\n\n let min_degree = gens.min_degree();\n\n let max_degree = gens.len();\n\n\n\n algebra.compute_basis(max_degree - min_degree);\n\n\n\n let mut graded_dim = BiVec::with_capacity(min_degree, max_degree);\n\n for i in gens.iter().map(Vec::len) {\n\n graded_dim.push(i);\n\n }\n", "file_path": "ext/examples/define_module.rs", "rank": 34, "score": 168038.20136344622 }, { "content": "#[wasm_bindgen]\n\npub fn rust_main() {\n\n // When the `console_error_panic_hook` feature is enabled, we can call the\n\n // `set_panic_hook` function at least once during initialization, and then\n\n // we will get better error messages if our code ever panics.\n\n //\n\n // For more details see\n\n // https://github.com/rustwasm/console_error_panic_hook#readme\n\n // #[cfg(feature = \"console_error_panic_hook\")]\n\n console_error_panic_hook::set_once();\n\n}\n", "file_path": "chart/chart/display_backend/src/lib.rs", "rank": 35, "score": 168038.20136344622 }, { "content": "pub fn adem_plist(\n\n adem_algebra: &AdemAlgebra,\n\n milnor_algebra: &MilnorAlgebra,\n\n result: &mut FpVector,\n\n coeff: u32,\n\n degree: i32,\n\n p_part: PPart,\n\n) {\n\n let mbe = MilnorBasisElement {\n\n degree,\n\n p_part,\n\n q_part: 0,\n\n };\n\n let idx = milnor_algebra.basis_element_to_index(&mbe);\n\n milnor_to_adem_on_basis(adem_algebra, milnor_algebra, result, coeff, degree, idx);\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "ext/crates/algebra/src/change_of_basis.rs", "rank": 36, "score": 168038.20136344622 }, { "content": "pub fn adem_to_milnor(\n\n adem_algebra: &AdemAlgebra,\n\n milnor_algebra: &MilnorAlgebra,\n\n result: &mut FpVector,\n\n coeff: u32,\n\n degree: i32,\n\n input: &FpVector,\n\n) {\n\n let p = milnor_algebra.prime();\n\n for (i, v) in input.iter().enumerate() {\n\n if v == 0 {\n\n continue;\n\n }\n\n adem_to_milnor_on_basis(\n\n adem_algebra,\n\n milnor_algebra,\n\n result,\n\n (coeff * v) % *p,\n\n degree,\n\n i,\n\n );\n\n }\n\n}\n\n\n", "file_path": "ext/crates/algebra/src/change_of_basis.rs", "rank": 37, "score": 168038.20136344622 }, { "content": "pub fn milnor_to_adem(\n\n adem_algebra: &AdemAlgebra,\n\n milnor_algebra: &MilnorAlgebra,\n\n result: &mut FpVector,\n\n coeff: u32,\n\n degree: i32,\n\n input: &FpVector,\n\n) {\n\n let p = milnor_algebra.prime();\n\n for (i, v) in input.iter().enumerate() {\n\n if v == 0 {\n\n continue;\n\n }\n\n milnor_to_adem_on_basis(\n\n adem_algebra,\n\n milnor_algebra,\n\n result,\n\n (coeff * v) % *p,\n\n degree,\n\n i,\n\n );\n\n }\n\n}\n\n\n", "file_path": "ext/crates/algebra/src/change_of_basis.rs", "rank": 38, "score": 168038.20136344622 }, { "content": "pub fn interactive_module_define_fpmodule(\n\n output_json: &mut Value,\n\n p: ValidPrime,\n\n generic: bool,\n\n name: String,\n\n) -> anyhow::Result<()> {\n\n output_json[\"type\"] = Value::from(\"finitely presented module\");\n\n\n\n let gens = get_gens()?;\n\n let min_degree = gens.min_degree();\n\n let max_degree = 20;\n\n\n\n let steenrod_algebra = Arc::new(SteenrodAlgebra::AdemAlgebra(AdemAlgebra::new(\n\n p, generic, false, false,\n\n )));\n\n let adem_algebra = AdemAlgebra::new(p, generic, false, false);\n\n let milnor_algebra = MilnorAlgebra::new(p);\n\n\n\n steenrod_algebra.compute_basis(max_degree - min_degree);\n\n adem_algebra.compute_basis(max_degree - min_degree);\n", "file_path": "ext/examples/define_module.rs", "rank": 39, "score": 168038.20136344622 }, { "content": "fn compute_kernel_image<M: BoundedModule, F: ModuleHomomorphism, G: ModuleHomomorphism>(\n\n source: &QM<M>,\n\n augmentation_map: Option<Arc<F>>,\n\n preserve_map: Option<&G>,\n\n keep: Option<&Subspace>,\n\n t: i32,\n\n) -> (Matrix, Matrix)\n\nwhere\n\n M::Algebra: GeneratedAlgebra,\n\n{\n\n let algebra = source.algebra();\n\n let p = algebra.prime();\n\n\n\n let mut generators: Vec<(i32, usize)> = Vec::new();\n\n let mut target_degrees = Vec::new();\n\n let mut padded_target_degrees: Vec<usize> = Vec::new();\n\n\n\n let source_orig_dimension = source.module.dimension(t);\n\n let source_dimension = source.dimension(t);\n\n\n", "file_path": "ext/src/yoneda.rs", "rank": 40, "score": 167493.0372385626 }, { "content": "// This is currently pretty inefficient... We should memoize results so that we don't repeatedly\n\n// recompute the same inverse.\n\npub fn milnor_to_adem_on_basis(\n\n adem_algebra: &AdemAlgebra,\n\n milnor_algebra: &MilnorAlgebra,\n\n result: &mut FpVector,\n\n coeff: u32,\n\n degree: i32,\n\n idx: usize,\n\n) {\n\n if milnor_algebra.generic() {\n\n milnor_to_adem_on_basis_generic(adem_algebra, milnor_algebra, result, coeff, degree, idx);\n\n } else {\n\n milnor_to_adem_on_basis_2(adem_algebra, milnor_algebra, result, coeff, degree, idx);\n\n }\n\n}\n\n\n", "file_path": "ext/crates/algebra/src/change_of_basis.rs", "rank": 41, "score": 166075.66236813657 }, { "content": "// Outputs in the Adem basis.\n\npub fn evaluate_algebra_adem(\n\n adem_algebra: &AdemAlgebra,\n\n milnor_algebra: &MilnorAlgebra,\n\n input: &str,\n\n) -> anyhow::Result<(i32, FpVector)> {\n\n evaluate_algebra_tree(adem_algebra, milnor_algebra, parse_algebra(input)?)\n\n}\n\n\n", "file_path": "ext/crates/algebra/src/steenrod_evaluator.rs", "rank": 42, "score": 166075.66236813657 }, { "content": "// Outputs in the Milnor basis\n\npub fn evaluate_algebra_milnor(\n\n adem_algebra: &AdemAlgebra,\n\n milnor_algebra: &MilnorAlgebra,\n\n input: &str,\n\n) -> anyhow::Result<(i32, FpVector)> {\n\n let adem_result = evaluate_algebra_adem(adem_algebra, milnor_algebra, input);\n\n if let Ok((degree, adem_vector)) = adem_result {\n\n let mut milnor_vector = FpVector::new(adem_vector.prime(), adem_vector.len());\n\n change_of_basis::adem_to_milnor(\n\n adem_algebra,\n\n milnor_algebra,\n\n &mut milnor_vector,\n\n 1,\n\n degree,\n\n &adem_vector,\n\n );\n\n Ok((degree, milnor_vector))\n\n } else {\n\n adem_result\n\n }\n\n}\n\n\n", "file_path": "ext/crates/algebra/src/steenrod_evaluator.rs", "rank": 43, "score": 166075.66236813657 }, { "content": "pub fn adem_to_milnor_on_basis(\n\n adem_algebra: &AdemAlgebra,\n\n milnor_algebra: &MilnorAlgebra,\n\n result: &mut FpVector,\n\n coeff: u32,\n\n degree: i32,\n\n idx: usize,\n\n) {\n\n let elt = adem_algebra.basis_element_from_index(degree, idx);\n\n let p = milnor_algebra.prime();\n\n let q = milnor_algebra.q() as u32;\n\n let dim = milnor_algebra.dimension(elt.degree);\n\n if dim == 1 {\n\n result.set_entry(0, coeff);\n\n return;\n\n }\n\n let mut bocksteins = elt.bocksteins;\n\n let mbe = MilnorBasisElement {\n\n degree: (q * elt.ps[0] + (bocksteins & 1)) as i32,\n\n q_part: bocksteins & 1,\n", "file_path": "ext/crates/algebra/src/change_of_basis.rs", "rank": 44, "score": 166075.66236813657 }, { "content": "pub fn get_expression_to_vector<F>(\n\n prompt: &str,\n\n output_vec: &mut FpVector,\n\n string_to_basis_element: F,\n\n) where\n\n F: for<'a> Fn(&'a str) -> Option<usize>,\n\n{\n\n 'outer: loop {\n\n let result: String = query::raw(prompt, str::parse);\n\n if result == \"0\" {\n\n output_vec.set_to_zero();\n\n break;\n\n }\n\n for term in result.split('+') {\n\n let term = term.trim();\n\n let parts: Vec<&str> = term.splitn(2, ' ').collect();\n\n if parts.len() == 1 {\n\n match string_to_basis_element(parts[0]) {\n\n Some(i) => output_vec.add_basis_element(i, 1),\n\n None => {\n", "file_path": "ext/examples/define_module.rs", "rank": 45, "score": 159619.88116845128 }, { "content": "fn is_prime(i: u32) -> bool {\n\n (2..i).all(|k| i % k != 0)\n\n}\n", "file_path": "ext/crates/fp/build.rs", "rank": 46, "score": 157436.23081797772 }, { "content": "/// This constructs a resolution resolving a module according to the specifications\n\n///\n\n/// # Arguments\n\n/// - `module_spec`: A specification for the module. This is any object that implements\n\n/// [`TryInto<Config>`] (with appropriate error bounds). In practice, we can supply\n\n/// - A [`Config`] object itself\n\n/// - `(json, algebra)`: The first argument is a [`serde_json::Value`] that specifies the\n\n/// module; the second argument is either a string (`\"milnor\"` or `\"adem\"`) or an\n\n/// [`algebra::AlgebraType`] object.\n\n/// - `(module_name, algebra)`: The first argument is the name of the module and the second is\n\n/// as above. Modules are searched in the current directory, `$CWD/steenrod_modules` and\n\n/// `ext/steenrod_modules`. The modules can be shifted by appending e.g. `S_2[2]`.\n\n/// - `module_spec`, a single `&str` of the form `module_name@algebra`, where `module_name` and\n\n/// `algebra` are as above.\n\n/// - `save_file`: The save file for the module. If it points to an invalid save file, an error is\n\n/// returned.\n\npub fn construct<T, E>(module_spec: T, save_dir: Option<PathBuf>) -> anyhow::Result<Resolution<CCC>>\n\nwhere\n\n anyhow::Error: From<E>,\n\n T: TryInto<Config, Error = E>,\n\n{\n\n let Config {\n\n module: json,\n\n algebra,\n\n } = module_spec.try_into()?;\n\n\n\n let algebra = Arc::new(SteenrodAlgebra::from_json(&json, algebra)?);\n\n let module = Arc::new(FiniteModule::from_json(Arc::clone(&algebra), &json)?);\n\n let mut chain_complex = Arc::new(FiniteChainComplex::ccdz(Arc::clone(&module)));\n\n\n\n let cofiber = &json[\"cofiber\"];\n\n if !cofiber.is_null() {\n\n use crate::chain_complex::ChainMap;\n\n use crate::yoneda::yoneda_representative;\n\n use algebra::module::homomorphism::FreeModuleHomomorphism;\n\n use algebra::module::BoundedModule;\n", "file_path": "ext/src/utils.rs", "rank": 47, "score": 151688.03543109342 }, { "content": " def run(): \n\n self.A.compute_basis( x + y + 1)\n\n self.target_max_degree = n\n\n self.rust_res.extend_through_degree( x + y + 2)\n\n t0 = time.time()\n\n for x in range(n):\n\n for y in range(n):\n\n self.step_if_needed(*xy_to_st(x,y))\n\n t1 = time.time()\n\n time_elapsed = t1 - t0\n\n print(f\"Time taken to resolve {self.name} from stem {self.max_degree + 1} to stem {self.target_max_degree}:\", time_elapsed)\n", "file_path": "python_ext/ext/resolution.py", "rank": 48, "score": 150159.64649101542 }, { "content": "/// Create a new FiniteChainComplex with `num_s` many non-zero modules.\n\nfn create_chain_complex(num_s: usize) -> FiniteChainComplex {\n\n let algebra: Arc<SteenrodAlgebra> = Arc::new(MilnorAlgebra::new(TWO).into());\n\n\n\n let zero_module = Arc::new(FreeModule::new(Arc::clone(&algebra), String::from(\"0\"), 0));\n\n\n\n let mut modules: Vec<Arc<FreeModule>> = Vec::with_capacity(num_s);\n\n let mut differentials: Vec<Arc<FreeModuleHomomorphism>> = Vec::with_capacity(num_s);\n\n for _ in 0..num_s {\n\n modules.push(Arc::new(FreeModule::new(\n\n Arc::clone(&algebra),\n\n String::new(),\n\n 0,\n\n )));\n\n }\n\n differentials.push(Arc::new(FreeModuleHomomorphism::new(\n\n Arc::clone(&modules[0]),\n\n Arc::clone(&zero_module),\n\n 0,\n\n )));\n\n for s in 1..num_s {\n", "file_path": "ext/examples/bruner.rs", "rank": 49, "score": 149436.48183610543 }, { "content": "pub fn evaluate_module<M: Module>(\n\n adem_algebra: &AdemAlgebra,\n\n milnor_algebra: &MilnorAlgebra,\n\n module: &M,\n\n basis_elt_lookup: &HashMap<String, (i32, usize)>,\n\n input: &str,\n\n) -> anyhow::Result<(i32, FpVector)> {\n\n evaluate_module_tree(\n\n adem_algebra,\n\n milnor_algebra,\n\n module,\n\n basis_elt_lookup,\n\n parse_module(input)?,\n\n )\n\n}\n\n\n", "file_path": "ext/crates/algebra/src/steenrod_evaluator.rs", "rank": 50, "score": 149206.99952820002 }, { "content": "type FiniteChainComplex = FCC<FreeModule, FreeModuleHomomorphism>;\n\n\n\nconst TWO: ValidPrime = ValidPrime::new(2);\n\n\n", "file_path": "ext/examples/bruner.rs", "rank": 51, "score": 148136.1262999518 }, { "content": "fn main() {\n\n let resolution = construct(\"S_2@milnor\", Some(\"/tmp/.ext_bench/S_2_milnor\".into())).unwrap();\n\n resolution.algebra().compute_basis(100);\n\n let start = std::time::Instant::now();\n\n resolution.compute_through_bidegree(50, 100);\n\n println!(\"Time: {:?}\", start.elapsed());\n\n}\n", "file_path": "ext/benches/load_resolution.rs", "rank": 52, "score": 148060.6847928397 }, { "content": "pub fn raw<S, E: Display>(\n\n prompt: &str,\n\n mut parser: impl for<'a> FnMut(&'a str) -> Result<S, E>,\n\n) -> S {\n\n let cli: Option<(String, Result<S, E>)> = ARGV.with(|argv| {\n\n let arg = argv.borrow_mut().next()?;\n\n let result = parser(&arg);\n\n Some((arg, result))\n\n });\n\n\n\n match cli {\n\n Some((arg, Ok(res))) => {\n\n eprintln!(\"{}: {}\", prompt, arg);\n\n return res;\n\n }\n\n Some((arg, Err(e))) => {\n\n eprintln!(\"{}: {}\", prompt, arg);\n\n eprintln!(\"{:#}\", e);\n\n std::process::exit(1);\n\n }\n", "file_path": "ext/crates/query/src/lib.rs", "rank": 53, "score": 145833.429356336 }, { "content": "pub fn yoneda_representative_element<TCM, TC, CC>(\n\n cc: Arc<CC>,\n\n s: u32,\n\n t: i32,\n\n idx: usize,\n\n) -> Yoneda<CC>\n\nwhere\n\n TCM: BoundedModule<Algebra = SteenrodAlgebra>,\n\n TC: BoundedChainComplex<Algebra = SteenrodAlgebra, Module = TCM>,\n\n CC: AugmentedChainComplex<\n\n Algebra = SteenrodAlgebra,\n\n TargetComplex = TC,\n\n Module = FreeModule<SteenrodAlgebra>,\n\n ChainMap = FreeModuleHomomorphism<TCM>,\n\n >,\n\n{\n\n let p = cc.prime();\n\n\n\n let target = FDModule::new(cc.algebra(), \"\".to_string(), BiVec::from_vec(0, vec![1]));\n\n let map = FreeModuleHomomorphism::new(cc.module(s), Arc::new(target), t);\n", "file_path": "ext/src/yoneda.rs", "rank": 54, "score": 145833.429356336 }, { "content": "pub fn with_default<S, E: Display>(\n\n prompt: &str,\n\n default: &str,\n\n mut parser: impl for<'a> FnMut(&'a str) -> Result<S, E>,\n\n) -> S {\n\n raw(&format!(\"{} (default: {})\", prompt, default), |x| {\n\n if x.is_empty() {\n\n parser(default)\n\n } else {\n\n parser(x)\n\n }\n\n })\n\n}\n\n\n", "file_path": "ext/crates/query/src/lib.rs", "rank": 55, "score": 145833.429356336 }, { "content": "pub fn yes_no(prompt: &str) -> bool {\n\n with_default(prompt, \"y\", |response| {\n\n if response.starts_with('y') || response.starts_with('n') {\n\n Ok(response.starts_with('y'))\n\n } else {\n\n Err(format!(\n\n \"unrecognized response '{}'. Should be '(y)es' or '(n)o'\",\n\n response\n\n ))\n\n }\n\n })\n\n}\n\n\n", "file_path": "ext/crates/query/src/lib.rs", "rank": 56, "score": 145833.429356336 }, { "content": "pub fn optional<S, E: Display>(\n\n prompt: &str,\n\n mut parser: impl for<'a> FnMut(&'a str) -> Result<S, E>,\n\n) -> Option<S> {\n\n raw(&format!(\"{} (optional)\", prompt), |x| {\n\n if x.is_empty() {\n\n Ok(None)\n\n } else {\n\n parser(x).map(Some)\n\n }\n\n })\n\n}\n\n\n", "file_path": "ext/crates/query/src/lib.rs", "rank": 57, "score": 145833.429356336 }, { "content": "fn rasterize_polygon(polygon : &[Vector], width : u32, height : u32) -> Raster<Matte8> {\n\n\tlet mut path_builder = Path2D::default();\n\n\tpath_builder = path_builder.absolute().move_to(polygon[0].x, polygon[0].y);\n\n\tfor v in &polygon[1..] {\n\n\t\tpath_builder = path_builder.line_to(v.x, v.y);\n\n\t}\n\n\tlet path = path_builder.close().finish();\n\n\tlet mut p = Plotter::new(Raster::<Matte8>::with_clear(width, height));\n\n p.fill(FillRule::NonZero, path.iter(), Matte8::new(255));\n\n p.raster()\n\n}\n\n\n\n\n\n\n\npub struct ConvexHull {\n\n\tpub outline : Vec<Vector>,\n\n\tpub inner_radius : f32,\n\n\tpub outer_radius : f32,\n\n\tbounding_box : Box2D<f32>\n\n}\n", "file_path": "chart/chart/display_backend/src/convex_hull.rs", "rank": 58, "score": 145312.26729125346 }, { "content": "pub fn must_be_immutable_exception() -> pyo3::PyErr {\n\n exception!(\n\n ReferenceError,\n\n \"Reference must be immutable for desired operation!\"\n\n )\n\n}\n\n\n", "file_path": "python_ext/pyo3/python_utils/src/lib.rs", "rank": 59, "score": 143957.78848084074 }, { "content": "pub fn must_be_mutable_exception() -> pyo3::PyErr {\n\n exception!(\n\n ReferenceError,\n\n \"Reference must be mutable for desired operation!\"\n\n )\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! must_be_mutable_panic {\n\n () => { panic!( \"Attempted to mutate immutable reference!\" ) }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! must_be_immutable_panic {\n\n () => { panic!(\"Reference must be immutable for desired operation!\") }\n\n}\n\n\n\n\n", "file_path": "python_ext/pyo3/python_utils/src/lib.rs", "rank": 60, "score": 143957.78848084074 }, { "content": "pub fn null_ptr_exception() -> pyo3::PyErr {\n\n exception!(ReferenceError, \"Null pointer!\")\n\n}\n\n\n", "file_path": "python_ext/pyo3/python_utils/src/lib.rs", "rank": 61, "score": 143957.78848084074 }, { "content": "#[test]\n\n#[should_panic]\n\nfn test_checksum() {\n\n use std::fs::OpenOptions;\n\n use std::io::{Seek, SeekFrom, Write};\n\n\n\n let tempdir = tempfile::TempDir::new().unwrap();\n\n\n\n construct(\"S_2\", Some(tempdir.path().into()))\n\n .unwrap()\n\n .compute_through_bidegree(2, 2);\n\n\n\n let mut path = tempdir.path().to_owned();\n\n path.push(\"differentials/2_2_differential\");\n\n\n\n let mut file = OpenOptions::new()\n\n .read(true)\n\n .write(true)\n\n .open(path)\n\n .unwrap();\n\n\n\n file.seek(SeekFrom::Start(41)).unwrap();\n\n file.write_all(&[1]).unwrap();\n\n\n\n construct(\"S_2\", Some(tempdir.path().into()))\n\n .unwrap()\n\n .compute_through_bidegree(2, 2);\n\n}\n", "file_path": "ext/tests/save_load_resolution.rs", "rank": 62, "score": 143531.68270724005 }, { "content": "#[test]\n\n#[should_panic]\n\nfn wrong_algebra() {\n\n let tempdir = tempfile::TempDir::new().unwrap();\n\n let resolution1 = construct(\"S_2@adem\", Some(tempdir.path().into())).unwrap();\n\n resolution1.compute_through_bidegree(2, 2);\n\n\n\n let resolution2 = construct(\"S_2@milnor\", Some(tempdir.path().into())).unwrap();\n\n resolution2.compute_through_bidegree(2, 2);\n\n}\n\n\n", "file_path": "ext/tests/save_load_resolution.rs", "rank": 63, "score": 143531.68270724005 }, { "content": "pub fn check_not_null<T>(ptr : *mut T) -> pyo3::PyResult<()> {\n\n if ptr.is_null() {\n\n Err(null_ptr_exception())\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "python_ext/pyo3/python_utils/src/lib.rs", "rank": 64, "score": 141593.54670153713 }, { "content": "/// This function produces a quasi-isomorphic quotient of `cc` (as an augmented chain complex) that `map` factors through\n\npub fn yoneda_representative<TCM, TC, CC, CMM>(\n\n cc: Arc<CC>,\n\n map: ChainMap<FreeModuleHomomorphism<CMM>>,\n\n) -> Yoneda<CC>\n\nwhere\n\n TCM: BoundedModule<Algebra = SteenrodAlgebra>,\n\n TC: BoundedChainComplex<Algebra = SteenrodAlgebra, Module = TCM>,\n\n CC: AugmentedChainComplex<\n\n Algebra = SteenrodAlgebra,\n\n TargetComplex = TC,\n\n Module = FreeModule<SteenrodAlgebra>,\n\n ChainMap = FreeModuleHomomorphism<TCM>,\n\n >,\n\n CMM: BoundedModule<Algebra = SteenrodAlgebra>,\n\n{\n\n yoneda_representative_with_strategy(\n\n cc,\n\n map,\n\n |module: &FreeModule<SteenrodAlgebra>, subspace: &Subspace, t: i32, i: usize| {\n\n let opgen = module.index_to_op_gen(t, i);\n", "file_path": "ext/src/yoneda.rs", "rank": 65, "score": 141522.71285114583 }, { "content": "#[test]\n\n#[should_panic]\n\nfn test_tempdir_lock() {\n\n let tempdir = tempfile::TempDir::new().unwrap();\n\n let resolution1 = construct(\"S_2\", Some(tempdir.path().into())).unwrap();\n\n resolution1.compute_through_bidegree(5, 5);\n\n\n\n lock_tempdir(tempdir.path());\n\n resolution1.compute_through_bidegree(6, 6);\n\n}\n\n\n", "file_path": "ext/tests/save_load_resolution.rs", "rank": 66, "score": 141405.8829416403 }, { "content": "#[test]\n\nfn test_tempdir_unlock() {\n\n let tempdir = tempfile::TempDir::new().unwrap();\n\n let resolution1 = construct(\"S_2\", Some(tempdir.path().into())).unwrap();\n\n resolution1.compute_through_bidegree(5, 5);\n\n\n\n lock_tempdir(tempdir.path());\n\n unlock_tempdir(tempdir.path());\n\n resolution1.compute_through_bidegree(6, 6);\n\n}\n\n\n", "file_path": "ext/tests/save_load_resolution.rs", "rank": 67, "score": 141405.8829416403 }, { "content": "#[test]\n\nfn test_load_smaller() {\n\n let tempdir = tempfile::TempDir::new().unwrap();\n\n\n\n let resolution1 = construct(\"S_2\", Some(tempdir.path().into())).unwrap();\n\n resolution1.compute_through_stem(8, 14);\n\n\n\n let resolution2 = construct(\"S_2\", Some(tempdir.path().into())).unwrap();\n\n resolution2.compute_through_stem(5, 8);\n\n}\n\n\n", "file_path": "ext/tests/save_load_resolution.rs", "rank": 68, "score": 141405.8829416403 }, { "content": "#[test]\n\nfn test_save_load() {\n\n let tempdir = tempfile::TempDir::new().unwrap();\n\n let mut resolution1 = construct(\"S_2\", Some(tempdir.path().into())).unwrap();\n\n\n\n resolution1.compute_through_bidegree(10, 6);\n\n resolution1.compute_through_bidegree(6, 10);\n\n resolution1.should_save = false;\n\n\n\n let mut resolution2 = construct(\"S_2\", Some(tempdir.path().into())).unwrap();\n\n\n\n // Check that we are not writing anything new.\n\n lock_tempdir(tempdir.path());\n\n resolution2.compute_through_bidegree(10, 6);\n\n resolution2.compute_through_bidegree(6, 10);\n\n\n\n resolution2.should_save = false;\n\n\n\n resolution1.compute_through_bidegree(20, 20);\n\n resolution2.compute_through_bidegree(20, 20);\n\n\n", "file_path": "ext/tests/save_load_resolution.rs", "rank": 69, "score": 141405.8829416403 }, { "content": "#[test]\n\nfn test_load_secondary() {\n\n let tempdir = tempfile::TempDir::new().unwrap();\n\n\n\n let mut resolution1 = construct(\"S_2\", Some(tempdir.path().into())).unwrap();\n\n resolution1.load_quasi_inverse = false;\n\n resolution1.compute_through_stem(4, 10);\n\n\n\n let lift1 = SecondaryResolution::new(Arc::new(resolution1));\n\n lift1.initialize_homotopies();\n\n lift1.compute_composites();\n\n lift1.compute_intermediates();\n\n\n\n let mut dir = tempdir.path().to_owned();\n\n let mut is_empty = |d| {\n\n dir.push(d);\n\n let result = dir.read_dir().unwrap().next().is_none();\n\n dir.pop();\n\n result\n\n };\n\n\n", "file_path": "ext/tests/save_load_resolution.rs", "rank": 70, "score": 141405.8829416403 }, { "content": "#[test]\n\nfn test_save_load_stem() {\n\n let tempdir = tempfile::TempDir::new().unwrap();\n\n\n\n let resolution1 = construct(\"S_2\", Some(tempdir.path().into())).unwrap();\n\n\n\n resolution1.compute_through_stem(10, 10);\n\n\n\n let resolution2 = construct(\"S_2\", Some(tempdir.path().into())).unwrap();\n\n lock_tempdir(tempdir.path());\n\n\n\n resolution2.compute_through_stem(10, 10);\n\n\n\n assert_eq!(\n\n resolution1.graded_dimension_string(),\n\n resolution2.graded_dimension_string()\n\n );\n\n\n\n assert_eq!(\n\n resolution1.differential(5).quasi_inverse(7),\n\n resolution2.differential(5).quasi_inverse(7)\n\n );\n\n unlock_tempdir(tempdir.path());\n\n}\n\n\n", "file_path": "ext/tests/save_load_resolution.rs", "rank": 71, "score": 139365.15024138044 }, { "content": "#[test]\n\nfn test_save_load_resume() {\n\n let tempdir = tempfile::TempDir::new().unwrap();\n\n\n\n let resolution1 = construct(\"S_2\", Some(tempdir.path().into())).unwrap();\n\n resolution1.compute_through_stem(8, 14);\n\n\n\n let resolution2 = construct(\"S_2\", Some(tempdir.path().into())).unwrap();\n\n lock_tempdir(tempdir.path());\n\n resolution2.compute_through_stem(8, 14);\n\n unlock_tempdir(tempdir.path());\n\n\n\n resolution1.compute_through_stem(5, 19);\n\n lock_tempdir(tempdir.path());\n\n resolution2.compute_through_stem(5, 19);\n\n\n\n assert_eq!(\n\n resolution1.graded_dimension_string(),\n\n resolution2.graded_dimension_string()\n\n );\n\n unlock_tempdir(tempdir.path());\n\n}\n\n\n", "file_path": "ext/tests/save_load_resolution.rs", "rank": 72, "score": 139365.15024138044 }, { "content": "pub fn null_ptr_exception_if_none<T>(opt : Option<T>) -> pyo3::PyResult<()> {\n\n opt.map_or_else(\n\n || Err(null_ptr_exception()),\n\n |_x| Ok(())\n\n )\n\n}\n\n\n", "file_path": "python_ext/pyo3/python_utils/src/lib.rs", "rank": 73, "score": 138717.75003991186 }, { "content": "pub fn convert_tessellation_error(err : TessellationError) -> JsValue {\n\n JsValue::from_str(&format!(\"{:?}\", err))\n\n}\n", "file_path": "chart/chart/display_backend/src/error.rs", "rank": 74, "score": 137072.48174762088 }, { "content": "fn push(n: usize) {\n\n let v = OnceVec::new();\n\n for i in 0..n {\n\n v.push(i);\n\n }\n\n assert_eq!(v.len(), n);\n\n}\n\n\n", "file_path": "ext/crates/once/benches/criterion.rs", "rank": 75, "score": 136192.2270270105 }, { "content": "pub trait FPModuleT: Module {\n\n fn gen_idx_to_fp_idx(&self, degree: i32, index: usize) -> isize;\n\n fn fp_idx_to_gen_idx(&self, degree: i32, index: usize) -> usize;\n\n fn generators(&self) -> Arc<FreeModule<Self::Algebra>>;\n\n}\n\n\n\nimpl<A: Algebra> FPModuleT for FPModule<A> {\n\n fn fp_idx_to_gen_idx(&self, degree: i32, index: usize) -> usize {\n\n self.fp_idx_to_gen_idx(degree, index)\n\n }\n\n fn gen_idx_to_fp_idx(&self, degree: i32, index: usize) -> isize {\n\n self.gen_idx_to_fp_idx(degree, index)\n\n }\n\n\n\n fn generators(&self) -> Arc<FreeModule<A>> {\n\n self.generators()\n\n }\n\n}\n", "file_path": "ext/crates/algebra/src/module/homomorphism/fp_module_homomorphism.rs", "rank": 76, "score": 134412.5308050999 }, { "content": "/// Each `ModuleHomomorphism` may come with auxiliary data, namely the kernel, image and\n\n/// quasi_inverse at each degree (the quasi-inverse is a map that is a right inverse when\n\n/// restricted to the image). These are computed via\n\n/// [`ModuleHomomorphism::compute_auxiliary_data_through_degree`] and retrieved through\n\n/// [`ModuleHomomorphism::kernel`], [`ModuleHomomorphism::quasi_inverse`] and\n\n/// [`ModuleHomomorphism::image`].\n\n///\n\n/// Note that an instance of a `ModuleHomomorphism` need not have the data available, even after\n\n/// `compute_auxiliary_data_through_degree` is invoked.\n\npub trait ModuleHomomorphism: Send + Sync {\n\n type Source: Module;\n\n type Target: Module<Algebra = <Self::Source as Module>::Algebra>;\n\n const CUSTOM_QI: bool = false;\n\n\n\n fn source(&self) -> Arc<Self::Source>;\n\n fn target(&self) -> Arc<Self::Target>;\n\n fn degree_shift(&self) -> i32;\n\n\n\n /// Calling this function when `input_idx < source().dimension(input_degree)` results in\n\n /// undefined behaviour. Implementations are encouraged to panic when this happens (this is\n\n /// usually the case because of out-of-bounds errors.\n\n fn apply_to_basis_element(\n\n &self,\n\n result: SliceMut,\n\n coeff: u32,\n\n input_degree: i32,\n\n input_idx: usize,\n\n );\n\n\n", "file_path": "ext/crates/algebra/src/module/homomorphism/mod.rs", "rank": 77, "score": 134273.88375800382 }, { "content": "pub fn load_module_json(name: &str) -> anyhow::Result<Value> {\n\n let current_dir = std::env::current_dir().unwrap();\n\n let relative_dir = current_dir.join(\"steenrod_modules\");\n\n\n\n for path in &[\n\n current_dir,\n\n relative_dir,\n\n PathBuf::from(STATIC_MODULES_PATH),\n\n ] {\n\n let mut path = path.clone();\n\n path.push(name);\n\n path.set_extension(\"json\");\n\n if let Ok(s) = std::fs::read_to_string(&path) {\n\n return serde_json::from_str(&s)\n\n .with_context(|| format!(\"Failed to load module json at {:?}\", path));\n\n }\n\n }\n\n Err(anyhow!(\"Module file '{}' not found\", name))\n\n}\n\n\n\nconst RED_ANSI_CODE: &str = \"\\x1b[31;1m\";\n\nconst WHITE_ANSI_CODE: &str = \"\\x1b[0m\";\n\n\n", "file_path": "ext/src/utils.rs", "rank": 78, "score": 134223.09043140325 }, { "content": "#[allow(clippy::cognitive_complexity)]\n\npub fn yoneda_representative_with_strategy<TCM, TC, CC, CMM, F>(\n\n cc: Arc<CC>,\n\n map: ChainMap<FreeModuleHomomorphism<CMM>>,\n\n strategy: F,\n\n) -> Yoneda<CC>\n\nwhere\n\n TCM: BoundedModule<Algebra = SteenrodAlgebra>,\n\n TC: BoundedChainComplex<Algebra = SteenrodAlgebra, Module = TCM>,\n\n CC: AugmentedChainComplex<\n\n Algebra = SteenrodAlgebra,\n\n TargetComplex = TC,\n\n Module = FreeModule<SteenrodAlgebra>,\n\n ChainMap = FreeModuleHomomorphism<TCM>,\n\n >,\n\n CMM: BoundedModule<Algebra = SteenrodAlgebra>,\n\n F: Fn(&CC::Module, &Subspace, i32, usize) -> i32,\n\n{\n\n let p = cc.prime();\n\n let target_cc = cc.target();\n\n\n", "file_path": "ext/src/yoneda.rs", "rank": 79, "score": 134223.09043140325 }, { "content": "fn main() -> anyhow::Result<()> {\n\n let res = query_module(None, false)?;\n\n\n\n for s in (0..res.next_homological_degree()).rev() {\n\n let module = res.module(s);\n\n for t in res.min_degree() + s as i32..=module.max_computed_degree() {\n\n print!(\"{}, \", module.dimension(t));\n\n }\n\n println!()\n\n }\n\n Ok(())\n\n}\n", "file_path": "ext/examples/resolution_size.rs", "rank": 80, "score": 133771.9784350034 }, { "content": "fn benchmark_pair(module_name: &str, max_degree: i32, n_times: u128) {\n\n println!();\n\n benchmark(module_name, max_degree, \"adem\", n_times);\n\n benchmark(module_name, max_degree, \"milnor\", n_times);\n\n}\n\n\n", "file_path": "ext/benches/resolve.rs", "rank": 81, "score": 132643.3590932591 }, { "content": "pub fn get_gens() -> anyhow::Result<BiVec<Vec<String>>> {\n\n // Query for generators\n\n eprintln!(\"Input generators. Press return to finish.\");\n\n stderr().flush()?;\n\n\n\n let mut gens: BiVec<Vec<_>> = BiVec::new(0);\n\n loop {\n\n let gen_deg: Option<i32> = query::optional(\"Generator degree\", str::parse);\n\n if gen_deg.is_none() {\n\n eprintln!(\"This is the list of generators and degrees:\");\n\n for (i, deg_i_gens) in gens.iter_enum() {\n\n for gen in deg_i_gens.iter() {\n\n eprint!(\"({}, {}) \", i, gen);\n\n }\n\n }\n\n eprintln!();\n\n if query::yes_no(\"Is it okay?\") {\n\n break;\n\n } else {\n\n if query::yes_no(\"Start over?\") {\n", "file_path": "ext/examples/define_module.rs", "rank": 82, "score": 132474.57709585162 }, { "content": "pub fn parse_module_name(module_name: &str) -> anyhow::Result<Value> {\n\n let mut args = module_name.split('[');\n\n let module_file = args.next().unwrap();\n\n let mut module = load_module_json(module_file)\n\n .with_context(|| format!(\"Failed to load module file {}\", module_file))?;\n\n if let Some(shift) = args.next() {\n\n let shift: i64 = match shift.strip_suffix(']') {\n\n None => return Err(anyhow!(\"Unterminated shift [\")),\n\n Some(x) => x\n\n .parse()\n\n .with_context(|| format!(\"Cannot parse shift value ({}) as an integer\", x))?,\n\n };\n\n if let Some(gens) = module[\"gens\"].as_object_mut() {\n\n for entry in gens.into_iter() {\n\n *entry.1 = (entry.1.as_i64().unwrap() + shift).into()\n\n }\n\n }\n\n }\n\n Ok(module)\n\n}\n", "file_path": "ext/src/utils.rs", "rank": 83, "score": 132474.57709585162 }, { "content": "#[rstest]\n\n#[trace]\n\n#[case(\"S_2\", 30)]\n\n#[case(\"C2\", 30)]\n\n#[case(\"Joker\", 30)]\n\n#[case(\"RP4\", 30)]\n\n#[case(\"RP_inf\", 30)]\n\n#[case(\"RP_-4_inf\", 30)]\n\n#[case(\"Csigma\", 30)]\n\n#[case(\"S_3\", 30)]\n\n#[case(\"Calpha\", 30)]\n\nfn compare(#[case] module_name: &str, #[case] max_degree: i32) {\n\n let a = construct((module_name, \"adem\"), None).unwrap();\n\n let b = construct((module_name, \"milnor\"), None).unwrap();\n\n\n\n a.compute_through_bidegree(max_degree as u32, max_degree);\n\n b.compute_through_bidegree(max_degree as u32, max_degree);\n\n\n\n assert_eq!(a.graded_dimension_string(), b.graded_dimension_string());\n\n}\n", "file_path": "ext/tests/milnor_vs_adem.rs", "rank": 84, "score": 130956.82661138306 }, { "content": "pub fn get_from_kwargs<'a, T : pyo3::FromPyObject<'a>>(\n\n kwargs : Option<&'a PyDict>, argument : &str, default : T\n\n) -> PyResult<T> {\n\n kwargs.and_then(|dict| dict.get_item(argument))\n\n .map(|value| PyAny::extract::<T>(value))\n\n .unwrap_or(Ok(default))\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! py_repr {\n\n ( $wrapper : ty, $freed_str : expr, $repr_block : block) => {\n\n #[pyproto]\n\n #[allow(unused_variables)]\n\n impl PyObjectProtocol for $wrapper {\n\n fn __repr__(&self) -> PyResult<String> {\n\n if self.is_null() {\n\n Ok(format!($freed_str))\n\n } else {\n\n let inner = self.inner_unchkd();\n\n $repr_block\n", "file_path": "python_ext/pyo3/python_utils/src/lib.rs", "rank": 85, "score": 130789.66990521422 }, { "content": "type FreeModuleHomomorphism = FMH<FreeModule>;\n", "file_path": "ext/examples/bruner.rs", "rank": 86, "score": 130374.27465253917 }, { "content": "fn bockstein_or_sq_to_adem_basis_elt(e: &BocksteinOrSq, q: i32) -> AdemBasisElement {\n\n match e {\n\n BocksteinOrSq::Bockstein => {\n\n if q == 1 {\n\n AdemBasisElement {\n\n degree: 1,\n\n excess: 1,\n\n bocksteins: 0,\n\n ps: vec![1],\n\n p_or_sq: false,\n\n }\n\n } else {\n\n AdemBasisElement {\n\n degree: 1,\n\n excess: 1,\n\n bocksteins: 1,\n\n ps: vec![],\n\n p_or_sq: true,\n\n }\n\n }\n", "file_path": "ext/crates/algebra/src/steenrod_evaluator.rs", "rank": 87, "score": 129850.2667503978 }, { "content": "fn lock_tempdir(dir: &Path) {\n\n let mut dir: PathBuf = dir.into();\n\n for kind in SaveKind::resolution_data() {\n\n dir.push(format!(\"{}s\", kind.name()));\n\n set_readonly(&dir, true);\n\n dir.pop();\n\n }\n\n set_readonly(&dir, true);\n\n}\n\n\n", "file_path": "ext/tests/save_load_resolution.rs", "rank": 88, "score": 129605.4459691438 }, { "content": "/// Should unlock after the test so that cleanup can be performed\n\nfn unlock_tempdir(dir: &Path) {\n\n set_readonly(dir, false);\n\n\n\n let mut dir: PathBuf = dir.into();\n\n for kind in SaveKind::resolution_data() {\n\n dir.push(format!(\"{}s\", kind.name()));\n\n set_readonly(&dir, false);\n\n dir.pop();\n\n }\n\n}\n\n\n", "file_path": "ext/tests/save_load_resolution.rs", "rank": 89, "score": 129605.4459691438 }, { "content": "fn check<T: TryInto<Config>>(config: T, max_degree: i32) {\n\n let config: Config = config.try_into().ok().unwrap();\n\n println!(\"Module: {:?}\", config);\n\n let resolution = Arc::new(construct(config, None).unwrap());\n\n resolution.compute_through_bidegree(max_degree as u32, max_degree);\n\n\n\n let module = resolution.target().module(0);\n\n let p = module.prime();\n\n\n\n let id = FiniteModuleHomomorphism::identity_homomorphism(module);\n\n\n\n let hom = ResolutionHomomorphism::from_module_homomorphism(\n\n \"\".to_string(),\n\n Arc::clone(&resolution),\n\n Arc::clone(&resolution),\n\n &id,\n\n );\n\n hom.extend(max_degree as u32, max_degree);\n\n\n\n for s in 0..=max_degree as u32 {\n", "file_path": "ext/tests/extend_identity.rs", "rank": 90, "score": 129590.60442681496 }, { "content": "pub fn arc_to_final<T>(ptr : &Arc<T>) -> Weak<()> {\n\n weak_ptr_to_final(Arc::downgrade(ptr))\n\n}\n\n\n\n// pub fn get_from_kwargs<'a, T : pyo3::FromPyObject<'a>>(\n\n// kwargs : Option<&'a PyDict>, argument : &str\n\n// ) -> Option<PyResult<T>> {\n\n// kwargs.and_then(|dict| dict.get_item(argument))\n\n// .map(|value| PyAny::extract::<T>(value))\n\n// }\n\n\n", "file_path": "python_ext/pyo3/python_utils/src/lib.rs", "rank": 91, "score": 129164.96013254853 }, { "content": "pub fn parse_algebra(i: &str) -> Result<AlgebraParseNode, ParseError> {\n\n let (rest, parse_tree) = algebra_expr(i).map_err(|err| ParseError {\n\n info: format!(\"{:#?}\", err),\n\n })?;\n\n if rest.is_empty() {\n\n Ok(parse_tree)\n\n } else {\n\n Err(ParseError {\n\n info: \"Failed to consume all of input\".to_string(),\n\n })\n\n }\n\n}\n\n\n", "file_path": "ext/crates/algebra/src/steenrod_parser.rs", "rank": 92, "score": 127597.27834636928 }, { "content": "pub fn weak_ptr_to_final<T>(ptr : Weak<T>) -> Weak<()> {\n\n unsafe { std::mem::transmute(ptr) }\n\n}\n\n\n", "file_path": "python_ext/pyo3/python_utils/src/lib.rs", "rank": 93, "score": 127597.27834636928 }, { "content": "pub fn parse_module(i: &str) -> Result<ModuleParseNode, ParseError> {\n\n let (rest, parse_tree) = module_expr(i).map_err(|err| ParseError {\n\n info: format!(\"{:#?}\", err),\n\n })?;\n\n if rest.is_empty() {\n\n Ok(parse_tree)\n\n } else {\n\n Err(ParseError {\n\n info: \"Failed to consume all of input\".to_string(),\n\n })\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ParseError {\n\n pub info: String,\n\n}\n\n\n\nimpl std::fmt::Display for ParseError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n", "file_path": "ext/crates/algebra/src/steenrod_parser.rs", "rank": 94, "score": 127597.27834636928 }, { "content": "pub fn gens_to_json(gens: &BiVec<Vec<String>>) -> serde_json::Value {\n\n let mut gens_json = json!({});\n\n for (i, deg_i_gens) in gens.iter_enum() {\n\n for gen in deg_i_gens {\n\n gens_json[gen] = json!(i);\n\n }\n\n }\n\n gens_json\n\n}\n\n\n", "file_path": "ext/examples/define_module.rs", "rank": 95, "score": 126118.27937653218 }, { "content": "fn parse_vec(s: &str) -> Result<Vec<u32>, core::num::ParseIntError> {\n\n s[1..s.len() - 1]\n\n .split(',')\n\n .map(|x| x.trim().parse())\n\n .collect::<Result<Vec<_>, _>>()\n\n}\n\n\n", "file_path": "ext/examples/massey.rs", "rank": 96, "score": 125287.29582759166 }, { "content": "fn benchmark(module_name: &str, max_degree: i32, algebra: &str, n_times: u128) {\n\n for _ in 0..n_times {\n\n let res = construct((module_name, algebra), None).unwrap();\n\n res.compute_through_bidegree(max_degree as u32, max_degree);\n\n assert!(\n\n res.module(max_degree as u32)\n\n .number_of_gens_in_degree(max_degree)\n\n < 1000\n\n );\n\n }\n\n}\n\n\n", "file_path": "ext/benches/iai.rs", "rank": 97, "score": 125285.060112116 }, { "content": "fn benchmark(module_name: &str, max_degree: i32, algebra: &str, n_times: u128) {\n\n print!(\n\n \"benchmark {:6} {} {}: \",\n\n algebra, module_name, max_degree\n\n );\n\n std::io::stdout().flush().unwrap();\n\n\n\n let start = Instant::now();\n\n for _ in 0..n_times {\n\n let res = construct((module_name, algebra), None).unwrap();\n\n res.compute_through_bidegree(max_degree as u32, max_degree);\n\n assert!(\n\n res.module(max_degree as u32)\n\n .number_of_gens_in_degree(max_degree)\n\n < 1000\n\n );\n\n }\n\n let dur = start.elapsed();\n\n\n\n println!(\"{} ms / iter\", dur.as_millis() / n_times as u128);\n\n}\n\n\n", "file_path": "ext/benches/resolve.rs", "rank": 98, "score": 125285.060112116 }, { "content": "use std::sync::Arc;\n\n\n\nuse ext::resolution_homomorphism::{\n\n ResolutionHomomorphism as ResolutionHomomorphismRust, \n\n ResolutionHomomorphismToUnit as ResolutionHomomorphismToUnitRust\n\n};\n\n\n\nuse pyo3::prelude::*;\n\n\n\nuse python_fp::vector::FpVector;\n\nuse python_fp::matrix::Matrix;\n\nuse crate::resolution::{CCRust, Resolution};\n\n\n\npython_utils::rc_wrapper_type!(ResolutionHomomorphism, ResolutionHomomorphismToUnitRust<CCRust>);\n\n\n\n#[pymethods]\n\nimpl ResolutionHomomorphism {\n\n #[new]\n\n pub fn new(\n\n name : String,\n", "file_path": "python_ext/pyo3/src/resolution_homomorphism.rs", "rank": 99, "score": 41.16995094270719 } ]
Rust
lib/megstd/src/drawing/color.rs
neri/toe
8442f82bce551ce27b23b24336a285f25d422e98
use core::mem::transmute; pub trait ColorTrait: Sized + Copy + Clone + PartialEq + Eq {} #[repr(transparent)] #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct IndexedColor(pub u8); impl ColorTrait for IndexedColor {} impl IndexedColor { pub const MIN: Self = Self(u8::MIN); pub const MAX: Self = Self(u8::MAX); pub const DEFAULT_KEY: Self = Self(u8::MAX); pub const BLACK: Self = Self(0); pub const BLUE: Self = Self(1); pub const GREEN: Self = Self(2); pub const CYAN: Self = Self(3); pub const RED: Self = Self(4); pub const MAGENTA: Self = Self(5); pub const BROWN: Self = Self(6); pub const LIGHT_GRAY: Self = Self(7); pub const DARK_GRAY: Self = Self(8); pub const LIGHT_BLUE: Self = Self(9); pub const LIGHT_GREEN: Self = Self(10); pub const LIGHT_CYAN: Self = Self(11); pub const LIGHT_RED: Self = Self(12); pub const LIGHT_MAGENTA: Self = Self(13); pub const YELLOW: Self = Self(14); pub const WHITE: Self = Self(15); pub const COLOR_PALETTE: [u32; 256] = [ 0xFF212121, 0xFF0D47A1, 0xFF1B5E20, 0xFF006064, 0xFFb71c1c, 0xFF4A148C, 0xFF795548, 0xFF9E9E9E, 0xFF616161, 0xFF2196F3, 0xFF4CAF50, 0xFF00BCD4, 0xFFf44336, 0xFF9C27B0, 0xFFFFEB3B, 0xFFFFFFFF, 0xFF000000, 0xFF330000, 0xFF660000, 0xFF990000, 0xFFCC0000, 0xFFFF0000, 0xFF003300, 0xFF333300, 0xFF663300, 0xFF993300, 0xFFCC3300, 0xFFFF3300, 0xFF006600, 0xFF336600, 0xFF666600, 0xFF996600, 0xFFCC6600, 0xFFFF6600, 0xFF009900, 0xFF339900, 0xFF669900, 0xFF999900, 0xFFCC9900, 0xFFFF9900, 0xFF00CC00, 0xFF33CC00, 0xFF66CC00, 0xFF99CC00, 0xFFCCCC00, 0xFFFFCC00, 0xFF00FF00, 0xFF33FF00, 0xFF66FF00, 0xFF99FF00, 0xFFCCFF00, 0xFFFFFF00, 0xFF000033, 0xFF330033, 0xFF660033, 0xFF990033, 0xFFCC0033, 0xFFFF0033, 0xFF003333, 0xFF333333, 0xFF663333, 0xFF993333, 0xFFCC3333, 0xFFFF3333, 0xFF006633, 0xFF336633, 0xFF666633, 0xFF996633, 0xFFCC6633, 0xFFFF6633, 0xFF009933, 0xFF339933, 0xFF669933, 0xFF999933, 0xFFCC9933, 0xFFFF9933, 0xFF00CC33, 0xFF33CC33, 0xFF66CC33, 0xFF99CC33, 0xFFCCCC33, 0xFFFFCC33, 0xFF00FF33, 0xFF33FF33, 0xFF66FF33, 0xFF99FF33, 0xFFCCFF33, 0xFFFFFF33, 0xFF000066, 0xFF330066, 0xFF660066, 0xFF990066, 0xFFCC0066, 0xFFFF0066, 0xFF003366, 0xFF333366, 0xFF663366, 0xFF993366, 0xFFCC3366, 0xFFFF3366, 0xFF006666, 0xFF336666, 0xFF666666, 0xFF996666, 0xFFCC6666, 0xFFFF6666, 0xFF009966, 0xFF339966, 0xFF669966, 0xFF999966, 0xFFCC9966, 0xFFFF9966, 0xFF00CC66, 0xFF33CC66, 0xFF66CC66, 0xFF99CC66, 0xFFCCCC66, 0xFFFFCC66, 0xFF00FF66, 0xFF33FF66, 0xFF66FF66, 0xFF99FF66, 0xFFCCFF66, 0xFFFFFF66, 0xFF000099, 0xFF330099, 0xFF660099, 0xFF990099, 0xFFCC0099, 0xFFFF0099, 0xFF003399, 0xFF333399, 0xFF663399, 0xFF993399, 0xFFCC3399, 0xFFFF3399, 0xFF006699, 0xFF336699, 0xFF666699, 0xFF996699, 0xFFCC6699, 0xFFFF6699, 0xFF009999, 0xFF339999, 0xFF669999, 0xFF999999, 0xFFCC9999, 0xFFFF9999, 0xFF00CC99, 0xFF33CC99, 0xFF66CC99, 0xFF99CC99, 0xFFCCCC99, 0xFFFFCC99, 0xFF00FF99, 0xFF33FF99, 0xFF66FF99, 0xFF99FF99, 0xFFCCFF99, 0xFFFFFF99, 0xFF0000CC, 0xFF3300CC, 0xFF6600CC, 0xFF9900CC, 0xFFCC00CC, 0xFFFF00CC, 0xFF0033CC, 0xFF3333CC, 0xFF6633CC, 0xFF9933CC, 0xFFCC33CC, 0xFFFF33CC, 0xFF0066CC, 0xFF3366CC, 0xFF6666CC, 0xFF9966CC, 0xFFCC66CC, 0xFFFF66CC, 0xFF0099CC, 0xFF3399CC, 0xFF6699CC, 0xFF9999CC, 0xFFCC99CC, 0xFFFF99CC, 0xFF00CCCC, 0xFF33CCCC, 0xFF66CCCC, 0xFF99CCCC, 0xFFCCCCCC, 0xFFFFCCCC, 0xFF00FFCC, 0xFF33FFCC, 0xFF66FFCC, 0xFF99FFCC, 0xFFCCFFCC, 0xFFFFFFCC, 0xFF0000FF, 0xFF3300FF, 0xFF6600FF, 0xFF9900FF, 0xFFCC00FF, 0xFFFF00FF, 0xFF0033FF, 0xFF3333FF, 0xFF6633FF, 0xFF9933FF, 0xFFCC33FF, 0xFFFF33FF, 0xFF0066FF, 0xFF3366FF, 0xFF6666FF, 0xFF9966FF, 0xFFCC66FF, 0xFFFF66FF, 0xFF0099FF, 0xFF3399FF, 0xFF6699FF, 0xFF9999FF, 0xFFCC99FF, 0xFFFF99FF, 0xFF00CCFF, 0xFF33CCFF, 0xFF66CCFF, 0xFF99CCFF, 0xFFCCCCFF, 0xFFFFCCFF, 0xFF00FFFF, 0xFF33FFFF, 0xFF66FFFF, 0xFF99FFFF, 0xFFCCFFFF, 0xFFFFFFFF, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ]; #[inline] pub const fn from_rgb(rgb: u32) -> Self { let b = (((rgb & 0xFF) + 25) / 51) as u8; let g = ((((rgb >> 8) & 0xFF) + 25) / 51) as u8; let r = ((((rgb >> 16) & 0xFF) + 25) / 51) as u8; Self(16 + r + g * 6 + b * 36) } #[inline] pub const fn as_rgb(self) -> u32 { Self::COLOR_PALETTE[self.0 as usize] & 0xFF_FF_FF } #[inline] pub const fn as_argb(self) -> u32 { Self::COLOR_PALETTE[self.0 as usize] } #[inline] pub const fn as_true_color(self) -> TrueColor { TrueColor::from_argb(self.as_argb()) } } impl From<u8> for IndexedColor { fn from(val: u8) -> Self { Self(val) } } impl From<IndexedColor> for TrueColor { fn from(val: IndexedColor) -> Self { val.as_true_color() } } #[repr(C)] #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub struct TrueColor { argb: u32, } impl ColorTrait for TrueColor {} impl TrueColor { pub const TRANSPARENT: Self = Self::from_argb(0); pub const WHITE: Self = Self::from_rgb(0xFFFFFF); #[inline] pub const fn from_rgb(rgb: u32) -> Self { Self { argb: rgb | 0xFF000000, } } #[inline] pub const fn from_argb(argb: u32) -> Self { Self { argb } } #[inline] pub const fn gray(white: u8, alpha: u8) -> Self { Self { argb: white as u32 * 0x00_01_01_01 + alpha as u32 * 0x01_00_00_00, } } #[inline] pub fn components(self) -> ColorComponents { self.into() } #[inline] pub const fn rgb(self) -> u32 { self.argb & 0x00FFFFFF } #[inline] pub const fn argb(self) -> u32 { self.argb } #[inline] pub fn brightness(self) -> u8 { let cc = self.components(); ((cc.r as usize * 19589 + cc.g as usize * 38444 + cc.b as usize * 7502 + 32767) >> 16) as u8 } #[inline] pub const fn opacity(self) -> u8 { (self.argb >> 24) as u8 } #[inline] pub fn set_opacity(mut self, alpha: u8) -> Self { let mut components = self.components(); components.a = alpha; self.argb = components.into(); self } #[inline] pub const fn is_opaque(self) -> bool { self.opacity() == 0xFF } #[inline] pub const fn is_transparent(self) -> bool { self.opacity() == 0 } #[inline] pub fn blend_each<F>(self, rhs: Self, f: F) -> Self where F: Fn(u8, u8) -> u8, { self.components().blend_each(rhs.into(), f).into() } #[inline] pub fn blend_color<F1, F2>(self, rhs: Self, f_rgb: F1, f_a: F2) -> Self where F1: Fn(u8, u8) -> u8, F2: Fn(u8, u8) -> u8, { self.components().blend_color(rhs.into(), f_rgb, f_a).into() } #[inline] pub fn blend(self, other: Self) -> Self { let c = other.components(); let alpha_l = c.a as usize; let alpha_r = 255 - alpha_l; c.blend_each(self.components(), |a, b| { ((a as usize * alpha_l + b as usize * alpha_r) / 255) as u8 }) .into() } } impl From<u32> for TrueColor { fn from(val: u32) -> Self { Self::from_argb(val) } } impl From<TrueColor> for IndexedColor { fn from(val: TrueColor) -> Self { Self::from_rgb(val.rgb()) } } #[repr(C)] #[derive(Debug, Copy, Clone)] #[cfg(target_endian = "little")] pub struct ColorComponents { pub b: u8, pub g: u8, pub r: u8, pub a: u8, } impl ColorComponents { #[inline] pub fn blend_each<F>(self, rhs: Self, f: F) -> Self where F: Fn(u8, u8) -> u8, { Self { a: f(self.a, rhs.a), r: f(self.r, rhs.r), g: f(self.g, rhs.g), b: f(self.b, rhs.b), } } #[inline] pub fn blend_color<F1, F2>(self, rhs: Self, f_rgb: F1, f_a: F2) -> Self where F1: Fn(u8, u8) -> u8, F2: Fn(u8, u8) -> u8, { Self { a: f_a(self.a, rhs.a), r: f_rgb(self.r, rhs.r), g: f_rgb(self.g, rhs.g), b: f_rgb(self.b, rhs.b), } } #[inline] pub const fn is_opaque(self) -> bool { self.a == 255 } #[inline] pub const fn is_transparent(self) -> bool { self.a == 0 } } impl From<TrueColor> for ColorComponents { fn from(color: TrueColor) -> Self { unsafe { transmute(color) } } } impl From<ColorComponents> for TrueColor { fn from(components: ColorComponents) -> Self { unsafe { transmute(components) } } } impl Into<u32> for ColorComponents { fn into(self) -> u32 { unsafe { transmute(self) } } } #[repr(C)] #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct DeepColor30 { rgb: u32, } impl ColorTrait for DeepColor30 {} impl DeepColor30 { #[inline] pub const fn from_rgb(rgb: u32) -> Self { Self { rgb } } #[inline] pub const fn from_true_color(val: TrueColor) -> Self { let rgb32 = val.argb(); let components = ( (rgb32 & 0xFF), ((rgb32 >> 8) & 0xFF), ((rgb32 >> 16) & 0xFF), ); Self { rgb: Self::c8c10(components.0) | (Self::c8c10(components.1) << 10) | (Self::c8c10(components.2) << 20), } } #[inline] pub const fn components(&self) -> (u32, u32, u32) { let rgb = self.rgb(); ((rgb & 0x3FF), ((rgb >> 10) & 0x3FF), ((rgb >> 20) & 0x3FF)) } #[inline] pub const fn into_true_color(&self) -> TrueColor { let components = self.components(); TrueColor::from_rgb( (components.0 >> 2) | ((components.1 >> 2) << 8) | ((components.2 >> 2) << 16), ) } const fn c8c10(c8: u32) -> u32 { (c8 * 0x0101) >> 6 } #[inline] pub const fn rgb(&self) -> u32 { self.rgb } } impl From<TrueColor> for DeepColor30 { fn from(val: TrueColor) -> Self { Self::from_true_color(val) } } impl From<DeepColor30> for TrueColor { fn from(val: DeepColor30) -> Self { val.into_true_color() } } #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum AmbiguousColor { Indexed(IndexedColor), Argb32(TrueColor), } impl ColorTrait for AmbiguousColor {} impl AmbiguousColor { pub const TRANSPARENT: Self = Self::Argb32(TrueColor::TRANSPARENT); pub const BLACK: Self = Self::Indexed(IndexedColor::BLACK); pub const BLUE: Self = Self::Indexed(IndexedColor::BLUE); pub const GREEN: Self = Self::Indexed(IndexedColor::GREEN); pub const CYAN: Self = Self::Indexed(IndexedColor::CYAN); pub const RED: Self = Self::Indexed(IndexedColor::RED); pub const MAGENTA: Self = Self::Indexed(IndexedColor::MAGENTA); pub const BROWN: Self = Self::Indexed(IndexedColor::BROWN); pub const LIGHT_GRAY: Self = Self::Indexed(IndexedColor::LIGHT_GRAY); pub const DARK_GRAY: Self = Self::Indexed(IndexedColor::DARK_GRAY); pub const LIGHT_BLUE: Self = Self::Indexed(IndexedColor::LIGHT_BLUE); pub const LIGHT_GREEN: Self = Self::Indexed(IndexedColor::LIGHT_GREEN); pub const LIGHT_CYAN: Self = Self::Indexed(IndexedColor::LIGHT_CYAN); pub const LIGHT_RED: Self = Self::Indexed(IndexedColor::LIGHT_RED); pub const LIGHT_MAGENTA: Self = Self::Indexed(IndexedColor::LIGHT_MAGENTA); pub const YELLOW: Self = Self::Indexed(IndexedColor::YELLOW); pub const WHITE: Self = Self::Indexed(IndexedColor::WHITE); #[inline] pub const fn from_rgb(rgb: u32) -> Self { Self::Argb32(TrueColor::from_rgb(rgb)) } #[inline] pub const fn from_argb(rgb: u32) -> Self { Self::Argb32(TrueColor::from_argb(rgb)) } #[inline] pub const fn into_argb(&self) -> TrueColor { match self { AmbiguousColor::Indexed(v) => v.as_true_color(), AmbiguousColor::Argb32(v) => *v, } } } impl Into<IndexedColor> for AmbiguousColor { fn into(self) -> IndexedColor { match self { AmbiguousColor::Indexed(v) => v, AmbiguousColor::Argb32(v) => v.into(), } } } impl Into<TrueColor> for AmbiguousColor { fn into(self) -> TrueColor { match self { AmbiguousColor::Indexed(v) => v.into(), AmbiguousColor::Argb32(v) => v, } } } impl From<IndexedColor> for AmbiguousColor { fn from(val: IndexedColor) -> Self { Self::Indexed(val) } } impl From<TrueColor> for AmbiguousColor { fn from(val: TrueColor) -> Self { Self::Argb32(val) } }
use core::mem::transmute; pub trait ColorTrait: Sized + Copy + Clone + PartialEq + Eq {} #[repr(transparent)] #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct IndexedColor(pub u8); impl ColorTrait for IndexedColor {} impl IndexedColor { pub const MIN: Self = Self(u8::MIN); pub const MAX: Self = Self(u8::MAX); pub const DEFAULT_KEY: Self = Self(u8::MAX); pub const BLACK: Self = Self(0); pub const BLUE: Self = Self(1); pub const GREEN: Self = Self(2); pub const CYAN: Self = Self(3); pub const RED: Self = Self(4); pub const MAGENTA: Self = Self(5); pub const BROWN: Self = Self(6); pub const LIGHT_GRAY: Self = Self(7); pub const DARK_GRAY: Self = Self(8); pub const LIGHT_BLUE: Self = Self(9); pub const LIGHT_GREEN: Self = Self(10); pub const LIGHT_CYAN: Self = Self(11); pub const LIGHT_RED: Self = Self(12); pub const LIGHT_MAGENTA: Self = Self(13); pub const YELLOW: Self = Self(14); pub const WHITE: Self = Self(15); pub const COLOR_PALETTE: [u32; 256] = [ 0xFF212121, 0xFF0D47A1, 0xFF1B5E20, 0xFF006064, 0xFFb71c1c, 0xFF4A148C, 0xFF795548, 0xFF9E9E9E, 0xFF616161, 0xFF2196F3, 0xFF4CAF50, 0xFF00BCD4, 0xFFf44336, 0xFF9C27B0, 0xFFFFEB3B, 0xFFFFFFFF, 0xFF000000, 0xFF330000, 0xFF660000, 0xFF990000, 0xFFCC0000, 0xFFFF0000, 0xFF003300, 0xFF333300, 0xFF663300, 0xFF993300, 0xFFCC3300, 0xFFFF3300, 0xFF006600, 0xFF336600, 0xFF666600, 0xFF996600, 0xFFCC6600, 0xFFFF6600, 0xFF009900, 0xFF339900, 0xFF669900, 0xFF999900, 0xFFCC9900, 0xFFFF9900, 0xFF00CC00, 0xFF33CC00, 0xFF66CC00, 0xFF99CC00, 0xFFCCCC00, 0xFFFFCC00, 0xFF00FF00, 0xFF33FF00, 0xFF66FF00, 0xFF99FF00, 0xFFCCFF00, 0xFFFFFF00, 0xFF000033, 0xFF330033, 0xFF660033, 0xFF990033, 0xFFCC0033, 0xFFFF0033, 0xFF003333, 0xFF333333, 0xFF663333, 0xFF993333, 0xFFCC3333, 0xFFFF3333, 0xFF006633, 0xFF336633, 0xFF666633, 0xFF996633, 0xFFCC6633, 0xFFFF6633, 0xFF009933, 0xFF339933, 0xFF669933, 0xFF999933, 0xFFCC9933, 0xFFFF9933, 0xFF00CC33, 0xFF33CC33, 0xFF66CC33, 0xFF99CC33, 0xFFCCCC33, 0xFFFFCC33, 0xFF00FF33, 0xFF33FF33, 0xFF66FF33, 0xFF99FF33, 0xFFCCFF33, 0xFFFFFF33, 0xFF000066, 0xFF330066, 0xFF660066, 0xFF990066, 0xFFCC0066, 0xFFFF0066, 0xFF003366, 0xFF333366, 0xFF663366, 0xFF993366, 0xFFCC3366, 0xFFFF3366, 0xFF006666, 0xFF336666, 0xFF666666, 0xFF996666, 0xFFCC6666, 0xFFFF6666, 0xFF009966, 0xFF339966, 0xFF669966, 0xFF999966, 0xFFCC9966, 0xFFFF9966, 0xFF00CC66, 0xFF33CC66, 0xFF66CC66, 0xFF99CC66, 0xFFCCCC66, 0xFFFFCC66, 0xFF00FF66, 0xFF33FF66, 0xFF66FF66, 0xFF99FF66, 0xFFCCFF66, 0xFFFFFF66, 0xFF000099, 0xFF330099, 0xFF660099, 0xFF990099, 0xFFCC0099, 0xFFFF0099, 0xFF003399, 0xFF333399, 0xFF663399, 0xFF993399, 0xFFCC3399, 0xFFFF3399, 0xFF006699, 0xFF336699, 0xFF666699, 0xFF996699, 0xFFCC6699, 0xFFFF6699, 0xFF009999, 0xFF339999, 0xFF669999, 0xFF999999, 0xFFCC9999, 0xFFFF9999, 0xFF00CC99, 0xFF33CC99, 0xFF66CC99, 0xFF99CC99, 0xFFCCCC99, 0xFFFFCC99, 0xFF00FF99, 0xFF33FF99, 0xFF66FF99, 0xFF99FF99, 0xFFCCFF99, 0xFFFFFF99, 0xFF0000CC, 0xFF3300CC, 0xFF6600CC, 0xFF9900CC, 0xFFCC00CC, 0xFFFF00CC, 0xFF0033CC, 0xFF3333CC, 0xFF6633CC, 0xFF9933CC, 0xFFCC33CC, 0xFFFF33CC, 0xFF0066CC, 0xFF3366CC, 0xFF6666CC, 0xFF9966CC, 0xFFCC66CC, 0xFFFF66CC, 0xFF0099CC, 0xFF3399CC, 0xFF6699CC, 0xFF9999CC, 0xFFCC99CC, 0xFFFF99CC, 0xFF00CCCC, 0xFF33CCCC, 0xFF66CCCC, 0xFF99CCCC, 0xFFCCCCCC, 0xFFFFCCCC, 0xFF00FFCC, 0xFF33FFCC, 0xFF66FFCC, 0xFF99FFCC, 0xFFCCFFCC, 0xFFFFFFCC, 0xFF0000FF, 0xFF3300FF, 0xFF6600FF, 0xFF9900FF, 0xFFCC00FF, 0xFFFF00FF, 0xFF0033FF, 0xFF3333FF, 0xFF6633FF, 0xFF9933FF, 0xFFCC33FF, 0xFFFF33FF, 0xFF0066FF, 0xFF3366FF, 0xFF6666FF, 0xFF9966FF, 0xFFCC66FF, 0xFFFF66FF, 0xFF0099FF, 0xFF3399FF, 0xFF6699FF, 0xFF9999FF, 0xFFCC99FF, 0xFFFF99FF, 0xFF00CCFF, 0xFF33CCFF, 0xFF66CCFF, 0xFF99CCFF, 0xFFCCCCFF, 0xFFFFCCFF, 0xFF00FFFF, 0xFF33FFFF, 0xFF66FFFF, 0xFF99FFFF, 0xFFCCFFFF, 0xFFFFFFFF, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ]; #[inline] pub const fn from_rgb(rgb: u32) -> Self { let b = (((rgb & 0xFF) + 25) / 51) as u8; let g = ((((rgb >> 8) & 0xFF) + 25) / 51) as u8; let r = ((((rgb >> 16) & 0xFF) + 25) / 51) as u8; Self(16 + r + g * 6 + b * 36) } #[inline] pub const fn as_rgb(self) -> u32 { Self::COLOR_PALETTE[self.0 as usize] & 0xFF_FF_FF } #[inline] pub const fn as_argb(self) -> u32 { Self::COLOR_PALETTE[self.0 as usize] } #[inline] pub const fn as_true_color(self) -> TrueColor { TrueColor::from_argb(self.as_argb()) } } impl From<u8> for IndexedColor { fn from(val: u8) -> Self { Self(val) } } impl From<IndexedColor> for TrueColor { fn from(val: IndexedColor) -> Self { val.as_true_color() } } #[repr(C)] #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub struct TrueColor { argb: u32, } impl ColorTrait for TrueColor {} impl TrueColor { pub const TRANSPARENT: Self = Self::from_argb(0); pub const WHITE: Self = Self::f
f { AmbiguousColor::Indexed(v) => v.as_true_color(), AmbiguousColor::Argb32(v) => *v, } } } impl Into<IndexedColor> for AmbiguousColor { fn into(self) -> IndexedColor { match self { AmbiguousColor::Indexed(v) => v, AmbiguousColor::Argb32(v) => v.into(), } } } impl Into<TrueColor> for AmbiguousColor { fn into(self) -> TrueColor { match self { AmbiguousColor::Indexed(v) => v.into(), AmbiguousColor::Argb32(v) => v, } } } impl From<IndexedColor> for AmbiguousColor { fn from(val: IndexedColor) -> Self { Self::Indexed(val) } } impl From<TrueColor> for AmbiguousColor { fn from(val: TrueColor) -> Self { Self::Argb32(val) } }
rom_rgb(0xFFFFFF); #[inline] pub const fn from_rgb(rgb: u32) -> Self { Self { argb: rgb | 0xFF000000, } } #[inline] pub const fn from_argb(argb: u32) -> Self { Self { argb } } #[inline] pub const fn gray(white: u8, alpha: u8) -> Self { Self { argb: white as u32 * 0x00_01_01_01 + alpha as u32 * 0x01_00_00_00, } } #[inline] pub fn components(self) -> ColorComponents { self.into() } #[inline] pub const fn rgb(self) -> u32 { self.argb & 0x00FFFFFF } #[inline] pub const fn argb(self) -> u32 { self.argb } #[inline] pub fn brightness(self) -> u8 { let cc = self.components(); ((cc.r as usize * 19589 + cc.g as usize * 38444 + cc.b as usize * 7502 + 32767) >> 16) as u8 } #[inline] pub const fn opacity(self) -> u8 { (self.argb >> 24) as u8 } #[inline] pub fn set_opacity(mut self, alpha: u8) -> Self { let mut components = self.components(); components.a = alpha; self.argb = components.into(); self } #[inline] pub const fn is_opaque(self) -> bool { self.opacity() == 0xFF } #[inline] pub const fn is_transparent(self) -> bool { self.opacity() == 0 } #[inline] pub fn blend_each<F>(self, rhs: Self, f: F) -> Self where F: Fn(u8, u8) -> u8, { self.components().blend_each(rhs.into(), f).into() } #[inline] pub fn blend_color<F1, F2>(self, rhs: Self, f_rgb: F1, f_a: F2) -> Self where F1: Fn(u8, u8) -> u8, F2: Fn(u8, u8) -> u8, { self.components().blend_color(rhs.into(), f_rgb, f_a).into() } #[inline] pub fn blend(self, other: Self) -> Self { let c = other.components(); let alpha_l = c.a as usize; let alpha_r = 255 - alpha_l; c.blend_each(self.components(), |a, b| { ((a as usize * alpha_l + b as usize * alpha_r) / 255) as u8 }) .into() } } impl From<u32> for TrueColor { fn from(val: u32) -> Self { Self::from_argb(val) } } impl From<TrueColor> for IndexedColor { fn from(val: TrueColor) -> Self { Self::from_rgb(val.rgb()) } } #[repr(C)] #[derive(Debug, Copy, Clone)] #[cfg(target_endian = "little")] pub struct ColorComponents { pub b: u8, pub g: u8, pub r: u8, pub a: u8, } impl ColorComponents { #[inline] pub fn blend_each<F>(self, rhs: Self, f: F) -> Self where F: Fn(u8, u8) -> u8, { Self { a: f(self.a, rhs.a), r: f(self.r, rhs.r), g: f(self.g, rhs.g), b: f(self.b, rhs.b), } } #[inline] pub fn blend_color<F1, F2>(self, rhs: Self, f_rgb: F1, f_a: F2) -> Self where F1: Fn(u8, u8) -> u8, F2: Fn(u8, u8) -> u8, { Self { a: f_a(self.a, rhs.a), r: f_rgb(self.r, rhs.r), g: f_rgb(self.g, rhs.g), b: f_rgb(self.b, rhs.b), } } #[inline] pub const fn is_opaque(self) -> bool { self.a == 255 } #[inline] pub const fn is_transparent(self) -> bool { self.a == 0 } } impl From<TrueColor> for ColorComponents { fn from(color: TrueColor) -> Self { unsafe { transmute(color) } } } impl From<ColorComponents> for TrueColor { fn from(components: ColorComponents) -> Self { unsafe { transmute(components) } } } impl Into<u32> for ColorComponents { fn into(self) -> u32 { unsafe { transmute(self) } } } #[repr(C)] #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct DeepColor30 { rgb: u32, } impl ColorTrait for DeepColor30 {} impl DeepColor30 { #[inline] pub const fn from_rgb(rgb: u32) -> Self { Self { rgb } } #[inline] pub const fn from_true_color(val: TrueColor) -> Self { let rgb32 = val.argb(); let components = ( (rgb32 & 0xFF), ((rgb32 >> 8) & 0xFF), ((rgb32 >> 16) & 0xFF), ); Self { rgb: Self::c8c10(components.0) | (Self::c8c10(components.1) << 10) | (Self::c8c10(components.2) << 20), } } #[inline] pub const fn components(&self) -> (u32, u32, u32) { let rgb = self.rgb(); ((rgb & 0x3FF), ((rgb >> 10) & 0x3FF), ((rgb >> 20) & 0x3FF)) } #[inline] pub const fn into_true_color(&self) -> TrueColor { let components = self.components(); TrueColor::from_rgb( (components.0 >> 2) | ((components.1 >> 2) << 8) | ((components.2 >> 2) << 16), ) } const fn c8c10(c8: u32) -> u32 { (c8 * 0x0101) >> 6 } #[inline] pub const fn rgb(&self) -> u32 { self.rgb } } impl From<TrueColor> for DeepColor30 { fn from(val: TrueColor) -> Self { Self::from_true_color(val) } } impl From<DeepColor30> for TrueColor { fn from(val: DeepColor30) -> Self { val.into_true_color() } } #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum AmbiguousColor { Indexed(IndexedColor), Argb32(TrueColor), } impl ColorTrait for AmbiguousColor {} impl AmbiguousColor { pub const TRANSPARENT: Self = Self::Argb32(TrueColor::TRANSPARENT); pub const BLACK: Self = Self::Indexed(IndexedColor::BLACK); pub const BLUE: Self = Self::Indexed(IndexedColor::BLUE); pub const GREEN: Self = Self::Indexed(IndexedColor::GREEN); pub const CYAN: Self = Self::Indexed(IndexedColor::CYAN); pub const RED: Self = Self::Indexed(IndexedColor::RED); pub const MAGENTA: Self = Self::Indexed(IndexedColor::MAGENTA); pub const BROWN: Self = Self::Indexed(IndexedColor::BROWN); pub const LIGHT_GRAY: Self = Self::Indexed(IndexedColor::LIGHT_GRAY); pub const DARK_GRAY: Self = Self::Indexed(IndexedColor::DARK_GRAY); pub const LIGHT_BLUE: Self = Self::Indexed(IndexedColor::LIGHT_BLUE); pub const LIGHT_GREEN: Self = Self::Indexed(IndexedColor::LIGHT_GREEN); pub const LIGHT_CYAN: Self = Self::Indexed(IndexedColor::LIGHT_CYAN); pub const LIGHT_RED: Self = Self::Indexed(IndexedColor::LIGHT_RED); pub const LIGHT_MAGENTA: Self = Self::Indexed(IndexedColor::LIGHT_MAGENTA); pub const YELLOW: Self = Self::Indexed(IndexedColor::YELLOW); pub const WHITE: Self = Self::Indexed(IndexedColor::WHITE); #[inline] pub const fn from_rgb(rgb: u32) -> Self { Self::Argb32(TrueColor::from_rgb(rgb)) } #[inline] pub const fn from_argb(rgb: u32) -> Self { Self::Argb32(TrueColor::from_argb(rgb)) } #[inline] pub const fn into_argb(&self) -> TrueColor { match sel
random
[]
Rust
src/vcs.rs
kascote/rpure
7b12608ad46d2d81ead01abc2655153b431c1d09
use git2::{Repository, Status}; use std::env; use std::path::Path; #[derive(Debug)] pub enum VcsDirty { WTmodified, IDmodified, Green, } #[derive(Debug)] pub struct VcsStatus { pub name: String, pub dirty: String, pub dirty_status: VcsDirty, pub ahead: usize, pub behind: usize, pub stash: bool, pub state: String, } impl VcsStatus { pub fn blank() -> VcsStatus { VcsStatus { name: String::from(""), dirty: String::from(""), dirty_status: VcsDirty::Green, ahead: 0, behind: 0, stash: false, state: String::from(""), } } } pub fn vcs_status(cfg: &super::config::Config) -> Option<VcsStatus> { let current_dir = env::var("PWD").unwrap(); let mut status = VcsStatus::blank() ; let mut repo = vcs_repo(&current_dir)?; let state: String = format!("{:?}", repo.state()); if state != "Clean" { status.state = state; } let (ahead, behind) = get_ahead_behind(&repo)?; status.ahead = ahead; status.behind = behind; let each_stash = |_idx: usize, _name: &str, _oid: &git2::Oid| -> bool { status.stash = true; false }; repo.stash_foreach(each_stash).expect("error checking stashed files"); status.name = get_repo_name(&repo); status.dirty = cfg.git_clean.to_string(); let file_stats = repo.statuses(None).unwrap(); for file in file_stats.iter() { match file.status() { Status::WT_NEW | Status::WT_MODIFIED | Status::WT_DELETED | Status::WT_TYPECHANGE | Status::WT_RENAMED => { status.dirty = cfg.git_wt_modified.to_string(); status.dirty_status = VcsDirty::WTmodified; break; } Status::INDEX_NEW | Status::INDEX_MODIFIED | Status::INDEX_DELETED | Status::INDEX_TYPECHANGE | Status::INDEX_RENAMED => { status.dirty = cfg.git_index_modified.to_string(); status.dirty_status = VcsDirty::IDmodified; } _ => { status.dirty_status = VcsDirty::Green } } } return Some(status); } fn vcs_repo(current_dir: &String) -> Option<Repository> { let mut repo: Option<Repository> = None; let current_path = Path::new(&current_dir[..]); let mut idx = 0; for path in current_path.ancestors() { match Repository::open(path) { Ok(r) => { repo = Some(r); break; } Err(_) => {} } if idx == 10 { break; } idx += 1; } return repo; } fn get_ahead_behind(repo: &Repository) -> Option<(usize, usize)> { let head = match repo.head() { Ok(h) => h, Err(_) => return Some((0, 0)) }; if !head.is_branch() { return Some((0, 0)); } let head_name = head.shorthand()?; let head_branch = (repo.find_branch(head_name, git2::BranchType::Local).ok())?; let upstream = match head_branch.upstream() { Ok(u) => u, Err(_) => { return Some((0, 0)) } }; let head_oid = head.target()?; let upstream_oid = (upstream.get().target())?; let status = match repo.graph_ahead_behind(head_oid, upstream_oid) { Ok(r) => { let mut ahead = 0; let mut behind = 0; if r.0 > 0 { ahead = r.0; } if r.1 > 0 { behind = r.1; } (ahead, behind) }, Err(_) => (0, 0) }; return Some(status); } fn get_repo_name(repo: &Repository) -> String { let head = match repo.head() { Ok(r) => r, Err(_) => return String::from(""), }; if head.is_branch() { return head.shorthand().unwrap_or("err").into(); } else { let commit = head.peel_to_commit().unwrap(); return format!("{:.6}", commit.id()); } }
use git2::{Repository, Status}; use std::env; use std::path::Path; #[derive(Debug)] pub enum VcsDirty { WTmodified, IDmodified, Green, } #[derive(Debug)] pub struct VcsStatus { pub name: String, pub dirty: String, pub dirty_status: VcsDirty, pub ahead: usize, pub behind: usize, pub stash: bool, pub state: String, } impl VcsStatus { pub fn blank() -> VcsStatus { VcsStatus { name: String::from(""), dirty: String::from(""), dirty_status: VcsDirty::Green, ahead: 0, behind: 0, stash: false, state: String::from(""), } } } pub fn vcs_status(cfg: &super::config::Config) -> Option<VcsStatus> { let current_dir = env::var("PWD").unwrap(); let mut status = VcsStatus::blank() ; let mut repo = vcs_repo(&current_dir)?; let state: String = format!("{:?}", repo.state()); if state != "Clean" { status.state = state; } let (ahead, behind) = get_ahead_behind(&repo)?; status.ahead = ahead; status.behind = behind; let each_stash = |_idx: usize, _name: &str, _oid: &git2::Oid| -> bool { status.stash = true; false }; repo.stash_foreach(each_stash).expect("error checking stashed files"); status.name = get_repo_name(&repo); status.dirty = cfg.git_clean.to_string(); let file_stats = repo.statuses(None).unwrap(); for file in file_stats.iter() { match file.status() { Status::WT_NEW | Status::WT_MODIFIED | Status::WT_DELETED | Status::WT_TYPECHANGE | Status::WT_RENAMED => { status.dirty = cfg.git_wt_modified.to_string(); status.dirty_status = VcsDirty::WTmodified; break; } Status::INDEX_NEW | Status::INDEX_MODIFIED | Status::INDEX_DELETED | Status::INDEX_TYPECHANGE | Status::INDEX_RENAMED => { status.dirty = cfg.git_index_modified.to_string(); status.dirty_status = VcsDirty::IDmodified; } _ => { status.dirty_status = VcsDirty::Green } } } return Some(status); } fn vcs_repo(current_dir: &String) -> Option<Repository> { let mut repo: Option<Repository> = None; let current_path = Path::new(&current_dir[..]); let mut idx = 0; for path in current_path.ancestors() { match Repository::open(path) { Ok(r) => { repo = Some(r); break; } Err(_) => {} } if idx == 10 { break; } idx += 1; } return repo; } fn get_ahead_behind(repo: &Repository) -> Option<(usize, usize)> { let head = match repo.head() { Ok(h) => h, Err(_) => return Some((0, 0)) }; if !head.is_branch() { re
(ahead, behind) }, Err(_) => (0, 0) }; return Some(status); } fn get_repo_name(repo: &Repository) -> String { let head = match repo.head() { Ok(r) => r, Err(_) => return String::from(""), }; if head.is_branch() { return head.shorthand().unwrap_or("err").into(); } else { let commit = head.peel_to_commit().unwrap(); return format!("{:.6}", commit.id()); } }
turn Some((0, 0)); } let head_name = head.shorthand()?; let head_branch = (repo.find_branch(head_name, git2::BranchType::Local).ok())?; let upstream = match head_branch.upstream() { Ok(u) => u, Err(_) => { return Some((0, 0)) } }; let head_oid = head.target()?; let upstream_oid = (upstream.get().target())?; let status = match repo.graph_ahead_behind(head_oid, upstream_oid) { Ok(r) => { let mut ahead = 0; let mut behind = 0; if r.0 > 0 { ahead = r.0; } if r.1 > 0 { behind = r.1; }
function_block-random_span
[ { "content": "pub fn get_name() -> String {\n\n match env::var(\"VIRTUAL_ENV\") {\n\n Ok(venv_path) => {\n\n let venv_name = Path::new(&venv_path[..]).file_name();\n\n if let Some(name) = venv_name {\n\n if let Some(valid_name) = name.to_str() {\n\n return format!(\"({})\", valid_name);\n\n }\n\n }\n\n }\n\n Err(_) => {}\n\n }\n\n return \"\".into();\n\n}\n", "file_path": "src/venv.rs", "rank": 2, "score": 87763.78173990987 }, { "content": "pub fn cwd(cfg: &super::config::Config) -> Option<String> {\n\n let path_env = match env::var(\"PWD\") {\n\n Ok(p) => PathBuf::from(p),\n\n Err(_) => {\n\n match env::current_dir() {\n\n Ok(p) => p,\n\n Err(_) => {\n\n PathBuf::from(\"err current_dir\")\n\n }\n\n }\n\n }\n\n };\n\n\n\n let mut path: String = path_env.to_string_lossy().into_owned();\n\n let home = env::var(\"HOME\").unwrap();\n\n \n\n if !cfg.expand_home {\n\n let home_dir_ext = format!(\"{}{}\", home, \"/\");\n\n if (&path == &home) || (path.starts_with(&home_dir_ext)) {\n\n path = path.replacen(&home[..], \"~\", 1);\n\n }\n\n }\n\n\n\n if cfg.shorten_cwd { \n\n return Some(tico(&path, Option::None));\n\n } else {\n\n return Some(path);\n\n }\n\n}\n", "file_path": "src/cwd.rs", "rank": 4, "score": 55380.451248377125 }, { "content": "fn pista(zsh: bool) {\n\n // we force the color output, because PS1=$(rpure) will not print any color\n\n // have a point to not use colors ?¿ \n\n // https://github.com/mackwic/colored/blob/master/src/control.rs#L105\n\n colored::control::set_override(true);\n\n\n\n let cfg = config::Config::init();\n\n let cwd = match cwd::cwd(&cfg) {\n\n Some(c) => c.color(String::from(&cfg.cwd_color)),\n\n None => \"[directory does not exist]\".color(\"red\"),\n\n };\n\n let vcs_status = vcs::vcs_status(&cfg).unwrap_or(vcs::VcsStatus::blank());\n\n let venv = venv::get_name();\n\n let prompt_char = prompt_char::get_char(&cfg);\n\n let has_nl = if cfg.add_new_line { \"\\n\" } else { \"\" };\n\n\n\n let status_color = match vcs_status.dirty_status {\n\n vcs::VcsDirty::IDmodified => cfg.git_index_modified_color,\n\n vcs::VcsDirty::WTmodified => cfg.git_wt_modified_color,\n\n vcs::VcsDirty::Green => cfg.git_clean_color,\n", "file_path": "src/main.rs", "rank": 5, "score": 50825.16195051002 }, { "content": "pub fn get_char(cfg: &super::config::Config) -> colored::ColoredString {\n\n let euid = unsafe { libc::geteuid() };\n\n match euid {\n\n 0 => return cfg.prompt_char_root.color(String::from(&cfg.prompt_char_root_color)),\n\n _ => return cfg.prompt_char.color(String::from(&cfg.prompt_char_color)),\n\n }\n\n}\n", "file_path": "src/prompt_char.rs", "rank": 6, "score": 43457.30502944082 }, { "content": "fn help() {\n\n let msg = format!(\" \\\n\n rpure v{version} ~ {desc} \\n \\\n\n usage:\\n \\\n\n \\t -z print zshel compatible prompt \\n \\\n\n \\t -h this help\", \n\n version = env!(\"CARGO_PKG_VERSION\"),\n\n desc = env!(\"CARGO_PKG_DESCRIPTION\")\n\n );\n\n println!(\"{}\", msg);\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 8, "score": 26447.486401578695 }, { "content": "fn main() {\n\n let param = std::env::args().nth(1).or(Some(\"\".to_string())).unwrap();\n\n\n\n match param.as_ref() {\n\n \"-z\" => pista(true),\n\n \"-h\" => help(),\n\n _ => pista(false),\n\n };\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 9, "score": 26447.486401578695 }, { "content": "# rpure\n\n\n\n> a simple prompt inspired by [pure](https://github.com/sindresorhus/pure)\n\n\n\n![rpure.png](https://github.com/kascote/rpure/raw/master/screenshot.png)\n\n\n\n\n\n`rpure` is based on code from ![pista](https://github.com/nerdypepper/pista). \n\nWhy? Started as a Rust learning project.\n\n\n\n### features\n\n\n\n - shortened current working directory\n\n - git info (branch/commit, ahead/behind status, repo status)\n\n - superuser indicator\n\n - fully configurable\n\n \n\n### Readline\n\n\n\nThe symbol (●) at the beginning of the 2nd line is the Readline vim state. \n\nTo setup it, need to put this lines on the file `.inputrc` on the home directory:\n\n\n\n```\n\nset show-mode-in-prompt on\n\nset vi-cmd-mode-string \"⊙\"\n\nset vi-ins-mode-string \"●\"\n\n```\n\n\n\nfor more information check the [Readline init file syntax](https://www.gnu.org/software/bash/manual/html_node/Readline-Init-File-Syntax.html#Readline-Init-File-Syntax)\n", "file_path": "readme.md", "rank": 10, "score": 10549.936634787973 }, { "content": " };\n\n\n\n let stash = match vcs_status.stash {\n\n true => cfg.git_stash_char.color(cfg.git_stash_color),\n\n false => \"\".color(\"white\"),\n\n };\n\n\n\n let mut state = String::from(\"\");\n\n if vcs_status.state != \"\" {\n\n state = format!(\"{}{}\", state, \"[\".color(colored::Color::BrightBlack));\n\n state = format!(\"{}{}\", state, vcs_status.state.color(cfg.git_action_color));\n\n state = format!(\"{}{}\", state, \"]\".color(colored::Color::BrightBlack));\n\n }\n\n\n\n let mut arrow = String::from(\"\");\n\n if vcs_status.ahead > 0 {\n\n arrow = format!(\"{}{}\", arrow, &cfg.git_arrow_up_char.color(cfg.git_arrow_up_color));\n\n }\n\n if vcs_status.behind > 0 {\n\n arrow = format!(\"{}{}\", arrow, &cfg.git_arrow_down_char.color(cfg.git_arrow_down_color));\n", "file_path": "src/main.rs", "rank": 15, "score": 7.919841696100731 }, { "content": "use std::env;\n\nuse std::env::VarError;\n\n\n\n#[derive(Debug)]\n\npub struct Config {\n\n pub shorten_cwd: bool,\n\n pub expand_home: bool,\n\n pub cwd_color: String,\n\n pub prompt_char: String,\n\n pub prompt_char_root: String,\n\n pub prompt_char_color: String,\n\n pub prompt_char_root_color: String,\n\n pub branch_color: String,\n\n pub commit_color: String,\n\n pub git_clean_color: String,\n\n pub git_wt_modified_color: String,\n\n pub git_index_modified_color: String,\n\n pub git_clean: String,\n\n pub git_wt_modified: String,\n\n pub git_index_modified: String,\n", "file_path": "src/config.rs", "rank": 16, "score": 7.520277148773067 }, { "content": " pub add_new_line: bool,\n\n pub git_arrow_up_char: String,\n\n pub git_arrow_down_char: String,\n\n pub git_arrow_up_color: String,\n\n pub git_arrow_down_color: String,\n\n pub git_stash_char: String,\n\n pub git_stash_color: String,\n\n pub git_action_color: String,\n\n}\n\n\n\nimpl Config {\n\n pub fn init() -> Config {\n\n Config {\n\n shorten_cwd: Config::unwrap_bool(env::var(\"RPURE_SHORT_DIR\")),\n\n expand_home: Config::unwrap_bool(env::var(\"RPURE_EXPAND_HOME\")),\n\n cwd_color: env::var(\"RPURE_COLOR_CURRENT_DIRECTORY\").unwrap_or(\"cyan\".into()),\n\n prompt_char: env::var(\"RPURE_CHAR_PROMPT\").unwrap_or(\"❯\".into()),\n\n prompt_char_color: env::var(\"RPURE_COLOR_PROMPT\").unwrap_or(\"magenta\".into()),\n\n prompt_char_root: env::var(\"RPURE_CHAR_PROMPT_ROOT\").unwrap_or(\"#\".into()),\n\n prompt_char_root_color: env::var(\"RPURE_COLOR_PROMPT_ROOT\").unwrap_or(\"red\".into()),\n", "file_path": "src/config.rs", "rank": 17, "score": 6.011569326065605 }, { "content": " }\n\n\n\n let format = if zsh {\n\n format!(\n\n \"%{{{cwd} {branch} {status} {state} {arrow} {stash}%}} %{{\\n{venv}{pchar}%}} \",\n\n cwd = cwd,\n\n branch = vcs_status.name.color(String::from(&cfg.branch_color)),\n\n status = vcs_status.dirty.color(status_color),\n\n arrow = arrow,\n\n stash = stash,\n\n venv = venv.bright_black(),\n\n pchar = prompt_char,\n\n state = state\n\n )\n\n } else {\n\n format!(\n\n \"{nl}{cwd} {branch} {status} {state} {arrow} {stash}\\n{venv}{pchar} \",\n\n cwd = cwd,\n\n branch = vcs_status.name.color(cfg.branch_color),\n\n status = vcs_status.dirty.color(status_color),\n", "file_path": "src/main.rs", "rank": 18, "score": 6.004220719643973 }, { "content": " arrow = arrow,\n\n stash = stash,\n\n venv = venv,\n\n pchar = prompt_char,\n\n state = state,\n\n nl = has_nl\n\n )\n\n };\n\n\n\n print!(\"{}\", format);\n\n}\n\n\n\n\n\n// https://github.com/Lucretiel/lazy_format\n", "file_path": "src/main.rs", "rank": 20, "score": 4.876201439597283 }, { "content": " branch_color: env::var(\"RPURE_COLOR_BRANCH\").unwrap_or(\"bright black\".into()),\n\n commit_color: env::var(\"RPURE_COLOR_COMMIT\").unwrap_or(\"bright black\".into()),\n\n git_clean: env::var(\"RPURE_GIT_CHAR_CLEAN\").unwrap_or(\"●\".into()),\n\n git_clean_color: env::var(\"RPURE_GIT_COLOR_CLEAN\").unwrap_or(\"green\".into()),\n\n git_wt_modified: env::var(\"RPURE_GIT_CHAR_DIRTY\").unwrap_or(\"●\".into()),\n\n git_wt_modified_color: env::var(\"RPURE_GIT_COLOR_CHAR_DIRTY\").unwrap_or(\"red\".into()),\n\n git_index_modified: env::var(\"RPURE_GIT_CHAR_INDEX_MODIFIED\").unwrap_or(\"±\".into()),\n\n git_index_modified_color: env::var(\"RPURE_GIT_COLOR_INDEX_MODIFIED\").unwrap_or(\"yellow\".into()),\n\n git_arrow_up_char: env::var(\"RPURE_GIT_CHAR_UNPUSHED\").unwrap_or(\"↑\".into()),\n\n git_arrow_down_char: env::var(\"RPURE_GIT_CHAR_UNPULLED\").unwrap_or(\"↓\".into()),\n\n git_arrow_up_color: env::var(\"RPURE_GIT_COLOR_UNPUSHED\").unwrap_or(\"cyan\".into()),\n\n git_arrow_down_color: env::var(\"RPURE_GIT_COLOR_UNPULLED\").unwrap_or(\"cyan\".into()),\n\n git_stash_char: env::var(\"RPURE_GIT_CHAR_STASH\").unwrap_or(\"☷\".into()),\n\n git_stash_color: env::var(\"RPURE_GIT_COLOR_STASH\").unwrap_or(\"yellow\".into()),\n\n git_action_color: env::var(\"RPURE_GIT_COLOR_ACTION\").unwrap_or(\"red\".into()),\n\n add_new_line: Config::unwrap_bool(Ok(env::var(\"RPURE_PROMPT_ADD_NEWLINE\").unwrap_or(\"1\".into())))\n\n }\n\n }\n\n\n\n fn unwrap_bool(value: Result<String, VarError>) -> bool {\n\n return match value {\n\n Ok(value) => value == \"1\",\n\n Err(_) => false\n\n }\n\n }\n\n}\n", "file_path": "src/config.rs", "rank": 21, "score": 4.6081082083024185 }, { "content": "use std::env;\n\nuse std::path::Path;\n\n\n", "file_path": "src/venv.rs", "rank": 22, "score": 4.467074934615683 }, { "content": "use std::env;\n\nuse tico::tico;\n\nuse std::path::PathBuf;\n\n\n", "file_path": "src/cwd.rs", "rank": 23, "score": 4.415081949258713 }, { "content": "use colored::*;\n\n\n", "file_path": "src/prompt_char.rs", "rank": 24, "score": 2.0188129094787093 }, { "content": "mod cwd;\n\nmod prompt_char;\n\nmod vcs;\n\nmod venv;\n\nmod config;\n\n\n\nuse colored::*;\n\n\n", "file_path": "src/main.rs", "rank": 25, "score": 1.5495472521882088 } ]
Rust
utils/indigo-service-uploader/rust/src/sd_import/sd_import.rs
tsingdao-Tp/Indigo
b2d73faebb6a450e9b3d34fed553fad4f9d0012f
extern crate postgres; extern crate time; extern crate num_cpus; extern crate threadpool; extern crate flate2; extern crate yaml_rust; extern crate postgres_binary_copy; extern crate rustc_serialize; mod sd_batch_uploader; mod sd_parser; use postgres::{Connection, SslMode, ConnectParams, ConnectTarget, UserInfo}; use std::collections::BTreeMap; use time::PreciseTime; use std::sync::mpsc; use std::thread; use threadpool::ThreadPool; use std::io::prelude::*; use std::fs::File; use flate2::read::GzDecoder; use sd_batch_uploader::SdBatchUploader; use sd_parser::{SdParser, SdItem}; use yaml_rust::{Yaml, YamlLoader}; pub struct SdImport { pub db_config: BTreeMap<String, String>, pub general_config: BTreeMap<String, yaml_rust::yaml::Yaml>, pub db_conn: Connection, } fn read_config(config_name: &str) -> (BTreeMap<String, String>, BTreeMap<String, yaml_rust::yaml::Yaml>) { let mut f_config = File::open(config_name) .ok() .expect(&format!("Can not open configuration file '{}'", config_name)); let mut s = String::new(); f_config.read_to_string(&mut s) .ok() .expect(&format!("Error while reading configuration file '{}'", config_name)); let conf = YamlLoader::load_from_str(&s).unwrap(); let mut db_config: BTreeMap<String, String> = BTreeMap::new(); let mut general_config: BTreeMap<String, yaml_rust::yaml::Yaml> = BTreeMap::new(); for c in conf { let h = c.as_hash().unwrap(); let db = h.get(&Yaml::String("database".to_string())).unwrap(); let db_conf = db.as_hash().unwrap(); for (k, v) in db_conf { db_config.insert(k.as_str().unwrap().to_string(), v.as_str().unwrap().to_string()); } let general = h.get(&Yaml::String("general".to_string())).expect("no general section in config"); let general_conf = general.as_hash().unwrap(); for (k, v) in general_conf { general_config.insert(k.as_str().unwrap().to_string(), v.clone()); } } (db_config, general_config) } impl<'a> SdImport { pub fn new(config_name: &str) -> SdImport { let (db_conf, general_conf) = read_config(config_name); let params = ConnectParams { target: ConnectTarget::Tcp(db_conf.get("url").unwrap().clone()), port: Some(5432), user: Some(UserInfo { user: db_conf.get("user").unwrap().clone(), password: Some(db_conf.get("pass").unwrap().clone()), }), database: Some(db_conf.get("db").unwrap().clone()), options: vec![], }; let conn = Connection::connect(params, &SslMode::None).unwrap(); SdImport { db_config: db_conf, general_config: general_conf, db_conn: conn, } } pub fn insert(&mut self, file_name: &str, table_name: &str) { let t_name = format!("{}.{}", self.db_config.get("schema").unwrap(), table_name); self.parallel_insert(file_name, &t_name) } fn parallel_insert(&mut self, file_name: &str, table_name: &str) { println!("Start import"); let start_t = PreciseTime::now(); let mut str_count: u32 = 0; let buf_size: usize = self.general_config.get("buffer_size").unwrap().as_i64().unwrap() as usize; let (map_send, map_rec) = mpsc::sync_channel(buf_size); let (reduce_send, reduce_rec) = mpsc::sync_channel(buf_size); let reduce_sender = reduce_send.clone(); let f_name = file_name.to_string(); let sd_reader = thread::spawn(move || { let pool = ThreadPool::new(num_cpus::get()); let f = File::open(f_name).unwrap(); let mut f_str = GzDecoder::new(f).unwrap(); let parser = SdParser::new(&mut f_str); for sd in parser { reduce_sender.send(1u8).unwrap(); let map_sender = map_send.clone(); pool.execute(move || { let sd_item = SdItem::new(&sd).unwrap(); map_sender.send(sd_item).unwrap(); }); str_count += 1; } reduce_sender.send(0u8).unwrap(); }); let sd_uploader = &mut SdBatchUploader::new(&self.db_conn, table_name).unwrap(); loop { let status = reduce_rec.recv().unwrap(); match status { 1u8 => { let sd_item: SdItem = map_rec.recv().unwrap(); sd_uploader.upload(sd_item); str_count += 1; } _ => break, } } sd_reader.join().unwrap(); let end_t = PreciseTime::now(); let timer_ms = start_t.to(end_t).num_milliseconds() as f32 ; let timer_s = timer_ms / 1000f32; println!("Insert total time = {} ms ", timer_ms as i32); println!("Average insert time = {} structures per second", ((str_count as f32) / timer_s) as i32); println!("Total structures processed = {}", str_count); } }
extern crate postgres; extern crate time; extern crate num_cpus; extern crate threadpool; extern crate flate2; extern crate yaml_rust; extern crate postgres_binary_copy; extern crate rustc_serialize; mod sd_batch_uploader; mod sd_parser; use postgres::{Connection, SslMode, ConnectParams, ConnectTarget, UserInfo}; use std::collections::BTreeMap; use time::PreciseTime; use std::sync::mpsc; use std::thread; use threadpool::ThreadPool; use std::io::prelude::*; use std::fs::File; use flate2::read::GzDecoder; use sd_batch_uploader::SdBatchUploader; use sd_parser::{SdParser, SdItem}; use yaml_rust::{Yaml, YamlLoader}; pub struct SdImport { pub db_config: BTreeMap<String, String>, pub general_config: BTreeMap<String, yaml_rust::yaml::Yaml>, pub db_conn: Connection, } fn read_config(config_name: &str) -> (BTreeMap<String, String>, BTreeMap<String, yaml_rust::yaml::Yaml>) { let mut f_config = File::open(config_name) .ok() .expect(&format!("Can not open configuration file '{}'", config_name)); let mut s = String::new(); f_config.read_to_string(&mut s) .ok() .expect(&format!("Error while reading configuration file '{}'", config_name)); let conf = YamlLoader::load_from_str(&s).unwrap(); let mut db_config: BTreeMap<String, String> = BTreeMap::new(); let mut general_config: BTreeMap<String, yaml_rust::yaml::Yaml> = BTreeMap::new(); for c in conf { let h = c.as_hash().unwrap(); let db = h.get(&Yaml::String("database".to_string())).unwrap(); let db_conf = db.as_hash().unwrap(); for (k, v) in db_conf { db_config.insert(k.as_str().unwrap().to_string(), v.as_str().unwrap().to_string()); } let general = h.get(&Yaml::String("general".to_string())).expect("no general section in config"); let general_conf = general.as_hash().unwrap(); for (k, v) in general_conf { general_config.insert(k.as_str().unwrap().to_string(), v.clone()); } } (db_config, general_config) } impl<'a> SdImport { pub fn new(config_name: &str) -> SdImport { let (db_conf, general_conf) = read_config(config_name); let params = ConnectParams { target: ConnectTarget::Tcp(db_conf.get("url").unwrap().clone()), port: Some(5432), user: Some(UserInfo { user: db_conf.get("user").unwrap().clone(), password: Some(db_conf.get("pass").unwrap().clone()), }), database: Some(db_conf.get("db").unwrap().clone()), options: vec![], }; let conn = Connection::connect(params, &SslMode::None).unwrap(); SdImport { db_config: db_conf, general_config: general_conf, db_conn: conn, } } pub fn insert(&mut self, file_name: &str, table_name: &str) { let t_name = format!("{}.{}", self.db_config.get("schema").unwrap(), table_name); self.parallel_insert(file_name, &t_name) } fn parallel_insert(&mut self, file_name: &str, table_name: &str) { println!("Start import"); let start_t = PreciseTime::now(); let mut str_count: u32 = 0; let buf_size: usize = self.general_config.get("buffer_size").unwrap().as_i64().unwrap() as usize; let (map_send, map_rec) = mpsc::sync_channel(buf_size); let (reduce_send, reduce_rec) = mpsc::sync_channel(buf_size); let reduce_sender = reduce_send.clone(); let f_name = file_name.to_string(); let sd_reader = thread::spawn(move || { let pool = ThreadPool::new(num_cpus::get()); let f = File::open(f_name).unwrap(); let mut f_str = GzDecoder::new(f).unwrap(); let parser = SdParser::new(&mut f_str); for sd in parser { reduce_sender.send(1u8).unwrap(); let map_sender = map_send.clone(); pool.execute(move || { let sd_item = SdItem::new(&sd).unwrap(); map_sender.send(sd_item).unwrap(); }); str_count += 1; } reduce_sender.send(0u8).unwrap(); }); let sd_uploader = &mut SdBatchUploader::new(&self.db_conn, table_name).unwrap(); loop { let status = reduce_rec.recv().unwrap();
} sd_reader.join().unwrap(); let end_t = PreciseTime::now(); let timer_ms = start_t.to(end_t).num_milliseconds() as f32 ; let timer_s = timer_ms / 1000f32; println!("Insert total time = {} ms ", timer_ms as i32); println!("Average insert time = {} structures per second", ((str_count as f32) / timer_s) as i32); println!("Total structures processed = {}", str_count); } }
match status { 1u8 => { let sd_item: SdItem = map_rec.recv().unwrap(); sd_uploader.upload(sd_item); str_count += 1; } _ => break, }
if_condition
[ { "content": "fn import(file_name: &str, table_name: &str, config_name: Option<String>) {\n\n let conf_name = config_name.unwrap_or(\"config.yml\".to_string());\n\n let mut sd_import = SdImport::new(&conf_name);\n\n sd_import.insert(file_name, table_name);\n\n}\n\n\n", "file_path": "utils/indigo-service-uploader/rust/src/main.rs", "rank": 0, "score": 502706.4207155952 }, { "content": "fn drop_create_table(conn: &Connection, table_name: &str) {\n\n let drop_stmt = format!(\"drop table if exists {}\", table_name);\n\n let create_stmt = format!(\"create table {} (id serial, m bytea, p jsonb) \", table_name);\n\n\n\n conn.execute(&drop_stmt, &[]).ok().expect(\"Table drop failed\");\n\n conn.execute(&create_stmt, &[]).ok().expect(\"Table creation failed\");\n\n}\n\n\n", "file_path": "utils/indigo-service-uploader/rust/tests/test_upload.rs", "rank": 2, "score": 297297.8165052888 }, { "content": "fn get_query_count(conn: &Connection, query: String) -> i64 {\n\n get_single_value::<i64>(&conn, &query).unwrap()\n\n}\n\n\n", "file_path": "utils/indigo-service-uploader/rust/tests/test_upload.rs", "rank": 3, "score": 290378.9093748834 }, { "content": "fn get_single_value<T>(conn: &Connection, query: &str) -> PgResult<T>\n\n where T: FromSql\n\n{\n\n println!(\"Executing query: {}\", query);\n\n let stmt = try!(conn.prepare(query));\n\n let rows = try!(stmt.query(&[]));\n\n let row = rows.iter().next().unwrap();\n\n row.get_opt(0)\n\n}\n\n\n", "file_path": "utils/indigo-service-uploader/rust/tests/test_upload.rs", "rank": 4, "score": 269587.5260196792 }, { "content": "// A builtin parameterized test name generator which returns the result of\n\n// testing::PrintToString.\n\nstruct PrintToStringParamName {\n\n template <class ParamType>\n\n std::string operator()(const TestParamInfo<ParamType>& info) const {\n\n return PrintToString(info.param);\n\n }\n\n};\n\n\n\nnamespace internal {\n\n\n\n// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.\n\n// Utility Functions\n\n\n\n// Outputs a message explaining invalid registration of different\n\n// fixture class for the same test suite. This may happen when\n\n// TEST_P macro is used to define two tests with the same name\n\n// but in different namespaces.\n\nGTEST_API_ void ReportInvalidTestSuiteType(const char* test_suite_name,\n\n CodeLocation code_location);\n\n\n\ntemplate <typename> class ParamGeneratorInterface;\n\ntemplate <typename> class ParamGenerator;\n\n\n\n// Interface for iterating over elements provided by an implementation\n\n// of ParamGeneratorInterface<T>.\n\ntemplate <typename T>\n", "file_path": "third_party/googletest/include/gtest/internal/gtest-param-util.h", "rank": 5, "score": 245155.31573869492 }, { "content": "-- indigoservice user\n\ncreate user indigoservice with password 'p@ssw0rd';\n", "file_path": "utils/indigo-service/db/init_database.sql", "rank": 6, "score": 231785.17908810172 }, { "content": "fn print_usage(program: &str, opts: Options) {\n\n let brief = format!(\"Usage: {} [options] <file_path> <table_name>\", program);\n\n print!(\"{}\", opts.usage(&brief));\n\n}\n\n\n", "file_path": "utils/indigo-service-uploader/rust/src/main.rs", "rank": 7, "score": 226541.5161809002 }, { "content": " match Json::from_str(p_val) {\n\n Ok(e) => e,\n\n Err(_) => p_val.to_string().to_json(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> SdParser<'a> {\n\n pub fn new(input: &'a mut Read) -> SdParser {\n\n let buf = BufReader::new(input);\n\n let iter = buf.lines();\n\n SdParser { sd_iter: iter }\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for SdParser<'a> {\n\n type Item = String;\n\n fn next(&mut self) -> Option<String> {\n\n let mut mol_str = String::new();\n\n\n", "file_path": "utils/indigo-service-uploader/rust/src/sd_import/sd_parser.rs", "rank": 8, "score": 210298.25286909257 }, { "content": "extern crate regex;\n\n\n\nuse std::collections::BTreeMap;\n\nuse rustc_serialize::json::{Json, ToJson};\n\nuse std::io::BufReader;\n\nuse std::io::Lines;\n\nuse std::io::prelude::*;\n\nuse flate2::Compression;\n\nuse flate2::write::GzEncoder;\n\n\n\n\n\nstatic LINE_ENDING: char = '\\n';\n\n\n\n// Structure for keeping molecule and properties\n\npub struct SdItem {\n\n pub mol: Box<Vec<u8>>,\n\n pub props: Box<Json>,\n\n}\n\n/// SD files parser \n\npub struct SdParser<'a> {\n", "file_path": "utils/indigo-service-uploader/rust/src/sd_import/sd_parser.rs", "rank": 9, "score": 210289.10355228843 }, { "content": " let mut f_str = GzDecoder::new(f).unwrap();\n\n let parser = SdParser::new(&mut f_str);\n\n let mut p_size: usize = 0;\n\n let mut m_size: usize = 0;\n\n for sd in parser {\n\n let sd_item = SdItem::new(sd.as_ref());\n\n let sd = &sd_item.unwrap();\n\n assert!(sd.props.is_array());\n\n p_size += sd.props.as_array().unwrap().len();\n\n m_size += 1;\n\n }\n\n assert_eq!(18, m_size);\n\n assert_eq!(576, p_size);\n\n }\n\n #[test]\n\n fn test_sd_scope_108() {\n\n let f = File::open(\"../data/test-108.sd.gz\").unwrap();\n\n let mut f_str = GzDecoder::new(f).unwrap();\n\n let parser = SdParser::new(&mut f_str);\n\n let mut p_size: usize = 0;\n", "file_path": "utils/indigo-service-uploader/rust/src/sd_import/sd_parser.rs", "rank": 10, "score": 210288.10658773704 }, { "content": " let mut m_size: usize = 0;\n\n for sd in parser {\n\n let sd_item = SdItem::new(sd.as_ref());\n\n let sd = &sd_item.unwrap();\n\n assert!(sd.props.is_array());\n\n p_size += sd.props.as_array().unwrap().len();\n\n m_size += 1;\n\n }\n\n assert_eq!(108, m_size);\n\n assert_eq!(3456, p_size);\n\n }\n\n #[test]\n\n fn test_sd_scope_2759() {\n\n let f = File::open(\"../data/test-2759.sd.gz\").unwrap();\n\n let mut f_str = GzDecoder::new(f).unwrap();\n\n let parser = SdParser::new(&mut f_str);\n\n let mut p_size: usize = 0;\n\n let mut m_size: usize = 0;\n\n for sd in parser {\n\n let sd_item = SdItem::new(sd.as_ref());\n", "file_path": "utils/indigo-service-uploader/rust/src/sd_import/sd_parser.rs", "rank": 11, "score": 210286.24758885938 }, { "content": " // Iterate SD file by $$$$\n\n for mol in self.sd_iter\n\n .by_ref()\n\n .filter_map(|a| a.ok())\n\n .take_while(|a| !a.starts_with(\"$$$$\")) {\n\n mol_str.push_str(mol.as_ref());\n\n mol_str.push(LINE_ENDING);\n\n }\n\n return match mol_str.len() {\n\n 0 => None,\n\n _ => Some(mol_str),\n\n };\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n static LINE_ENDING: char = '\\n';\n\n use std::fs::File;\n\n use sd_parser::{SdItem, SdParser};\n", "file_path": "utils/indigo-service-uploader/rust/src/sd_import/sd_parser.rs", "rank": 12, "score": 210284.90321661034 }, { "content": " sd_iter: Lines<BufReader<&'a mut Read>>,\n\n}\n\n\n\nimpl SdItem {\n\n pub fn new(sd: &str) -> Result<SdItem, String> {\n\n let re = regex::Regex::new(\">[:space:]*<\").unwrap();\n\n let mut has_mol = false;\n\n let mut mol: Option<String> = None;\n\n let mut prop_array: Vec<BTreeMap<String, Json>> = Vec::new();\n\n\n\n // Iterate properties. First in iteration is a molecule\n\n for cap in re.split(sd) {\n\n if has_mol {\n\n let prop_list = cap.trim();\n\n let prop_len = prop_list.len();\n\n let end_idx = prop_list.find(LINE_ENDING).unwrap_or(prop_len);\n\n let mut key_idx = end_idx;\n\n\n\n if end_idx > 0 {\n\n key_idx -= 1;\n", "file_path": "utils/indigo-service-uploader/rust/src/sd_import/sd_parser.rs", "rank": 13, "score": 210284.06753600668 }, { "content": " let parser = &mut SdParser::new(&mut f);\n\n assert_eq!(10, parser.count());\n\n }\n\n #[test]\n\n fn test_sd_parse_options() {\n\n let mut f = File::open(\"../data/test_pubchem_10.sdf\").unwrap();\n\n let parser = &mut SdParser::new(&mut f);\n\n\n\n let sd = parser.next().unwrap();\n\n let sd_item = SdItem::new(sd.as_ref()).unwrap();\n\n\n\n assert!(sd_item.props.is_array());\n\n let p_array = sd_item.props.as_array().unwrap();\n\n for p in p_array {\n\n let a = p.find(\"a\").unwrap().as_string().unwrap();\n\n let b = p.find(\"b\").unwrap().as_string().unwrap();\n\n assert!(!b.starts_with(LINE_ENDING));\n\n\n\n if a.contains(\"PUBCHEM_EXACT_MASS\") {\n\n assert!(p.find(\"y\").unwrap().is_number());\n", "file_path": "utils/indigo-service-uploader/rust/src/sd_import/sd_parser.rs", "rank": 14, "score": 210283.43113477805 }, { "content": " }\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_basic_sd_iterator() {\n\n let mut f = File::open(\"../data/test_pubchem_10.sdf\").unwrap();\n\n let parser = SdParser::new(&mut f);\n\n let mut p_size: usize = 0;\n\n for sd in parser {\n\n let sd_item = SdItem::new(sd.as_ref());\n\n let sd = &sd_item.unwrap();\n\n assert!(sd.props.is_array());\n\n p_size += sd.props.as_array().unwrap().len();\n\n }\n\n assert_eq!(325, p_size);\n\n }\n\n #[test]\n\n fn test_sd_scope_18() {\n\n let f = File::open(\"../data/test-18.sd.gz\").unwrap();\n", "file_path": "utils/indigo-service-uploader/rust/src/sd_import/sd_parser.rs", "rank": 15, "score": 210281.56296306054 }, { "content": " mol = Some(cap.to_string());\n\n has_mol = true;\n\n }\n\n }\n\n let res = try!(mol.ok_or(\"no molecules\").and_then(|sd| {\n\n\n\n // Gzip molecular structure\n\n let mut e = GzEncoder::new(Vec::new(), Compression::Default);\n\n e.write(sd.as_bytes()).ok().expect(\"Error while writing a molecule\");\n\n let gz_mol = e.finish().unwrap();;\n\n\n\n Ok(SdItem {\n\n mol: Box::new(gz_mol),\n\n props: Box::new(prop_array.to_json()),\n\n })\n\n }));\n\n Ok(res)\n\n }\n\n\n\n fn read_property(p_val: &str) -> Json {\n", "file_path": "utils/indigo-service-uploader/rust/src/sd_import/sd_parser.rs", "rank": 16, "score": 210279.202281378 }, { "content": " 426\");\n\n\n\n let sd = &sd_item.unwrap();\n\n assert!(sd.props.is_array());\n\n let p_array = sd.props.as_array().unwrap();\n\n let mut sd_properties: BTreeSet<String> = BTreeSet::new();\n\n\n\n for p in p_array {\n\n let a = p.find(\"a\").unwrap();\n\n sd_properties.insert(a.as_string().unwrap().to_string());\n\n }\n\n\n\n assert!(sd_properties.contains(\"COMPOUND_CANONICALIZED\"));\n\n assert!(sd_properties.contains(\"COMPOUND_CID\"));\n\n assert!(sd_properties.contains(\"CACTVS_COMPLEXITY\"));\n\n }\n\n\n\n #[test]\n\n fn test_sd_parser_basic() {\n\n let mut f = File::open(\"../data/test_pubchem_10.sdf\").unwrap();\n", "file_path": "utils/indigo-service-uploader/rust/src/sd_import/sd_parser.rs", "rank": 17, "score": 210276.04206854771 }, { "content": " use flate2::read::GzDecoder;\n\n use std::collections::BTreeSet;\n\n #[test]\n\n fn test_all_properties_parsed() {\n\n let sd_item = SdItem::new(\"975001\n\n -OEChem-05211109542D\n\n\n\n 45 47 0 0 0 0 0 0 \\\n\n 0999 V2000\n\n 25 45 1 0 0 0 0\n\nM END\n\n> <COMPOUND_CID>\n\n\\\n\n 975001\n\n\n\n> <COMPOUND_CANONICALIZED>\n\n1\n\n\n\n> <CACTVS_COMPLEXITY>\n\n\\\n", "file_path": "utils/indigo-service-uploader/rust/src/sd_import/sd_parser.rs", "rank": 18, "score": 210267.00588169502 }, { "content": " }\n\n\n\n let p_name = &cap[0..key_idx];\n\n\n\n let mut val_idx = end_idx;\n\n if val_idx + 1 < prop_len {\n\n val_idx += 1;\n\n }\n\n let p_val = &cap[val_idx..prop_len];\n\n\n\n let mut props: BTreeMap<String, Json> = BTreeMap::new();\n\n\n\n // Create x,y,a,b representations\n\n props.insert(\"x\".to_string(), p_name.to_string().to_lowercase().to_json());\n\n props.insert(\"y\".to_string(), SdItem::read_property(p_val));\n\n props.insert(\"a\".to_string(), p_name.to_string().to_json());\n\n props.insert(\"b\".to_string(), p_val.to_string().to_json());\n\n\n\n prop_array.push(props);\n\n } else {\n", "file_path": "utils/indigo-service-uploader/rust/src/sd_import/sd_parser.rs", "rank": 19, "score": 210264.92761557086 }, { "content": " let sd = &sd_item.unwrap();\n\n assert!(sd.props.is_array());\n\n p_size += sd.props.as_array().unwrap().len();\n\n m_size += 1;\n\n }\n\n assert_eq!(2759, m_size);\n\n assert_eq!(8277, p_size);\n\n }\n\n}\n", "file_path": "utils/indigo-service-uploader/rust/src/sd_import/sd_parser.rs", "rank": 20, "score": 210251.13016443644 }, { "content": "class BingoPgExternalBitset;\n\n\n\n/*\n\n * Class for handling bingo postgres section\n\n * Section consists of:\n\n * section meta info (1 block) |\n\n * section removed bitset (1 block) |\n\n * bits count buffers (16 blocks) |\n\n * map buffers (64k / 500) |\n\n * fp buffers (fp count) |\n\n * binary buffers (dynamic)\n\n */\n", "file_path": "bingo/postgres/src/pg_common/bingo_pg_section.h", "rank": 21, "score": 190887.28569307213 }, { "content": "struct TestParamInfo {\n\n TestParamInfo(const ParamType& a_param, size_t an_index) :\n\n param(a_param),\n\n index(an_index) {}\n\n ParamType param;\n\n size_t index;\n\n};\n\n\n", "file_path": "third_party/googletest/include/gtest/internal/gtest-param-util.h", "rank": 22, "score": 184556.83314989618 }, { "content": " def connection(self):\n\n if not self._connection:\n\n self._connection = psycopg2.connect(**self._settings)\n", "file_path": "utils/indigo-service/service/v2/db/BingoPostgresAdapter.py", "rank": 23, "score": 181394.62511423038 }, { "content": " def user_all(self):\n\n try:\n\n cursor = self.connection.cursor()\n\n cursor.execute(\"select user_id, username, email from indigoservice.users\")\n\n result = []\n\n for item in cursor.fetchall():\n\n result.append({\n\n 'id': item[0],\n\n 'username': item[1],\n\n 'email': item[2],\n\n })\n\n return result\n\n finally:\n", "file_path": "utils/indigo-service/service/v2/db/BingoPostgresAdapter.py", "rank": 24, "score": 181372.42669504543 }, { "content": "struct IsRecursiveContainerImpl<C, true> {\n\n using value_type = decltype(*std::declval<typename C::const_iterator>());\n\n using type =\n\n std::is_same<typename std::remove_const<\n\n typename std::remove_reference<value_type>::type>::type,\n\n C>;\n\n};\n\n\n\n// IsRecursiveContainer<Type> is a unary compile-time predicate that\n\n// evaluates whether C is a recursive container type. A recursive container\n\n// type is a container type whose value_type is equal to the container type\n\n// itself. An example for a recursive container type is\n\n// boost::filesystem::path, whose iterator has a value_type that is equal to\n\n// boost::filesystem::path.\n\ntemplate <typename C>\n", "file_path": "third_party/googletest/include/gtest/internal/gtest-internal.h", "rank": 25, "score": 180685.60040408635 }, { "content": "def connect(self):\n", "file_path": "utils/indigo-service/service/v2/db/database.py", "rank": 26, "score": 179530.0941955044 }, { "content": "class User(Base):\n\n __tablename__ = 'users'\n\n user_id = Column(Integer, primary_key=True)\n\n username = Column(String(50))\n\n email = Column(String(100), unique=True)\n\n password = Column(String(100))\n\n foreign_auth_provider = Column(String(10))\n\n foreign_auth_id = Column(Integer)\n\n\n\n def __init__(self, params):\n\n self.username = params['username']\n\n self.email = params['email']\n\n self.foreign_auth_provider = params['foreign_auth_provider']\n\n self.foreign_auth_id = params['foreign_auth_id']\n\n self.set_password(params['password'])\n\n\n\n def __repr__(self):\n\n return '<User %r, id=%r>' % (self.username, self.user_id)\n\n\n\n def set_password(self, password):\n\n self.password = generate_password_hash(password)\n\n\n\n def check_password(self, password):\n", "file_path": "utils/indigo-service/service/v2/db/models.py", "rank": 27, "score": 179506.26508209942 }, { "content": "struct IndigoTautomerParams\n\n{\n\n int conditions;\n\n bool force_hydrogens;\n\n bool ring_chain;\n\n TautomerMethod method;\n\n};\n\n\n", "file_path": "api/c/indigo/src/indigo_match.h", "rank": 28, "score": 177922.880960507 }, { "content": "struct GenericStringStream;\n\n\n\ntypedef GenericStringStream<UTF8<char> > StringStream;\n\n\n\ntemplate <typename Encoding>\n", "file_path": "third_party/rapidjson/rapidjson/fwd.h", "rank": 29, "score": 177919.24943266346 }, { "content": "struct GenericStringRef {\n\n typedef CharType Ch; //!< character type of the string\n\n\n\n //! Create string reference from \\c const character array\n\n#ifndef __clang__ // -Wdocumentation\n\n /*!\n\n This constructor implicitly creates a constant string reference from\n\n a \\c const character array. It has better performance than\n\n \\ref StringRef(const CharType*) by inferring the string \\ref length\n\n from the array length, and also supports strings containing null\n\n characters.\n\n\n\n \\tparam N length of the string, automatically inferred\n\n\n\n \\param str Constant character array, lifetime assumed to be longer\n\n than the use of the string in e.g. a GenericValue\n\n\n\n \\post \\ref s == str\n\n\n\n \\note Constant complexity.\n", "file_path": "third_party/rapidjson/rapidjson/document.h", "rank": 30, "score": 177919.24943266346 }, { "content": "struct GenericStringStream {\n\n typedef typename Encoding::Ch Ch;\n\n\n\n GenericStringStream(const Ch *src) : src_(src), head_(src) {}\n\n\n\n Ch Peek() const { return *src_; }\n\n Ch Take() { return *src_++; }\n\n size_t Tell() const { return static_cast<size_t>(src_ - head_); }\n\n\n\n Ch* PutBegin() { RAPIDJSON_ASSERT(false); return 0; }\n\n void Put(Ch) { RAPIDJSON_ASSERT(false); }\n\n void Flush() { RAPIDJSON_ASSERT(false); }\n\n size_t PutEnd(Ch*) { RAPIDJSON_ASSERT(false); return 0; }\n\n\n\n const Ch* src_; //!< Current read position.\n\n const Ch* head_; //!< Original head of the string.\n\n};\n\n\n\ntemplate <typename Encoding>\n", "file_path": "third_party/rapidjson/rapidjson/stream.h", "rank": 31, "score": 177919.24943266346 }, { "content": "struct GenericStringRef;\n\n\n\ntemplate <typename Encoding, typename Allocator> \n", "file_path": "third_party/rapidjson/rapidjson/fwd.h", "rank": 32, "score": 177919.24943266346 }, { "content": "struct GenericInsituStringStream;\n\n\n\ntypedef GenericInsituStringStream<UTF8<char> > InsituStringStream;\n\n\n\n// stringbuffer.h\n\n\n\ntemplate <typename Encoding, typename Allocator>\n", "file_path": "third_party/rapidjson/rapidjson/fwd.h", "rank": 33, "score": 175408.34363344108 }, { "content": "struct GenericInsituStringStream {\n\n typedef typename Encoding::Ch Ch;\n\n\n\n GenericInsituStringStream(Ch *src) : src_(src), dst_(0), head_(src) {}\n\n\n\n // Read\n\n Ch Peek() { return *src_; }\n\n Ch Take() { return *src_++; }\n\n size_t Tell() { return static_cast<size_t>(src_ - head_); }\n\n\n\n // Write\n\n void Put(Ch c) { RAPIDJSON_ASSERT(dst_ != 0); *dst_++ = c; }\n\n\n\n Ch* PutBegin() { return dst_ = src_; }\n\n size_t PutEnd(Ch* begin) { return static_cast<size_t>(dst_ - begin); }\n\n void Flush() {}\n\n\n\n Ch* Push(size_t count) { Ch* begin = dst_; dst_ += count; return begin; }\n\n void Pop(size_t count) { dst_ -= count; }\n\n\n\n Ch* src_;\n\n Ch* dst_;\n\n Ch* head_;\n\n};\n\n\n\ntemplate <typename Encoding>\n", "file_path": "third_party/rapidjson/rapidjson/stream.h", "rank": 34, "score": 175408.34363344108 }, { "content": "\t unsigned int format:3;\n", "file_path": "third_party/cairo/src/drm/cairo-drm-intel-brw-structs.h", "rank": 35, "score": 175219.1399852928 }, { "content": "struct CheckParams\n\n{\n\n std::vector<StructureChecker::CheckTypeCode> check_types;\n\n std::vector<int> selected_atoms;\n\n std::vector<int> selected_bonds;\n\n};\n\n\n\nstatic CheckParams check_params_from_string(const std::string& params)\n\n{\n\n CheckParams r;\n\n if (!params.empty())\n\n {\n\n std::smatch sm1;\n\n std::unordered_set<int> ct;\n\n std::string s = params;\n\n std::regex rx1(R\"(\\b(\\w+)\\b)\", std::regex_constants::icase);\n\n while (std::regex_search(s, sm1, rx1))\n\n {\n\n auto code = StructureChecker::getCheckType(sm1[1]);\n\n if (code != StructureChecker::CheckTypeCode::CHECK_NONE)\n", "file_path": "core/indigo-core/molecule/src/structure_checker.cpp", "rank": 36, "score": 172991.5446067705 }, { "content": "struct ItemPointerData;\n", "file_path": "bingo/postgres/src/pg_common/bingo_pg_cursor.h", "rank": 37, "score": 170587.41251752182 }, { "content": "create or replace procedure ImportRDF (table_name in string, clob_col in string, other_cols in string, filename in string)\n\n AS language C name \"oraImportRDF\" library bingolib\n\n with context parameters(context,\n\n table_name, table_name indicator short,\n\n clob_col, clob_col indicator short,\n\n other_cols, other_cols indicator short,\n\n filename, filename indicator short);\n\n/\n\ngrant execute on ImportRDF to public;\n", "file_path": "bingo/oracle/sql/bingo/bingo_calls.sql", "rank": 38, "score": 169959.15686567192 }, { "content": "create or replace procedure ImportSDF (table_name in string, clob_col in string, other_cols in string, filename in string)\n\n AS language C name \"oraImportSDF\" library bingolib\n\n with context parameters(context,\n\n table_name, table_name indicator short,\n\n clob_col, clob_col indicator short,\n\n other_cols, other_cols indicator short,\n\n filename, filename indicator short);\n\n/\n\ngrant execute on ImportSDF to public;\n", "file_path": "bingo/oracle/sql/bingo/bingo_calls.sql", "rank": 39, "score": 169959.15686567192 }, { "content": "create or replace procedure ImportSMILES (table_name in string, smiles_col in string, id_col in string, filename in string)\n\n AS language C name \"oraImportSMILES\" library bingolib\n\n with context parameters(context,\n\n table_name, table_name indicator short,\n\n smiles_col, smiles_col indicator short,\n\n id_col, id_col indicator short,\n\n filename, filename indicator short);\n\n/\n\ngrant execute on ImportSMILES to public;\n", "file_path": "bingo/oracle/sql/bingo/bingo_calls.sql", "rank": 40, "score": 168633.97079354315 }, { "content": "struct DLLEXPORT ProductEnumeratorParams\n\n{\n\n ProductEnumeratorParams()\n\n {\n\n clear();\n\n }\n\n\n\n void clear()\n\n {\n\n is_multistep_reactions = false;\n\n is_one_tube = false;\n\n is_self_react = false;\n\n is_layout = true;\n\n transform_is_layout = true;\n\n max_deep_level = 2;\n\n max_product_count = 1000;\n\n }\n\n\n\n bool is_multistep_reactions;\n\n bool is_one_tube;\n\n bool is_self_react;\n\n bool is_layout;\n\n bool transform_is_layout;\n\n int max_deep_level;\n\n int max_product_count;\n\n};\n\n\n", "file_path": "api/c/indigo/src/indigo_internal.h", "rank": 41, "score": 167397.43448750512 }, { "content": "create index CONFIG_STR_N on CONFIG_STR(n);\n", "file_path": "bingo/oracle/sql/bingo/bingo_config.sql", "rank": 42, "score": 165453.68950525916 }, { "content": " class DLLEXPORT StringPool\n\n {\n\n public:\n\n DECL_ERROR;\n\n\n\n StringPool();\n\n ~StringPool();\n\n\n\n int add(const char* str);\n\n int add(Array<char>& str);\n\n int add(int size);\n\n void remove(int idx);\n\n int size() const;\n\n int begin() const;\n\n int end() const;\n\n int next(int i) const;\n\n void clear();\n\n\n\n char* at(int idx);\n\n const char* at(int idx) const;\n\n /*\n\n * Iterators\n\n */\n", "file_path": "core/indigo-core/common/base_cpp/string_pool.h", "rank": 51, "score": 161336.41387769784 }, { "content": "struct IsRecursiveContainer : public IsRecursiveContainerImpl<C>::type {};\n\n\n\n// Utilities for native arrays.\n\n\n\n// ArrayEq() compares two k-dimensional native arrays using the\n\n// elements' operator==, where k can be any integer >= 0. When k is\n\n// 0, ArrayEq() degenerates into comparing a single pair of values.\n\n\n\ntemplate <typename T, typename U>\n\nbool ArrayEq(const T* lhs, size_t size, const U* rhs);\n\n\n\n// This generic version is used when k is 0.\n\ntemplate <typename T, typename U>\n\ninline bool ArrayEq(const T& lhs, const U& rhs) { return lhs == rhs; }\n\n\n\n// This overload is used when k >= 1.\n\ntemplate <typename T, typename U, size_t N>\n\ninline bool ArrayEq(const T(&lhs)[N], const U(&rhs)[N]) {\n\n return internal::ArrayEq(lhs, N, rhs);\n\n}\n", "file_path": "third_party/googletest/include/gtest/internal/gtest-internal.h", "rank": 52, "score": 160910.8808495094 }, { "content": "struct StreamTraits<GenericStringStream<Encoding> > {\n\n enum { copyOptimization = 1 };\n\n};\n\n\n\n//! String stream with UTF8 encoding.\n\ntypedef GenericStringStream<UTF8<> > StringStream;\n\n\n\n///////////////////////////////////////////////////////////////////////////////\n\n// InsituStringStream\n\n\n\n//! A read-write string stream.\n\n/*! This string stream is particularly designed for in-situ parsing.\n\n \\note implements Stream concept\n\n*/\n\ntemplate <typename Encoding>\n", "file_path": "third_party/rapidjson/rapidjson/stream.h", "rank": 53, "score": 157231.45685647032 }, { "content": " private final String str;\n", "file_path": "utils/indigo-service-uploader/java/src/main/java/com/epam/indigo/uploader/SqlBatchInserter.java", "rank": 54, "score": 156757.9008601719 }, { "content": "create or replace function ConfigGetString (context_id in binary_integer, key_name in string) return string\n\n AS language C name \"oraConfigGetString\" library bingolib\n\n with context parameters(context, context_id, key_name, key_name indicator short, return OCIString);\n\n/\n\n\n", "file_path": "bingo/oracle/sql/bingo/bingo_calls.sql", "rank": 55, "score": 156753.33048274665 }, { "content": " private static final ThreadLocal<JsonParser> parser = new ThreadLocal<>();\n", "file_path": "utils/indigo-service-uploader/java/src/main/java/com/epam/indigo/uploader/SqlBatchInserter.java", "rank": 56, "score": 156747.47243398897 }, { "content": " class IndigoSDFileIterator : public IndigoObject\n\n {\n\n private:\n\n IndigoMoleculeSPtr _current = nullptr;\n\n\n\n public:\n", "file_path": "api/cpp/src/IndigoSDFileIterator.h", "rank": 57, "score": 156177.93568589742 }, { "content": "create or replace procedure ConfigSetString (context_id in binary_integer, key_name in string, value in string)\n\n AS language C name \"oraConfigSetString\" library bingolib\n\n with context parameters(context, context_id, key_name, key_name indicator short, value, value indicator short);\n\n/\n\n\n", "file_path": "bingo/oracle/sql/bingo/bingo_calls.sql", "rank": 58, "score": 155607.97130807728 }, { "content": "struct StreamTraits<GenericInsituStringStream<Encoding> > {\n\n enum { copyOptimization = 1 };\n\n};\n\n\n\n//! Insitu string stream with UTF8 encoding.\n\ntypedef GenericInsituStringStream<UTF8<> > InsituStringStream;\n\n\n\nRAPIDJSON_NAMESPACE_END\n\n\n\n#endif // RAPIDJSON_STREAM_H_\n", "file_path": "third_party/rapidjson/rapidjson/stream.h", "rank": 59, "score": 154978.25332363974 }, { "content": "struct IsRecursiveContainerImpl<C, false> : public std::false_type {};\n\n\n\n// Since the IsRecursiveContainerImpl depends on the IsContainerTest we need to\n\n// obey the same inconsistencies as the IsContainerTest, namely check if\n\n// something is a container is relying on only const_iterator in C++11 and\n\n// is relying on both const_iterator and iterator otherwise\n\ntemplate <typename C>\n", "file_path": "third_party/googletest/include/gtest/internal/gtest-internal.h", "rank": 60, "score": 154412.65432356598 }, { "content": "-- Copyright (C) from 2009 to Present EPAM Systems.\n", "file_path": "bingo/oracle/sql/bingo/bingo_config.sql", "rank": 61, "score": 150597.05768188956 }, { "content": " class PoolAuto\n\n {\n\n public:\n\n PoolAuto(StringPool& owner) : _owner(owner)\n\n {\n\n }\n\n PoolIter begin()\n\n {\n\n return StringPool::PoolIter(_owner, _owner.begin());\n\n }\n\n PoolIter end()\n\n {\n\n return StringPool::PoolIter(_owner, _owner.end());\n\n }\n\n\n\n private:\n\n StringPool& _owner;\n\n };\n\n\n\n PoolAuto elements()\n", "file_path": "core/indigo-core/common/base_cpp/string_pool.h", "rank": 62, "score": 147500.01218692664 }, { "content": " class ImportColumn\n\n {\n\n public:\n\n ImportColumn()\n\n {\n\n }\n\n ~ImportColumn()\n\n {\n\n }\n\n\n\n Array<char> columnName;\n\n Oid type;\n\n\n\n private:\n\n ImportColumn(const ImportColumn&); // no implicit copy\n\n };\n\n\n", "file_path": "bingo/postgres/src/pg_am/pg_bingo_import.cpp", "rank": 63, "score": 147448.0814453504 }, { "content": " class ImportData\n\n {\n\n public:\n\n ImportData()\n\n {\n\n }\n\n virtual ~ImportData()\n\n {\n\n }\n\n\n\n virtual uintptr_t getDatum() = 0;\n\n virtual void convert(const char* str) = 0;\n\n\n\n private:\n\n ImportData(const ImportData&); // no implicit copy\n\n };\n", "file_path": "bingo/postgres/src/pg_am/pg_bingo_import.cpp", "rank": 64, "score": 147448.0814453504 }, { "content": " class ImportTextData : public ImportData\n\n {\n\n public:\n\n ImportTextData()\n\n {\n\n }\n\n ~ImportTextData() override\n\n {\n\n }\n\n\n\n BingoPgText data;\n\n\n\n uintptr_t getDatum() override\n\n {\n\n return data.getDatum();\n\n }\n\n void convert(const char* str) override\n\n {\n\n if (str != 0)\n\n data.initFromString(str);\n\n else\n\n data.clear();\n\n }\n\n\n\n private:\n\n ImportTextData(const ImportTextData&); // no implicit copy\n\n };\n\n\n", "file_path": "bingo/postgres/src/pg_am/pg_bingo_import.cpp", "rank": 65, "score": 147246.0954438796 }, { "content": " class ImportInt4Data : public ImportData\n\n {\n\n public:\n\n ImportInt4Data()\n\n {\n\n }\n\n ~ImportInt4Data() override\n\n {\n\n }\n\n std::unique_ptr<int32> data;\n\n\n\n void convert(const char* str) override\n\n {\n\n if (str == 0)\n\n data.reset(0);\n\n else\n\n {\n\n /*\n\n * Pg atoi workaround\n\n */\n", "file_path": "bingo/postgres/src/pg_am/pg_bingo_import.cpp", "rank": 66, "score": 147246.0954438796 }, { "content": " class ImportInt8Data : public ImportData\n\n {\n\n public:\n\n ImportInt8Data()\n\n {\n\n }\n\n ~ImportInt8Data() override\n\n {\n\n }\n\n std::unique_ptr<int64> data;\n\n\n\n void convert(const char* str) override\n\n {\n\n if (str == 0)\n\n data.reset(0);\n\n else\n\n {\n\n BINGO_PG_TRY\n\n {\n\n data.reset(new int64);\n", "file_path": "bingo/postgres/src/pg_am/pg_bingo_import.cpp", "rank": 67, "score": 147246.0954438796 }, { "content": "-- Copyright (C) from 2009 to Present EPAM Systems.\n", "file_path": "bingo/oracle/sql/bingo/bingo_config.sql", "rank": 68, "score": 146443.0602990217 }, { "content": "class BingoPgSection\n\n{\n\npublic:\n\n enum\n\n {\n\n SECTION_META_PAGES = 2,\n\n SECTION_BITSNUMBER_PAGES = 16,\n\n SECTION_BITS_PER_BLOCK = 4000 /* 4000 * sizeof(unsigned short) < 8K*/\n\n };\n\n BingoPgSection(BingoPgIndex& bingo_idx, int idx_strategy, int offset);\n\n ~BingoPgSection();\n\n\n\n void clear();\n\n\n\n /*\n\n * Returns true if section can be extended\n\n */\n\n bool isExtended();\n\n /*\n\n * Add a structure to current section\n", "file_path": "bingo/postgres/src/pg_common/bingo_pg_section.h", "rank": 69, "score": 145392.0870009856 }, { "content": "class BingoPgConfig\n\n{\n\npublic:\n\n BingoPgConfig();\n\n ~BingoPgConfig()\n\n {\n\n }\n\n\n\n void readDefaultConfig(const char* schema_name);\n\n void updateByIndexConfig(PG_OBJECT index);\n\n void replaceInsertParameter(uintptr_t name_datum, uintptr_t value_datum);\n\n void setUpBingoConfiguration();\n\n\n\n void serialize(indigo::Array<char>& config_data);\n\n void deserialize(void* data, int data_len);\n\n\n\n DECL_ERROR;\n\n\n\nprivate:\n\n BingoPgConfig(const BingoPgConfig&); // no implicit copy\n\n\n\n void _readTable(uintptr_t id, bool tau);\n\n int _getNumericValue(int c_idx);\n\n\n\n void _replaceInsertTauParameter(uintptr_t rule_datum, uintptr_t beg_datum, uintptr_t end_datum);\n\n void _toString(int value, indigo::Array<char>&);\n\n\n\n indigo::RedBlackStringObjMap<indigo::Array<char>> _rawConfig;\n\n indigo::RedBlackStringObjMap<indigo::Array<char>> _stringParams;\n\n\n", "file_path": "bingo/postgres/src/pg_core/bingo_pg_config.h", "rank": 70, "score": 145382.1900738619 }, { "content": "class BingoImportSmilesHandler : public BingoImportHandler\n\n{\n\npublic:\n\n BingoImportSmilesHandler(unsigned int func_id, const char* fname) : BingoImportHandler(func_id)\n\n {\n\n _parseColumns = false;\n\n setFunctionName(\"importSMILES\");\n\n bingo_res = bingoSMILESImportOpen(fname);\n\n CORE_HANDLE_ERROR(bingo_res, 1, \"importSmiles\", bingoGetError());\n\n }\n\n ~BingoImportSmilesHandler() override\n\n {\n\n bingo_res = bingoSMILESImportClose();\n\n CORE_HANDLE_WARNING(bingo_res, 0, \"importSmiles close\", bingoGetError());\n\n }\n\n\n\n bool hasNext() override\n\n {\n\n bingo_res = bingoSMILESImportEOF();\n\n CORE_HANDLE_ERROR(bingo_res, 0, \"importSmiles\", bingoGetError());\n", "file_path": "bingo/postgres/src/pg_am/pg_bingo_import.cpp", "rank": 71, "score": 143923.41080328432 }, { "content": "class BingoImportRdfHandler : public BingoImportHandler\n\n{\n\npublic:\n\n BingoImportRdfHandler(unsigned int func_id, const char* fname) : BingoImportHandler(func_id)\n\n {\n\n _parseColumns = true;\n\n setFunctionName(\"importRDF\");\n\n bingo_res = bingoRDFImportOpen(fname);\n\n CORE_HANDLE_ERROR(bingo_res, 1, \"importRDF\", bingoGetError());\n\n }\n\n ~BingoImportRdfHandler() override\n\n {\n\n bingo_res = bingoRDFImportClose();\n\n CORE_HANDLE_WARNING(bingo_res, 0, \"importRDF close\", bingoGetError());\n\n }\n\n\n\n bool hasNext() override\n\n {\n\n bingo_res = bingoRDFImportEOF();\n\n CORE_HANDLE_ERROR(bingo_res, 0, \"importRDF\", bingoGetError());\n", "file_path": "bingo/postgres/src/pg_am/pg_bingo_import.cpp", "rank": 72, "score": 143923.41080328432 }, { "content": "class BingoImportSdfHandler : public BingoImportHandler\n\n{\n\npublic:\n\n BingoImportSdfHandler(unsigned int func_id, const char* fname) : BingoImportHandler(func_id)\n\n {\n\n _parseColumns = true;\n\n setFunctionName(\"importSDF\");\n\n bingo_res = bingoSDFImportOpen(fname);\n\n CORE_HANDLE_ERROR(bingo_res, 1, \"importSDF\", bingoGetError());\n\n }\n\n ~BingoImportSdfHandler() override\n\n {\n\n bingo_res = bingoSDFImportClose();\n\n CORE_HANDLE_WARNING(bingo_res, 0, \"importSDF close\", bingoGetError());\n\n }\n\n\n\n bool hasNext() override\n\n {\n\n bingo_res = bingoSDFImportEOF();\n\n CORE_HANDLE_ERROR(bingo_res, 0, \"importSDF\", bingoGetError());\n", "file_path": "bingo/postgres/src/pg_am/pg_bingo_import.cpp", "rank": 73, "score": 143923.41080328432 }, { "content": "struct ConstRef<T&> { typedef T& type; };\n\n\n\n// The argument T must depend on some template parameters.\n\n#define GTEST_REFERENCE_TO_CONST_(T) \\\n\n typename ::testing::internal::ConstRef<T>::type\n\n\n\n// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.\n\n//\n\n// Use ImplicitCast_ as a safe version of static_cast for upcasting in\n\n// the type hierarchy (e.g. casting a Foo* to a SuperclassOfFoo* or a\n\n// const Foo*). When you use ImplicitCast_, the compiler checks that\n\n// the cast is safe. Such explicit ImplicitCast_s are necessary in\n\n// surprisingly many situations where C++ demands an exact type match\n\n// instead of an argument type convertable to a target type.\n\n//\n\n// The syntax for using ImplicitCast_ is the same as for static_cast:\n\n//\n\n// ImplicitCast_<ToType>(expr)\n\n//\n\n// ImplicitCast_ would have been part of the C++ standard library,\n", "file_path": "third_party/googletest/include/gtest/internal/gtest-port.h", "rank": 74, "score": 143874.56641874285 }, { "content": "struct ConstRef { typedef const T& type; };\n\ntemplate <typename T>\n", "file_path": "third_party/googletest/include/gtest/internal/gtest-port.h", "rank": 75, "score": 143874.56641874285 }, { "content": "use sd_parser::SdItem;\n\nuse postgres::Connection;\n\nuse postgres::types::{Type, ToSql};\n\n\n\nuse postgres_binary_copy::BinaryCopyReader;\n\n\n\nstatic MAX_CAPACITY: u64 = 1 << 23; // 8 MB\n\n\n\n/// Postgres Batch Uploader\n\n/// Uses a cache and binary copy\n\npub struct SdBatchUploader<'a> {\n\n conn: &'a Connection,\n\n buf: Vec<Box<ToSql>>,\n\n copy_stmt: String,\n\n buf_size: u64,\n\n}\n\n\n\nimpl<'a> SdBatchUploader<'a> {\n\n pub fn new(pg_conn: &'a Connection, table_name: &str) -> Result<SdBatchUploader<'a>, String> {\n\n Ok(SdBatchUploader {\n", "file_path": "utils/indigo-service-uploader/rust/src/sd_import/sd_batch_uploader.rs", "rank": 76, "score": 143526.24735597 }, { "content": " conn: pg_conn,\n\n buf: Vec::new(),\n\n copy_stmt: format!(\"COPY {} (m, p) FROM STDIN BINARY\", table_name),\n\n buf_size: 0,\n\n })\n\n }\n\n\n\n pub fn upload(&mut self, sd_item: SdItem) {\n\n self.buf_size += sd_item.mol.as_ref().len() as u64;\n\n // for (key, value) in sd_item.props.iter() {\n\n // self.buf_size += key.len() as u64;\n\n // self.buf_size += value.len() as u64;\n\n // }\n\n self.buf.push(sd_item.mol);\n\n self.buf.push(sd_item.props);\n\n if self.buf_size > MAX_CAPACITY {\n\n self.flush();\n\n }\n\n }\n\n\n", "file_path": "utils/indigo-service-uploader/rust/src/sd_import/sd_batch_uploader.rs", "rank": 77, "score": 143490.49237252245 }, { "content": " fn flush(&mut self) {\n\n if self.buf.len() == 0 {\n\n return;\n\n }\n\n {\n\n let types = &[Type::Bytea, Type::Jsonb];\n\n let data = self.buf.iter().map(|v| &**v);\n\n let mut reader = BinaryCopyReader::new(types, data);\n\n\n\n let stmt = self.conn.prepare(&self.copy_stmt).unwrap();\n\n stmt.copy_in(&[], &mut reader).unwrap();\n\n }\n\n\n\n self.buf.clear();\n\n self.buf_size = 0;\n\n }\n\n}\n\n\n\n\n\nimpl<'a> Drop for SdBatchUploader<'a> {\n\n fn drop(&mut self) {\n\n self.flush();\n\n }\n\n}\n", "file_path": "utils/indigo-service-uploader/rust/src/sd_import/sd_batch_uploader.rs", "rank": 78, "score": 143481.7165306446 }, { "content": " class PoolIter : public AutoIterator\n\n {\n\n public:\n\n PoolIter(StringPool& owner, int idx) : AutoIterator(idx), _owner(owner)\n\n {\n\n }\n\n PoolIter& operator++()\n\n {\n\n _idx = _owner.next(_idx);\n\n return *this;\n\n }\n\n\n\n private:\n\n StringPool& _owner;\n\n };\n", "file_path": "core/indigo-core/common/base_cpp/string_pool.h", "rank": 79, "score": 141471.96645252634 }, { "content": "class ThreadWithParamSupport : public ThreadWithParamBase {\n\n public:\n\n static HANDLE CreateThread(Runnable* runnable,\n\n Notification* thread_can_start) {\n\n ThreadMainParam* param = new ThreadMainParam(runnable, thread_can_start);\n\n DWORD thread_id;\n\n HANDLE thread_handle = ::CreateThread(\n\n nullptr, // Default security.\n\n 0, // Default stack size.\n\n &ThreadWithParamSupport::ThreadMain,\n\n param, // Parameter to ThreadMainStatic\n\n 0x0, // Default creation flags.\n\n &thread_id); // Need a valid pointer for the call to work under Win98.\n\n GTEST_CHECK_(thread_handle != nullptr)\n\n << \"CreateThread failed with error \" << ::GetLastError() << \".\";\n\n if (thread_handle == nullptr) {\n\n delete param;\n\n }\n\n return thread_handle;\n\n }\n", "file_path": "third_party/googletest/src/gtest-port.cc", "rank": 80, "score": 139578.00744135788 }, { "content": "class ThreadWithParam : public ThreadWithParamBase {\n\n public:\n\n typedef void UserThreadFunc(T);\n\n\n\n ThreadWithParam(UserThreadFunc* func, T param, Notification* thread_can_start)\n\n : ThreadWithParamBase(new RunnableImpl(func, param), thread_can_start) {\n\n }\n\n virtual ~ThreadWithParam() {}\n\n\n\n private:\n", "file_path": "third_party/googletest/include/gtest/internal/gtest-port.h", "rank": 81, "score": 139578.00744135788 }, { "content": " void increment();\n\n };\n\n\n\n private:\n\n IndigoSDFileIterator(int id, IndigoSessionPtr session);\n\n\n\n friend class IndigoSession;\n\n\n\n public:\n\n iterator begin();\n\n static iterator end();\n\n\n\n void next();\n\n\n\n bool valid() const;\n\n };\n\n}\n", "file_path": "api/cpp/src/IndigoSDFileIterator.h", "rank": 82, "score": 139429.40493044036 }, { "content": "#pragma once\n\n\n\n#include <memory>\n\n\n\n#include \"IndigoMolecule.h\"\n\n#include \"IndigoObject.h\"\n\n\n\nnamespace indigo_cpp\n\n{\n", "file_path": "api/cpp/src/IndigoSDFileIterator.h", "rank": 83, "score": 139417.7930880261 }, { "content": "create table CONFIG_STR (n int, name varchar2(100), value varchar2(4000));\n", "file_path": "bingo/oracle/sql/bingo/bingo_config.sql", "rank": 84, "score": 137775.2735148946 }, { "content": " function AAM (target in CLOB, params in VARCHAR2) return CLOB;\n", "file_path": "bingo/oracle/sql/bingo/ringo_package.sql", "rank": 85, "score": 137035.12107650476 }, { "content": " function AAM (target in BLOB, params in VARCHAR2) return CLOB;\n\n\n", "file_path": "bingo/oracle/sql/bingo/ringo_package.sql", "rank": 86, "score": 137035.12107650476 }, { "content": " function AAM (target in VARCHAR2, params in VARCHAR2) return CLOB;\n", "file_path": "bingo/oracle/sql/bingo/ringo_package.sql", "rank": 87, "score": 137035.12107650476 }, { "content": " function InChI (target in BLOB, options in VARCHAR2) return CLOB;\n", "file_path": "bingo/oracle/sql/bingo/mango_package.sql", "rank": 88, "score": 137010.26162196833 }, { "content": " function Molfile (target in CLOB, options in VARCHAR2) return CLOB;\n", "file_path": "bingo/oracle/sql/bingo/mango_package.sql", "rank": 89, "score": 137010.26162196833 }, { "content": " function InChI (target in CLOB, options in VARCHAR2) return CLOB;\n", "file_path": "bingo/oracle/sql/bingo/mango_package.sql", "rank": 90, "score": 137010.26162196833 }, { "content": " function Molfile (target in BLOB, options in VARCHAR2) return CLOB;\n", "file_path": "bingo/oracle/sql/bingo/mango_package.sql", "rank": 91, "score": 137010.26162196833 }, { "content": " function InChI (target in VARCHAR2, options in VARCHAR2) return CLOB;\n", "file_path": "bingo/oracle/sql/bingo/mango_package.sql", "rank": 92, "score": 137010.26162196833 }, { "content": " function Fingerprint (target in CLOB, options in VARCHAR2) return BLOB;\n", "file_path": "bingo/oracle/sql/bingo/mango_package.sql", "rank": 93, "score": 137010.26162196833 }, { "content": " function SMILES (target in BLOB, options in VARCHAR2) return VARCHAR2;\n", "file_path": "bingo/oracle/sql/bingo/mango_package.sql", "rank": 94, "score": 137010.26162196833 }, { "content": " function Molfile (target in VARCHAR2, options in VARCHAR2) return CLOB;\n", "file_path": "bingo/oracle/sql/bingo/mango_package.sql", "rank": 95, "score": 137010.26162196833 }, { "content": " function SMILES (target in CLOB, options in VARCHAR2) return VARCHAR2;\n", "file_path": "bingo/oracle/sql/bingo/mango_package.sql", "rank": 96, "score": 137010.26162196833 }, { "content": " function Fingerprint (target in VARCHAR2, options in VARCHAR2) return BLOB;\n", "file_path": "bingo/oracle/sql/bingo/mango_package.sql", "rank": 97, "score": 137010.26162196833 }, { "content": " function SMILES (target in VARCHAR2, options in VARCHAR2) return VARCHAR2;\n", "file_path": "bingo/oracle/sql/bingo/mango_package.sql", "rank": 98, "score": 137010.26162196833 }, { "content": "extern crate postgres;\n\nextern crate flate2;\n\nextern crate sd_import;\n\n\n\n\n\nuse postgres::types::FromSql;\n\nuse postgres::Connection;\n\nuse postgres::Result as PgResult;\n\nuse sd_import::SdImport;\n\n\n", "file_path": "utils/indigo-service-uploader/rust/tests/test_upload.rs", "rank": 99, "score": 59.36545669801532 } ]
Rust
app/gui/src/ide/integration/file_system.rs
enso-org/enso
2676aa50a3cc86a1c0673a3e60a553134e350b1b
use crate::prelude::*; use crate::controller::graph::NewNodeInfo; use crate::controller::upload::pick_non_colliding_name; use engine_protocol::language_server; use engine_protocol::language_server::ContentRoot; use engine_protocol::language_server::FileSystemObject; use enso_frp as frp; use ensogl_component::file_browser::model::Entry; use ensogl_component::file_browser::model::EntryType; use ensogl_component::file_browser::model::FolderContent; use ensogl_component::file_browser::model::FolderType; use json_rpc::error::RpcError; use std::iter::once; #[derive(Clone, Debug, Fail)] #[fail(display = "Invalid path received from File Browser Component: {}", path)] struct InvalidPath { path: String, } pub fn to_file_browser_path(path: &language_server::Path) -> std::path::PathBuf { let root_id_str = path.root_id.to_string(); let segments_str = path.segments.iter().map(AsRef::<str>::as_ref); once("/").chain(once(root_id_str.as_ref())).chain(segments_str).collect() } pub fn from_file_browser_path(path: &std::path::Path) -> FallibleResult<language_server::Path> { use std::path::Component::*; let mut iter = path.components(); match (iter.next(), iter.next()) { (Some(RootDir), Some(Normal(root_id))) => { let root_id = root_id.to_string_lossy().parse()?; Ok(language_server::Path::new(root_id, iter.map(|s| s.as_os_str().to_string_lossy()))) } _ => { let path = path.to_string_lossy().to_string(); Err(InvalidPath { path }.into()) } } } #[derive(Clone, Debug)] pub struct FileProvider { connection: Rc<language_server::Connection>, content_roots: Vec<Rc<ContentRoot>>, } impl FileProvider { pub fn new(project: &model::Project) -> Self { Self { connection: project.json_rpc(), content_roots: project.content_roots() } } } impl FolderContent for FileProvider { fn request_entries( &self, entries_loaded: frp::Any<Rc<Vec<Entry>>>, _error_occurred: frp::Any<ImString>, ) { let entries = self.content_roots.iter().filter_map(|root| { let ls_path = language_server::Path::new_root(root.id()); let path = to_file_browser_path(&ls_path); let (name, folder_type) = match &**root { language_server::ContentRoot::Project { .. } => Some(("Project".to_owned(), FolderType::Project)), language_server::ContentRoot::FileSystemRoot { path, .. } => Some((path.clone(), FolderType::Root)), language_server::ContentRoot::Home { .. } => Some(("Home".to_owned(), FolderType::Home)), language_server::ContentRoot::Library { .. } => None, /* We skip libraries, as */ language_server::ContentRoot::Custom { .. } => None, /* Custom content roots are * not used. */ }?; let type_ = EntryType::Folder { type_: folder_type, content: { let connection = self.connection.clone_ref(); DirectoryView::new_from_root(connection, root.clone_ref()).into() }, }; Some(Entry { type_, name, path }) }); entries_loaded.emit(Rc::new(entries.sorted().collect_vec())); } } #[derive(Clone, CloneRef, Debug)] pub struct DirectoryView { connection: Rc<language_server::Connection>, content_root: Rc<ContentRoot>, path: Rc<language_server::Path>, } impl DirectoryView { pub fn new_from_root( connection: Rc<language_server::Connection>, content_root: Rc<ContentRoot>, ) -> Self { let path = Rc::new(language_server::Path::new_root(content_root.id())); Self { connection, content_root, path } } pub fn sub_view(&self, sub_dir: impl Str) -> DirectoryView { DirectoryView { connection: self.connection.clone_ref(), content_root: self.content_root.clone_ref(), path: Rc::new(self.path.append_im(sub_dir)), } } pub async fn get_entries_list(&self) -> Result<Vec<Entry>, RpcError> { let response = self.connection.file_list(&self.path).await?; let entries = response.paths.into_iter().map(|fs_obj| match fs_obj { FileSystemObject::Directory { name, path } | FileSystemObject::DirectoryTruncated { name, path } | FileSystemObject::SymlinkLoop { name, path, .. } => { let path = to_file_browser_path(&path).join(&name); let sub = self.sub_view(&name); let type_ = EntryType::Folder { type_: FolderType::Standard, content: sub.into() }; Entry { type_, name, path } } FileSystemObject::File { name, path } | FileSystemObject::Other { name, path } => { let path = to_file_browser_path(&path).join(&name); let type_ = EntryType::File; Entry { type_, name, path } } }); Ok(entries.sorted().collect()) } } impl FolderContent for DirectoryView { fn request_entries( &self, entries_loaded: frp::Any<Rc<Vec<Entry>>>, error_occurred: frp::Any<ImString>, ) { let this = self.clone_ref(); executor::global::spawn(async move { match this.get_entries_list().await { Ok(entries) => entries_loaded.emit(Rc::new(entries)), Err(RpcError::RemoteError(error)) => error_occurred.emit(ImString::new(error.message)), Err(error) => error_occurred.emit(ImString::new(error.to_string())), } }); } } #[derive(Clone, Debug, Fail)] #[fail(display = "Invalid source file for copy/move operation: {}", path)] struct InvalidSourceFile { path: String, } pub fn create_node_from_file( project: &model::Project, graph: &controller::Graph, path: &std::path::Path, ) -> FallibleResult { let ls_path = from_file_browser_path(path)?; let path_segments = ls_path.segments.into_iter().join("/"); let content_root = project.content_root_by_id(ls_path.root_id)?; let path = match &*content_root { ContentRoot::Project { .. } => format!("Enso_Project.root/\"{}\"", path_segments), ContentRoot::FileSystemRoot { path, .. } => format!("\"{}/{}\"", path, path_segments), ContentRoot::Home { .. } => format!("File.home/\"{}\"", path_segments), ContentRoot::Library { namespace, name, .. } => format!("{}.{}.Enso_Project.root / \"{}\"", namespace, name, path_segments), ContentRoot::Custom { .. } => "Unsupported Content Root".to_owned(), }; let expression = format!("File.read {}", path); let node_info = NewNodeInfo::new_pushed_back(expression); graph.add_node(node_info)?; Ok(()) } #[allow(missing_docs)] #[derive(Copy, Clone, Debug)] pub enum FileOperation { Copy, Move, } impl Default for FileOperation { fn default() -> Self { Self::Copy } } impl FileOperation { pub fn verb(&self) -> &'static str { match self { Self::Copy => "copy", Self::Move => "move", } } } pub async fn do_file_operation( project: &model::Project, source: &std::path::Path, dest_dir: &std::path::Path, operation: FileOperation, ) -> FallibleResult { let json_rpc = project.json_rpc(); let ls_source = from_file_browser_path(source)?; let ls_dest = from_file_browser_path(dest_dir)?; let src_name = ls_source .file_name() .ok_or_else(|| InvalidSourceFile { path: source.to_string_lossy().to_string() })?; let dest_name = pick_non_colliding_name(&*json_rpc, &ls_dest, src_name).await?; let dest_full = ls_dest.append_im(dest_name); match operation { FileOperation::Copy => json_rpc.copy_file(&ls_source, &dest_full).await?, FileOperation::Move => json_rpc.move_file(&ls_source, &dest_full).await?, } Ok(()) }
use crate::prelude::*; use crate::controller::graph::NewNodeInfo; use crate::controller::upload::pick_non_colliding_name; use engine_protocol::language_server; use engine_protocol::language_server::ContentRoot; use engine_protocol::language_server::FileSystemObject; use enso_frp as frp; use ensogl_component::file_browser::model::Entry; use ensogl_component::file_browser::model::EntryType; use ensogl_component::file_browser::model::FolderContent; use ensogl_component::file_browser::model::FolderType; use json_rpc::error::RpcError; use std::iter::once; #[derive(Clone, Debug, Fail)] #[fail(display = "Invalid path received from File Browser Component: {}", path)] struct InvalidPath { path: String, } pub fn to_file_browser_path(path: &language_server::Path) -> std::path::PathBuf { let root_id_str = path.root_id.to_string(); let segments_str = path.segments.iter().map(AsRef::<str>::as_ref); once("/").chain(once(root_id_str.as_ref())).chain(segments_str).collect() } pub fn from_file_browser_path(path: &std::path::Path) -> FallibleResult<language_server::Path> { use std::path::Component::*; let mut iter = path.components(); match (iter.next(), iter.next()) { (Some(RootDir), Some(Normal(root_id))) => { let root_id = root_id.to_string_lossy().parse()?; Ok(language_server::Path::new(root_id, iter.map(|s| s.as_os_str().to_string_lossy()))) } _ => { let path = path.to_string_lossy().to_string(); Err(InvalidPath { path }.into()) } } } #[derive(Clone, Debug)] pub struct FileProvider { connection: Rc<language_server::Connection>, content_roots: Vec<Rc<ContentRoot>>, } impl FileProvider { pub fn new(project: &model::Project) -> Self { Self { connection: project.json_rpc(), content_roots: project.content_roots() } } } impl FolderContent for FileProvider { fn request_entries( &self, entries_loaded: frp::Any<Rc<Vec<Entry>>>, _error_occurred: frp::Any<ImString>, ) { let entries = self.content_roots.iter().filter_map(|root| { let ls_path = language_server::Path::new_root(root.id()); let path = to_file_browser_path(&ls_path); let (name, folder_type) = match &**root { language_server::ContentRoot::Project { .. } => Some(("Project".to_owned(), FolderType::Project)), language_server::ContentRoot::FileSystemRoot { path, .. } => Some((path.clone(), FolderType::Root)), language_server::ContentRoot::Home { .. } => Some(("Home".to_owned(), FolderType::Home)), language_server::ContentRoot::Library { .. } => None, /* We skip libraries, as */ language_server::ContentRoot::Custom { .. } => None, /* Custom content roots are * not used. */ }?; let type_ = EntryType::Folder { type_: folder_type, content: { let connection = self.connection.clone_ref(); DirectoryView::new_from_root(connection, root.clone_ref()).into() }, }; Some(Entry { type_, name, path }) }); entries_loaded.emit(Rc::new(entries.sorted().collect_vec())); } } #[derive(Clone, CloneRef, Debug)] pub struct DirectoryView { connection: Rc<language_server::Connection>, content_root: Rc<ContentRoot>, path: Rc<language_server::Path>, } impl DirectoryView { pub fn new_from_root( connection: Rc<language_server::Connection>, content_root: Rc<ContentRoot>, ) -> Self { let path = Rc::new(language_server::Path::new_root(content_root.id())); Self { connection, content_root, path } }
pub async fn get_entries_list(&self) -> Result<Vec<Entry>, RpcError> { let response = self.connection.file_list(&self.path).await?; let entries = response.paths.into_iter().map(|fs_obj| match fs_obj { FileSystemObject::Directory { name, path } | FileSystemObject::DirectoryTruncated { name, path } | FileSystemObject::SymlinkLoop { name, path, .. } => { let path = to_file_browser_path(&path).join(&name); let sub = self.sub_view(&name); let type_ = EntryType::Folder { type_: FolderType::Standard, content: sub.into() }; Entry { type_, name, path } } FileSystemObject::File { name, path } | FileSystemObject::Other { name, path } => { let path = to_file_browser_path(&path).join(&name); let type_ = EntryType::File; Entry { type_, name, path } } }); Ok(entries.sorted().collect()) } } impl FolderContent for DirectoryView { fn request_entries( &self, entries_loaded: frp::Any<Rc<Vec<Entry>>>, error_occurred: frp::Any<ImString>, ) { let this = self.clone_ref(); executor::global::spawn(async move { match this.get_entries_list().await { Ok(entries) => entries_loaded.emit(Rc::new(entries)), Err(RpcError::RemoteError(error)) => error_occurred.emit(ImString::new(error.message)), Err(error) => error_occurred.emit(ImString::new(error.to_string())), } }); } } #[derive(Clone, Debug, Fail)] #[fail(display = "Invalid source file for copy/move operation: {}", path)] struct InvalidSourceFile { path: String, } pub fn create_node_from_file( project: &model::Project, graph: &controller::Graph, path: &std::path::Path, ) -> FallibleResult { let ls_path = from_file_browser_path(path)?; let path_segments = ls_path.segments.into_iter().join("/"); let content_root = project.content_root_by_id(ls_path.root_id)?; let path = match &*content_root { ContentRoot::Project { .. } => format!("Enso_Project.root/\"{}\"", path_segments), ContentRoot::FileSystemRoot { path, .. } => format!("\"{}/{}\"", path, path_segments), ContentRoot::Home { .. } => format!("File.home/\"{}\"", path_segments), ContentRoot::Library { namespace, name, .. } => format!("{}.{}.Enso_Project.root / \"{}\"", namespace, name, path_segments), ContentRoot::Custom { .. } => "Unsupported Content Root".to_owned(), }; let expression = format!("File.read {}", path); let node_info = NewNodeInfo::new_pushed_back(expression); graph.add_node(node_info)?; Ok(()) } #[allow(missing_docs)] #[derive(Copy, Clone, Debug)] pub enum FileOperation { Copy, Move, } impl Default for FileOperation { fn default() -> Self { Self::Copy } } impl FileOperation { pub fn verb(&self) -> &'static str { match self { Self::Copy => "copy", Self::Move => "move", } } } pub async fn do_file_operation( project: &model::Project, source: &std::path::Path, dest_dir: &std::path::Path, operation: FileOperation, ) -> FallibleResult { let json_rpc = project.json_rpc(); let ls_source = from_file_browser_path(source)?; let ls_dest = from_file_browser_path(dest_dir)?; let src_name = ls_source .file_name() .ok_or_else(|| InvalidSourceFile { path: source.to_string_lossy().to_string() })?; let dest_name = pick_non_colliding_name(&*json_rpc, &ls_dest, src_name).await?; let dest_full = ls_dest.append_im(dest_name); match operation { FileOperation::Copy => json_rpc.copy_file(&ls_source, &dest_full).await?, FileOperation::Move => json_rpc.move_file(&ls_source, &dest_full).await?, } Ok(()) }
pub fn sub_view(&self, sub_dir: impl Str) -> DirectoryView { DirectoryView { connection: self.connection.clone_ref(), content_root: self.content_root.clone_ref(), path: Rc::new(self.path.append_im(sub_dir)), } }
function_block-full_function
[ { "content": "/// Return FRP endpoints for the parameters that define a shadow.\n\npub fn frp_from_style(style: &StyleWatchFrp, path: impl Into<style::Path>) -> ParametersFrp {\n\n let path: style::Path = path.into();\n\n ParametersFrp {\n\n base_color: style.get_color(&path),\n\n fading: style.get_color(&path.sub(\"fading\")),\n\n size: style.get_number(&path.sub(\"size\")),\n\n spread: style.get_number(&path.sub(\"spread\")),\n\n exponent: style.get_number(&path.sub(\"exponent\")),\n\n offset_x: style.get_number(&path.sub(\"offset_x\")),\n\n offset_y: style.get_number(&path.sub(\"offset_y\")),\n\n }\n\n}\n", "file_path": "lib/rust/ensogl/component/shadow/src/lib.rs", "rank": 0, "score": 463854.3111939685 }, { "content": "/// Values implementing this trait describe the content of folders. They can be seen as a lazy,\n\n/// asynchronous wrapper around a `Vec` of file system entries.\n\npub trait FolderContent: Debug {\n\n /// Request the list of entries inside the folder. When the list is ready, it is emitted at\n\n /// `entries_loaded`. If an error occurs then the error message is emitted at `error_occurred`.\n\n fn request_entries(\n\n &self,\n\n entries_loaded: frp::Any<Rc<Vec<Entry>>>,\n\n error_occurred: frp::Any<ImString>,\n\n );\n\n}\n\n\n\n/// A wrapper around `Rc<dyn FolderContent>`. Necessary to implement the `Default` trait on this\n\n/// type, which we need to pass it through FRP networks.\n\n#[derive(Debug, Clone)]\n\npub struct AnyFolderContent(Rc<dyn FolderContent>);\n\n\n\nimpl Deref for AnyFolderContent {\n\n type Target = dyn FolderContent;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n self.0.deref()\n", "file_path": "lib/rust/ensogl/component/file-browser/src/model.rs", "rank": 1, "score": 435325.48252340546 }, { "content": " def build(projectName: String, root: Path, file: Path): Option[String] = {\n\n getModuleSegments(root, file).map { modules =>\n\n toModule(projectName +: modules :+ getModuleName(file))\n\n }\n\n }\n\n\n\n /** Extract segments related to the module from the file path.\n\n *\n\n * @param root the project root directory\n\n * @param file the module file path\n\n * @return the list of module segments\n\n */\n\n private def getModuleSegments(\n\n root: Path,\n\n file: Path\n\n ): Option[Vector[String]] = {\n\n Try(root.relativize(file)).toOption\n\n .map { relativePath =>\n\n val b = Vector.newBuilder[String]\n\n 1.until(relativePath.getNameCount - 1)\n", "file_path": "engine/language-server/src/main/scala/org/enso/languageserver/search/ModuleNameBuilder.scala", "rank": 2, "score": 432584.6749583563 }, { "content": " def relativePath(file: Path): String = projectRoot.relativize(file).toString\n", "file_path": "lib/scala/library-manager/src/main/scala/org/enso/libraryupload/LibraryUploader.scala", "rank": 3, "score": 422992.9224742293 }, { "content": "/// Split qualified name into segments, like `\"Int.add\"` into `[\"Int\",\"add\"]`.\n\npub fn name_segments(name: &str) -> impl Iterator<Item = &str> {\n\n name.split(predefined::ACCESS)\n\n}\n\n\n\n\n\n\n\n// ===========================\n\n// === Chain-related types ===\n\n// ===========================\n\n\n\n/// A structure which keeps argument's AST with information about offset between it and an operator.\n\n/// We cannot use `Shifted` because `Shifted` assumes that offset is always before ast it contains,\n\n/// what is not a case here.\n\n#[allow(missing_docs)]\n\n#[derive(Clone, Debug)]\n\npub struct ArgWithOffset<T> {\n\n pub arg: T,\n\n pub offset: usize,\n\n}\n\n\n\n/// Infix operator operand. Optional, as we deal with Section* nodes as well.\n\npub type Operand = Option<ArgWithOffset<Ast>>;\n\n\n\n/// Infix operator standing between (optional) operands.\n\npub type Operator = known::Opr;\n\n\n", "file_path": "app/gui/language/ast/impl/src/opr.rs", "rank": 4, "score": 420062.47826380504 }, { "content": "/// Checks if a given `Path` consists of a single identifier same as given string.\n\npub fn path_matching_ident(path: &syn::Path, str: impl Str) -> bool {\n\n path.get_ident().map_or(false, |ident| ident == str.as_ref())\n\n}\n\n\n\n\n\n\n\n// ======================\n\n// === Index Sequence ===\n\n// ======================\n\n\n", "file_path": "lib/rust/macro-utils/src/lib.rs", "rank": 7, "score": 415168.5578841974 }, { "content": " def file(name: String): File = new File(testContentRoot.file, name)\n\n\n\n \"text operations\" should {\n\n\n\n \"notify runtime about operations with files\" taggedAs Flaky in {\n\n // Interaction:\n\n // 1. Client 1 creates a file.\n\n // 2. Client 1 opens the file.\n\n // 3. Client 1 receives confirmation.\n\n // 4. Runtime receives open notification.\n\n // 5. Client 2 opens the same file.\n\n // 6. Client 2 receives confirmation\n\n // 7. Runtime receives no notifications.\n\n // 8. Client 1 edits the file.\n\n // 9. Client 2 receives a notification.\n\n // 10. Runtime receives edit notification.\n\n // 11. Client 1 saves the file.\n\n // 12. Runtime receives no notifications.\n\n // 13. Client 1 closes the file.\n\n // 14. Runtime receives no notifications.\n", "file_path": "engine/language-server/src/test/scala/org/enso/languageserver/websocket/json/FileNotificationsTest.scala", "rank": 8, "score": 414744.3141303411 }, { "content": "#[allow(non_snake_case)]\n\npub fn Icon(name: impl Into<ImString>) -> Icon {\n\n let name = name.into();\n\n Icon { name }\n\n}\n\n\n\n\n\n\n\n// ===========\n\n// === FRP ===\n\n// ===========\n\n\n\n/// A type representing path to entry in some column.\n\npub type EntryPath<Id> = Rc<Vec<Id>>;\n\n\n\nensogl::define_endpoints! { <Id:(Debug+Clone+'static)>\n\n Input {\n\n reset(),\n\n directory_content (EntryPath<Id>,Entry),\n\n set_highlight (EntryPath<Id>),\n\n }\n", "file_path": "app/gui/view/src/searcher/new.rs", "rank": 9, "score": 409759.19763538206 }, { "content": "/// Loads shadow parameters from the given style, at the given path. The structure of the style\n\n/// definition should be analogous to that at `ensogl_hardcoded_theme::shadow`.\n\npub fn parameters_from_style_path(style: &StyleWatch, path: impl Into<style::Path>) -> Parameters {\n\n let path: style::Path = path.into();\n\n Parameters {\n\n base_color: style.get_color(&path),\n\n fading: style.get_color(&path.sub(\"fading\")),\n\n size: style.get_number(&path.sub(\"size\")),\n\n spread: style.get_number(&path.sub(\"spread\")),\n\n exponent: style.get_number(&path.sub(\"exponent\")),\n\n offset_x: style.get_number(&path.sub(\"offset_x\")),\n\n offset_y: style.get_number(&path.sub(\"offset_y\")),\n\n }\n\n}\n\n\n", "file_path": "lib/rust/ensogl/component/shadow/src/lib.rs", "rank": 10, "score": 400120.71521880804 }, { "content": "/// Returns the path to package.yaml file for given project.\n\npub fn package_yaml_path(project_name: &str) -> String {\n\n match platform::current() {\n\n Some(Platform::Linux) | Some(Platform::MacOS) =>\n\n format!(\"~/enso/projects/{}/package.yaml\", project_name),\n\n Some(Platform::Windows) =>\n\n format!(\"%userprofile%\\\\enso\\\\projects\\\\{}\\\\package.yaml\", project_name),\n\n _ => format!(\"<path-to-enso-projects>/{}/package.yaml\", project_name),\n\n }\n\n}\n\n\n\n\n\n// ==============\n\n// === Handle ===\n\n// ==============\n\n\n\n// === SetupResult ===\n\n\n\n/// The result of initial project setup, containing handy controllers to be used in the initial\n\n/// view.\n\n#[derive(Clone, CloneRef, Debug)]\n", "file_path": "app/gui/src/controller/project.rs", "rank": 11, "score": 393682.00464612956 }, { "content": "/// Read the text from the clipboard. Please note that:\n\n/// - It uses the [Clipboard API](https://developer.mozilla.org/en-US/docs/Web/API/Clipboard_API)\n\n/// under the hood.\n\n/// - This is an asynchronous function. The callback with the text will be called when the text will\n\n/// be ready. The delay may be caused for example by waiting for permissions from the user.\n\n/// - This will probably display a permission prompt to the user for the first time it is used.\n\n/// - The website has to be served over HTTPS for this function to work correctly.\n\n/// - This function needs to be called from within user-initiated event callbacks, like mouse or key\n\n/// press. Otherwise it will not work.\n\n///\n\n/// Moreover, this function works in a very strange way in Firefox.\n\n/// [Firefox only supports reading the clipboard in browser extensions](https://developer.mozilla.org/en-US/docs/Web/API/Clipboard/readText).\n\n/// In such case this function fallbacks to the `paste` event. Whenever it is triggered, it\n\n/// remembers its value and passes it to the callback. This means, that in Firefox this function\n\n/// will work correctly only when called as a direct action to the `cmd + v` shortcut.\n\n///\n\n/// To learn more, see this [StackOverflow question](https://stackoverflow.com/questions/400212/how-do-i-copy-to-the-clipboard-in-javascript).\n\npub fn read_text(callback: impl Fn(String) + 'static) {\n\n let handler: Rc<RefCell<Option<ReadTextClosure>>> = default();\n\n let handler_clone = handler.clone_ref();\n\n let closure = Closure::wrap(Box::new(move |result| {\n\n *handler_clone.borrow_mut() = None;\n\n callback(result);\n\n }) as Box<dyn Fn(String)>);\n\n *handler.borrow_mut() = Some(closure);\n\n readText(handler.borrow().as_ref().unwrap());\n\n}\n", "file_path": "lib/rust/web/src/clipboard.rs", "rank": 12, "score": 383906.9154696482 }, { "content": " def toFileInsideThisDirectory(rootPath: File, fileName: String): File = {\n\n val parentDir = toFile(rootPath)\n\n new File(parentDir, fileName)\n\n }\n\n}\n\n\n", "file_path": "engine/language-server/src/main/scala/org/enso/languageserver/filemanager/Path.scala", "rank": 13, "score": 380030.16663055227 }, { "content": "/// Write the provided text to the clipboard. Please note that:\n\n/// - It uses the [Clipboard API](https://developer.mozilla.org/en-US/docs/Web/API/Clipboard_API)\n\n/// under the hood.\n\n/// - This is an asynchronous function. The results will not appear in the clipboard immediately.\n\n/// The delay may be caused for example by waiting for permission from the user.\n\n/// - This will probably display a permission prompt to the user for the first time it is used.\n\n/// - The website has to be served over HTTPS for this function to work correctly.\n\n/// - This function needs to be called from within user-initiated event callbacks, like mouse or key\n\n/// press. Otherwise it will not work.\n\n///\n\n/// Moreover, in case something fails, this function implements a fallback mechanism which tries\n\n/// to create a hidden text field, fill it with the text and use the obsolete\n\n/// [Document.execCommand](https://developer.mozilla.org/en-US/docs/Web/API/Document/execCommand)\n\n/// function.\n\n///\n\n/// To learn more, see this [StackOverflow question](https://stackoverflow.com/questions/400212/how-do-i-copy-to-the-clipboard-in-javascript).\n\npub fn write_text(text: impl Into<String>) {\n\n let text = text.into();\n\n writeText(text)\n\n}\n\n\n", "file_path": "lib/rust/web/src/clipboard.rs", "rank": 14, "score": 370404.1190578899 }, { "content": "/// Converts given `JsValue` into a `String`. Uses JS's `String` function,\n\n/// see: https://www.w3schools.com/jsref/jsref_string.asp\n\npub fn js_to_string(s: impl AsRef<JsValue>) -> String {\n\n js_to_string_inner(s.as_ref())\n\n}\n\n\n\n\n\n\n\n// =============\n\n// === Utils ===\n\n// =============\n\n\n\n/// Handle returned from `ignore_context_menu`. It unignores when the handle is dropped.\n\n#[derive(Debug)]\n\npub struct IgnoreContextMenuHandle {\n\n target: EventTarget,\n\n closure: Closure<dyn FnMut(MouseEvent)>,\n\n}\n\n\n\nimpl Drop for IgnoreContextMenuHandle {\n\n fn drop(&mut self) {\n\n let callback: &Function = self.closure.as_ref().unchecked_ref();\n\n self.target.remove_event_listener_with_callback(\"contextmenu\", callback).ok();\n\n }\n\n}\n\n\n", "file_path": "lib/rust/web/src/lib.rs", "rank": 15, "score": 368433.34214476973 }, { "content": " def findFreeName(root: File, name: String, counter: Int = 0): File = {\n\n val actualName = if (counter > 0) s\"$name.$counter\" else name\n\n val file = root / actualName\n\n if (Files.exists(file.toPath)) findFreeName(root, name, counter + 1)\n\n else file\n\n }\n\n\n\n /** Writes attached files to the given directory.\n\n */\n", "file_path": "project/src/main/scala/licenses/report/PackageNotices.scala", "rank": 16, "score": 367119.867054877 }, { "content": "/// Generate module with constants read from given configuration file in YAML format.\n\n///\n\n/// For examples, see the [`crate`] documentation.\n\n///\n\n/// # Panics\n\n///\n\n/// The function may panic when:\n\n/// - `CARGO_MANIFEST_DIR` or `OUT_DIR` env variable is missing.\n\n/// - The provided config file is not in the YAML format.\n\npub fn generate_config_module_from_yaml(config_path: impl AsRef<std::path::Path>) {\n\n let f = std::fs::File::open(config_path.as_ref()).unwrap();\n\n let value: Value = serde_yaml::from_reader(f).unwrap();\n\n let manifest_dir = std::env::var(\"CARGO_MANIFEST_DIR\")\n\n .expect(\"missing environment variable CARGO_MANIFEST_DIR:\");\n\n\n\n let indent = \" \".repeat(4);\n\n let mut def = \"\".to_string();\n\n let mut inst = \"\".to_string();\n\n let mut vars = \"\".to_string();\n\n match value {\n\n Value::Mapping(mapping) =>\n\n for (key, value) in mapping {\n\n let key = key.as_str().unwrap().to_snake_case();\n\n let value = value.as_str().unwrap();\n\n def.push_str(&format!(\"{}pub {}: &'static str,\\n\", indent, key));\n\n inst.push_str(&format!(\"{}{}: \\\"{}\\\",\\n\", indent, key, value));\n\n vars.push_str(&format!(\n\n \"#[allow(non_upper_case_globals)]\\npub const {}: &str = \\\"{}\\\";\\n\",\n\n key, value\n", "file_path": "lib/rust/config-reader/src/lib.rs", "rank": 17, "score": 366100.83715629485 }, { "content": "/// Checks if the given AST has Opr shape with the name matching given string.\n\npub fn is_opr_named(ast: &Ast, name: impl Str) -> bool {\n\n let opr_opt = known::Opr::try_from(ast).ok();\n\n opr_opt.contains_if(|opr| opr.name == name.as_ref())\n\n}\n\n\n", "file_path": "app/gui/language/ast/impl/src/opr.rs", "rank": 18, "score": 365817.64428115106 }, { "content": "/// Convert the syntax tree into a string.\n\npub fn show_code(tokens: &impl ToTokens) -> String {\n\n repr(tokens)\n\n}\n", "file_path": "lib/rust/parser/flexer/src/generate.rs", "rank": 19, "score": 361992.4571258183 }, { "content": "/// Visits each Ast node, while keeping track of its span.\n\npub fn traverse_with_span(ast: &impl HasTokens, mut f: impl FnMut(enso_text::Range<Chars>, &Ast)) {\n\n traverse_with_offset(ast, move |offset, ast| {\n\n f(enso_text::Range::new(offset, offset + ast.char_count()), ast)\n\n })\n\n}\n\n\n\n// === WithLength ===\n\n\n\n/// Stores a value of type `T` and information about its length.\n\n///\n\n/// Even if `T` is `Spanned`, keeping `length` variable is desired for performance\n\n/// purposes.\n\n#[derive(Eq, PartialEq, Debug, Shrinkwrap, Serialize, Deserialize)]\n\n#[shrinkwrap(mutable)]\n\npub struct WithLength<T> {\n\n #[shrinkwrap(main_field)]\n\n #[serde(flatten)]\n\n pub wrapped: T,\n\n pub length: Chars,\n\n}\n", "file_path": "app/gui/language/ast/impl/src/lib.rs", "rank": 20, "score": 357623.81508572923 }, { "content": "/// Converts path to an absolute form.\n\npub fn absolute_path(path: impl PathRef) -> std::io::Result<path::PathBuf> {\n\n use path_clean::PathClean;\n\n let path = path.as_ref();\n\n if path.is_absolute() {\n\n Ok(path.to_path_buf().clean())\n\n } else {\n\n Ok(std::env::current_dir()?.join(path).clean())\n\n }\n\n}\n\n\n", "file_path": "lib/rust/build-utils/src/lib.rs", "rank": 21, "score": 356997.57884956786 }, { "content": " def writeTextFile(path: Path, content: String): Unit = {\n\n Using(new PrintWriter(path.toFile)) { writer => writer.write(content) }.get\n\n }\n\n\n\n /** Copies a directory recursively.\n\n */\n", "file_path": "lib/scala/distribution-manager/src/main/scala/org/enso/distribution/FileSystem.scala", "rank": 23, "score": 353081.968620397 }, { "content": " def resolveLibraryPath(root: Path, libraryName: LibraryName): Path =\n\n root / libraryName.namespace / libraryName.name\n\n}\n", "file_path": "lib/scala/library-manager/src/main/scala/org/enso/librarymanager/local/LocalLibraryProvider.scala", "rank": 24, "score": 352066.0839222488 }, { "content": "/// Converts `IntoCrumbs` value into a `Crumb`-yielding iterator.\n\npub fn iter_crumbs(crumbs: impl IntoCrumbs) -> impl Iterator<Item = Crumb> {\n\n crumbs.into_iter().map(|crumb| crumb.into())\n\n}\n\n\n\n/// Sequence of `Crumb`s describing traversal path through AST.\n\npub type Crumbs = Vec<Crumb>;\n\n\n\n/// Helper macro. Behaves like `vec!` but converts each element into `Crumb`.\n\n#[macro_export]\n\nmacro_rules! crumbs {\n\n ( ) => {\n\n Vec::<$crate::crumbs::Crumb>::new()\n\n };\n\n ( $( $x:expr ),* ) => {\n\n vec![$($crate::crumbs::Crumb::from($x)),*]\n\n };\n\n}\n\n\n\n/// Crumb identifies location of child AST in an AST node. Allows for a single step AST traversal.\n\n/// The enum variants are paired with Shape variants. For example, `ModuleCrumb` allows obtaining\n", "file_path": "app/gui/language/ast/impl/src/crumbs.rs", "rank": 25, "score": 351028.3824065233 }, { "content": " def lockPath(locksRoot: Path, resourceName: String): Path =\n\n locksRoot.resolve(resourceName + \".lock\")\n\n}\n", "file_path": "lib/scala/distribution-manager/src/main/scala/org/enso/distribution/locking/FileLockManager.scala", "rank": 26, "score": 350804.1487544917 }, { "content": " def rootComponentsNames: Seq[String] = sbtComponents.map(_.name)\n\n}\n", "file_path": "project/src/main/scala/licenses/DistributionDescription.scala", "rank": 27, "score": 349837.0933898635 }, { "content": "/// Convert a string to a path.\n\npub fn str_to_path(str: impl Str) -> Result<Path, GenError> {\n\n parse_str(str.as_ref()).map_err(|_| GenError::BadPath(str.into()))\n\n}\n\n\n", "file_path": "lib/rust/parser/flexer/src/generate.rs", "rank": 28, "score": 349491.3523536884 }, { "content": "/// Suggests a variable name for storing results of the given expression.\n\n///\n\n/// Name will try to express result of an infix operation (`sum` for `a+b`), kind of literal\n\n/// (`number` for `5`) and target function name for prefix chain.\n\n///\n\n/// The generated name is not unique and might collide with already present identifiers.\n\npub fn name_for_ast(ast: &Ast) -> String {\n\n use ast::*;\n\n match ast.shape() {\n\n Shape::Var(ident) => ident.name.clone(),\n\n Shape::Cons(ident) => ident.name.to_lowercase(),\n\n Shape::Number(_) => \"number\".into(),\n\n Shape::DanglingBase(_) => \"number\".into(),\n\n Shape::TextLineRaw(_) => \"text\".into(),\n\n Shape::TextLineFmt(_) => \"text\".into(),\n\n Shape::TextBlockRaw(_) => \"text\".into(),\n\n Shape::TextBlockFmt(_) => \"text\".into(),\n\n Shape::TextUnclosed(_) => \"text\".into(),\n\n Shape::Opr(opr) => match opr.name.as_ref() {\n\n \"+\" => \"sum\",\n\n \"*\" => \"product\",\n\n \"-\" => \"difference\",\n\n \"/\" => \"quotient\",\n\n _ => \"operator\",\n\n }\n\n .into(),\n", "file_path": "app/gui/src/controller/graph.rs", "rank": 29, "score": 344978.98897719954 }, { "content": "/// Visits each Ast node, while keeping track of its index.\n\npub fn traverse_with_offset(ast: &impl HasTokens, f: impl FnMut(Chars, &Ast)) {\n\n let mut traverser = TraverserWithOffset::new(f);\n\n ast.feed_to(&mut traverser);\n\n}\n\n\n", "file_path": "app/gui/language/ast/impl/src/lib.rs", "rank": 30, "score": 342769.8057014407 }, { "content": "/// Check if the given macro match node is an import declaration.\n\npub fn is_match_import(ast: &known::Match) -> bool {\n\n let segment = &ast.segs.head;\n\n let keyword = crate::identifier::name(&segment.head);\n\n if keyword.contains_if(|str| *str == UNQUALIFIED_IMPORT_KEYWORD) {\n\n let second_segment = &ast.segs.tail.first();\n\n match second_segment {\n\n Some(seg) => {\n\n let keyword_2 = crate::identifier::name(&seg.head);\n\n if keyword_2.contains_if(|str| *str == QUALIFIED_IMPORT_KEYWORD) {\n\n return true;\n\n }\n\n }\n\n None => return false,\n\n }\n\n }\n\n keyword.contains_if(|str| *str == QUALIFIED_IMPORT_KEYWORD)\n\n}\n\n\n", "file_path": "app/gui/language/ast/impl/src/macros.rs", "rank": 31, "score": 340851.9949198642 }, { "content": "/// Fast-check if the pattern matches text.\n\n///\n\n/// This is faster way than calling `score_match(text,pattern,metric).is_some()`, therefore it's\n\n/// recommended to call this function before scoring when we are not sure if the pattern actually\n\n/// matches the text.\n\npub fn matches(text: impl Str, pattern: impl Str) -> bool {\n\n let mut pattern_chars = pattern.as_ref().chars();\n\n let mut next_pattern_char = pattern_chars.next();\n\n for text_char in text.as_ref().chars() {\n\n match next_pattern_char {\n\n Some(ch) if ch.eq_ignore_ascii_case(&text_char) =>\n\n next_pattern_char = pattern_chars.next(),\n\n Some(_) => {}\n\n None => {\n\n break;\n\n }\n\n }\n\n }\n\n next_pattern_char.is_none()\n\n}\n\n\n\n/// The result of `find_best_subsequence` function.\n\n#[derive(Clone, Debug, Default, PartialEq)]\n\npub struct Subsequence {\n\n /// The score of found subsequence.\n", "file_path": "lib/rust/fuzzly/src/score.rs", "rank": 32, "score": 337153.1749473968 }, { "content": "/// Parse a single source file.\n\npub fn lexe_file(filename: String) -> AnyAst {\n\n parse_str(filename)\n\n}\n", "file_path": "lib/rust/parser/parser-jni/src/lib.rs", "rank": 33, "score": 336493.1383684202 }, { "content": "/// Parse a single source file.\n\npub fn parse_file(filename: String) -> AnyAst {\n\n parse_str(filename)\n\n}\n\n\n\n\n\n// === Tokens ===\n\n\n", "file_path": "lib/rust/parser/parser-jni/src/lib.rs", "rank": 34, "score": 336493.1383684202 }, { "content": " def resolvePath(root: Path, libraryName: LibraryName, version: SemVer): Path =\n\n root\n\n .resolve(libraryName.namespace)\n\n .resolve(libraryName.name)\n\n .resolve(version.toString)\n\n}\n", "file_path": "lib/scala/library-manager/src/main/scala/org/enso/librarymanager/published/cache/LibraryCache.scala", "rank": 35, "score": 334322.1691978643 }, { "content": " def createFileContaining(contents: String, path: Path): Path = {\n\n createFileContaining(contents.getBytes(StandardCharsets.UTF_8), path)\n\n }\n\n\n", "file_path": "engine/language-server/src/test/scala/org/enso/languageserver/filemanager/FileSystemSpec.scala", "rank": 36, "score": 334128.3794265658 }, { "content": "/// Returns future which returns once the msdfgen library is initialized.\n\npub fn initialized() -> impl Future<Output = ()> {\n\n MsdfgenJsInitialized()\n\n}\n\n\n\n/// The future for running test after initialization\n", "file_path": "lib/rust/ensogl/component/text/msdf-sys/src/lib.rs", "rank": 37, "score": 332716.7413419996 }, { "content": "/// Get the environment variable or panic if not available.\n\npub fn env_var_or_panic(var_name: &str) -> String {\n\n match std::env::var(var_name) {\n\n Ok(var) => var,\n\n Err(e) => panic!(\"Failed to read environment variable {}: {}.\", var_name, e),\n\n }\n\n}\n\n\n", "file_path": "lib/rust/build-utils/src/lib.rs", "rank": 38, "score": 332576.7614844311 }, { "content": "/// If this is the builtin macro for `->` (lambda expression), returns it as known `Match`.\n\npub fn as_lambda_match(ast: &Ast) -> Option<known::Match> {\n\n let macro_match = known::Match::try_from(ast).ok()?;\n\n let segment = &macro_match.segs.head;\n\n crate::opr::is_arrow_opr(&segment.head).then_some(macro_match)\n\n}\n\n\n", "file_path": "app/gui/language/ast/impl/src/macros.rs", "rank": 39, "score": 330073.95517546684 }, { "content": "/// Appends a line to the file located at the provided path.\n\npub fn append_to_log(path: PathBuf, line: impl AsRef<str>) -> io::Result<()> {\n\n let mut log_file = OpenOptions::new().create(true).write(true).append(true).open(path)?;\n\n writeln!(log_file, \"{}\", line.as_ref())?;\n\n Ok(())\n\n}\n", "file_path": "lib/rust/launcher-shims/src/lib.rs", "rank": 40, "score": 329110.2718899541 }, { "content": "/// Retrieves the identifier's name, if the Ast node is an identifier. Otherwise, returns None.\n\npub fn name(ast: &Ast) -> Option<&str> {\n\n match ast.shape() {\n\n Shape::Var(val) => Some(&val.name),\n\n Shape::Cons(val) => Some(&val.name),\n\n Shape::SectionSides(val) => name(&val.opr),\n\n Shape::Opr(val) => Some(&val.name),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "app/gui/language/ast/impl/src/identifier.rs", "rank": 41, "score": 328610.11185002245 }, { "content": "#[wasm_bindgen]\n\n#[allow(dead_code)]\n\npub fn entry_point_interface() {\n\n web::forward_panic_hook_to_console();\n\n web::set_stack_trace_limit();\n\n run_once_initialized(|| {\n\n let app = Application::new(&web::get_html_element_by_id(\"root\").unwrap());\n\n init(&app);\n\n mem::forget(app);\n\n });\n\n}\n\n\n\n\n", "file_path": "app/gui/view/debug_scene/interface/src/lib.rs", "rank": 42, "score": 328406.36395151634 }, { "content": "#[wasm_bindgen]\n\n#[allow(dead_code, missing_docs)]\n\npub fn entry_point_visualization() {\n\n web::forward_panic_hook_to_console();\n\n web::set_stack_trace_limit();\n\n run_once_initialized(|| {\n\n let app = Application::new(&web::get_html_element_by_id(\"root\").unwrap());\n\n init(&app);\n\n std::mem::forget(app);\n\n });\n\n}\n\n\n", "file_path": "app/gui/view/debug_scene/visualization/src/lib.rs", "rank": 43, "score": 328406.36395151634 }, { "content": " def readFileContent(path: Path): String = {\n\n Using(Source.fromFile(path.toFile)) { source =>\n\n source.getLines().mkString(\"\\n\")\n\n }.get\n\n }\n\n}\n", "file_path": "engine/launcher/src/test/scala/org/enso/launcher/TestHelpers.scala", "rank": 44, "score": 328030.9072193181 }, { "content": "/// An object which can be entry in [`crate::ListView`] component.\n\n///\n\n/// The entries should not assume any padding - it will be granted by ListView itself. The Display\n\n/// Object position of this component is docked to the middle of left entry's boundary. It differs\n\n/// from usual behaviour of EnsoGl components, but makes the entries alignment much simpler.\n\n///\n\n/// This trait abstracts over model and its updating in order to support re-using shapes and gui\n\n/// components, so they are not deleted and created again. The ListView component does not create\n\n/// Entry object for each entry provided, and during scrolling, the instantiated objects will be\n\n/// reused: they position will be changed and they will be updated using `update` method.\n\npub trait Entry: CloneRef + Debug + display::Object + 'static {\n\n /// The model of this entry. The entry should be a representation of data from the Model.\n\n /// For example, the entry being just a caption can have [`String`] as its model - the text to\n\n /// be displayed.\n\n type Model: Debug + Default;\n\n\n\n /// An Object constructor.\n\n fn new(app: &Application) -> Self;\n\n\n\n /// Update content with new model.\n\n fn update(&self, model: &Self::Model);\n\n\n\n /// Set the layer of all [`text::Area`] components inside. The [`text::Area`] component is\n\n /// handled in a special way, and is often in different layer than shapes. See TODO comment\n\n /// in [`text::Area::add_to_scene_layer`] method.\n\n fn set_label_layer(&self, label_layer: &display::scene::Layer);\n\n}\n\n\n\n\n\n// =======================\n", "file_path": "lib/rust/ensogl/component/list-view/src/entry.rs", "rank": 45, "score": 326646.6533894321 }, { "content": "/// If the given AST node is an import declaration, returns it as a Match (which is the only shape\n\n/// capable of storing import declarations). Returns `None` otherwise.\n\npub fn ast_as_import_match(ast: &Ast) -> Option<known::Match> {\n\n let macro_match = known::Match::try_from(ast).ok()?;\n\n is_match_import(&macro_match).then_some(macro_match)\n\n}\n\n\n", "file_path": "app/gui/language/ast/impl/src/macros.rs", "rank": 46, "score": 326504.9808332488 }, { "content": " def getFileName: String = path.getFileName.toString\n\n}\n\n\n", "file_path": "project/src/main/scala/licenses/PortablePath.scala", "rank": 47, "score": 325341.695837305 }, { "content": " def findExecutable(name: String): Path = {\n\n val possibleExecutableNames =\n\n if (extensions.isEmpty) Seq(name)\n\n else extensions.map(ext => s\"$name.$ext\")\n\n possibleExecutableNames\n\n .map(binaryPath / _)\n\n .find(Files.isExecutable)\n\n .getOrElse(throw ExecutableNotFoundError(binaryPath, name))\n\n }\n\n\n\n /** Checks if the installation is not corrupted and reports any issues as\n\n * failures.\n\n */\n", "file_path": "lib/scala/runtime-version-manager/src/main/scala/org/enso/runtimeversionmanager/components/GraalRuntime.scala", "rank": 48, "score": 324652.62152311706 }, { "content": "/// Try Interpreting the line as disabling comment. Return the text after `#`.\n\npub fn as_disable_comment(ast: &Ast) -> Option<String> {\n\n let r#match = crate::known::Match::try_from(ast).ok()?;\n\n let first_segment = &r#match.segs.head;\n\n if crate::identifier::name(&first_segment.head) == Some(DISABLING_COMMENT_INTRODUCER) {\n\n Some(first_segment.body.repr())\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "app/gui/language/ast/impl/src/macros.rs", "rank": 49, "score": 324347.520906951 }, { "content": "#[test]\n\npub fn test_bad_state_name() {\n\n let lexer = Lexer3::define();\n\n let result = lexer.specialize();\n\n assert!(result.is_err());\n\n let message = result.unwrap_err().to_string();\n\n assert_eq!(message, \"`Bad Lexer Name` is not a valid rust identifier.\");\n\n}\n\n\n\n\n\n\n\n// ====================\n\n// === Definition 4 ===\n\n// ====================\n\n\n\npub struct Lexer4 {\n\n lexer: Flexer<LexerState2, Output, Logger>,\n\n}\n\n\n\nimpl Deref for Lexer4 {\n\n type Target = Flexer<LexerState2, Output, Logger>;\n", "file_path": "lib/rust/parser/flexer/tests/flexer_invalid_definitions.rs", "rank": 50, "score": 324209.3011780389 }, { "content": "#[test]\n\npub fn test_bad_output_name() {\n\n let lexer = Lexer4::define();\n\n let result = lexer.specialize();\n\n assert!(result.is_err());\n\n let message = result.unwrap_err().to_string();\n\n assert_eq!(message, \"`Bad output name` is not a valid rust path.\");\n\n}\n", "file_path": "lib/rust/parser/flexer/tests/flexer_invalid_definitions.rs", "rank": 51, "score": 324209.3011780389 }, { "content": " def artifactFile(name: String): File =\n\n if (Platform.isWindows) file(name + \".exe\")\n\n else file(name)\n\n\n\n private val muslBundleUrl =\n\n \"https://github.com/gradinac/musl-bundle-example/releases/download/\" +\n\n \"v1.0/musl.tar.gz\"\n\n\n\n /** Ensures that the `musl` bundle is installed.\n\n *\n\n * Checks for existence of its directory and if it does not exist, downloads\n\n * and extracts the bundle. After extracting it does the required\n\n * initialization (renaming paths to be absolute and creating a shell script\n\n * called `musl-gcc`).\n\n *\n\n * `musl` is needed for static builds on Linux.\n\n *\n\n * @param buildCache build-cache directory for the current project\n\n * @param log a logger instance\n\n * @return path to the `musl` bundle binary directory which should be added\n\n * to PATH of the launched native-image\n\n */\n\n private def ensureMuslIsInstalled(\n\n buildCache: File,\n\n log: ManagedLogger\n\n ): Path = {\n\n val muslRoot = buildCache / \"musl-1.2.0\"\n\n val bundleLocation = muslRoot / \"bundle\"\n\n val binaryLocation = bundleLocation / \"bin\"\n\n val gccLocation = binaryLocation / \"musl-gcc\"\n", "file_path": "project/NativeImage.scala", "rank": 52, "score": 321071.99077967496 }, { "content": "/// Iterate recursively over tree-like structure implementing `IntoIterator`.\n\npub fn iterate_subtree<T>(ast: T) -> impl Iterator<Item = T::Item>\n\nwhere T: IntoIterator<Item = T> + Copy {\n\n let generator = move || {\n\n let mut nodes: Vec<T> = vec![ast];\n\n while !nodes.is_empty() {\n\n let ast = nodes.pop().unwrap();\n\n nodes.extend(ast.into_iter());\n\n yield ast;\n\n }\n\n };\n\n\n\n enso_shapely::GeneratingIterator(generator)\n\n}\n", "file_path": "app/gui/language/ast/impl/src/internal.rs", "rank": 53, "score": 320014.4542722694 }, { "content": "pub fn display_graphviz(viz: Graphviz) {\n\n let code: String = viz.into();\n\n let url = percent_encoding::utf8_percent_encode(&code, percent_encoding::NON_ALPHANUMERIC);\n\n let url = format!(\"https://dreampuf.github.io/GraphvizOnline/#{}\", url);\n\n crate::web::window().open_with_url_and_target(&url, \"_blank\").unwrap();\n\n}\n\n\n\n\n\nimpl<T> GraphvizBuilder for T\n\nwhere\n\n T: ContentRef,\n\n Content<T>: GraphvizBuilder,\n\n{\n\n default fn graphviz_build(&self, builder: &mut Graphviz) {\n\n self.content().graphviz_build(builder)\n\n }\n\n}\n", "file_path": "lib/rust/frp/src/debug.rs", "rank": 54, "score": 319896.32499565126 }, { "content": " class ThreadSafeLock(name: String) { self =>\n\n @volatile var busy: Boolean = false\n\n @volatile var failed: Boolean = false\n\n var writers: Int = 0\n\n var readers: Int = 0\n\n var fileLock: Option[Lock] = None\n\n\n\n /** Checks if the lock has been set to failed state and throws an exception\n\n * if that is the case.\n\n */\n", "file_path": "lib/scala/distribution-manager/src/main/scala/org/enso/distribution/locking/ThreadSafeFileLockManager.scala", "rank": 55, "score": 315390.5336760272 }, { "content": " def findRelativePath(path: File): Option[Path] =\n\n toList.flatMap { root =>\n\n if (path.toPath.startsWith(root.file.toPath)) {\n\n Some(Path(root.id, root.file.toPath.relativize(path.toPath)))\n\n } else {\n\n None\n\n }\n\n }.headOption\n\n }\n\n\n\n /** Creates an initial content root configuration which consists of the main\n\n * project root, the home directory and filesystem roots.\n\n */\n\n private def initializeRoots(config: Config): ContentRoots = {\n\n val fsRoots = FileSystems.getDefault.getRootDirectories.asScala.map {\n\n path =>\n\n val absolutePath = path.toAbsolutePath.normalize\n\n ContentRootWithFile(\n\n ContentRoot.FileSystemRoot(\n\n id = UUID.randomUUID(),\n", "file_path": "engine/language-server/src/main/scala/org/enso/languageserver/filemanager/ContentRootManagerActor.scala", "rank": 56, "score": 314069.84899505903 }, { "content": "/// Is the given token an identifier matching to a given string?\n\npub fn matching_ident(token: &TokenTree, name: &str) -> bool {\n\n match token {\n\n TokenTree::Ident(ident) => *ident == name,\n\n _ => false,\n\n }\n\n}\n\n\n\n\n\n\n\n// ============\n\n// === Repr ===\n\n// ============\n\n\n", "file_path": "lib/rust/macro-utils/src/lib.rs", "rank": 57, "score": 313771.93647607206 }, { "content": " def getRelativePath(root: File, base: Path, path: nio.file.Path): Path =\n\n Path(base.rootId, root.toPath.relativize(path))\n\n\n\n /** Get path relative to the root, and return a parent path.\n\n *\n\n * @param root a root path\n\n * @param base a path relative to the root\n\n * @param path a path that will be relativized\n\n * @return a parent of a path relative to the root\n\n */\n", "file_path": "engine/language-server/src/main/scala/org/enso/languageserver/filemanager/Path.scala", "rank": 58, "score": 311871.61559514573 }, { "content": "/// Generate the `run` function for the specialized lexer.\n\n///\n\n/// This function is what the user of the lexer will call to begin execution.\n\npub fn run_function(output_type_name: impl Str) -> Result<ImplItem, GenError> {\n\n let output_type_name = str_to_path(output_type_name)?;\n\n let tree: ImplItem = parse_quote! {\n\n pub fn run<R:ReaderOps>(&mut self, mut reader:R) -> LexingResult<#output_type_name> {\n\n self.set_up();\n\n reader.advance_char(&mut self.bookmarks);\n\n while self.run_current_state(&mut reader) == StageStatus::ExitSuccess {}\n\n let result = match self.status {\n\n StageStatus::ExitFinished => LexingResult::success(\n\n mem::take(&mut self.output)\n\n ),\n\n StageStatus::ExitFail => LexingResult::failure(\n\n mem::take(&mut self.output)\n\n ),\n\n _ => LexingResult::partial(mem::take(&mut self.output))\n\n };\n\n self.tear_down();\n\n result\n\n }\n\n };\n\n Ok(tree)\n\n}\n\n\n", "file_path": "lib/rust/parser/flexer/src/generate.rs", "rank": 59, "score": 310145.332668406 }, { "content": " def getRelativeParent(root: File, base: Path, path: nio.file.Path): Path =\n\n getRelativePath(root, base, path.getParent())\n\n}\n", "file_path": "engine/language-server/src/main/scala/org/enso/languageserver/filemanager/Path.scala", "rank": 60, "score": 309318.3808782094 }, { "content": " def mkFile(name: String): File = new File(tmpDir, name)\n\n\n", "file_path": "engine/polyglot-api/src/test/scala/org/enso/polyglot/ModuleManagementTest.scala", "rank": 61, "score": 307930.10574845684 }, { "content": "/// Creates a new where clause from provided sequence of where predicates.\n\npub fn new_where_clause(predicates: impl IntoIterator<Item = WherePredicate>) -> WhereClause {\n\n let predicates = syn::punctuated::Punctuated::from_iter(predicates);\n\n WhereClause { where_token: Default::default(), predicates }\n\n}\n\n\n\n\n\n\n\n// =============\n\n// === Tests ===\n\n// =============\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use proc_macro2::TokenStream;\n\n\n\n fn parse<T: syn::parse::Parse>(code: &str) -> T {\n\n syn::parse_str(code).unwrap()\n\n }\n\n\n", "file_path": "lib/rust/macro-utils/src/lib.rs", "rank": 62, "score": 307658.9085083402 }, { "content": "/// The Model Provider for ListView's entries of type `E`.\n\n///\n\n/// The [`crate::ListView`] component does not display all entries at once, instead it lazily ask\n\n/// for models of entries when they're about to be displayed. So setting the select content is\n\n/// essentially providing an implementor of this trait.\n\npub trait ModelProvider<E>: Debug {\n\n /// Number of all entries.\n\n fn entry_count(&self) -> usize;\n\n\n\n /// Get the model of entry with given id. The implementors should return `None` only when\n\n /// requested id greater or equal to entries count.\n\n fn get(&self, id: Id) -> Option<E::Model>\n\n where E: Entry;\n\n}\n\n\n\n\n\n// === AnyModelProvider ===\n\n\n\n/// A wrapper for shared instance of some Provider of models for `E` entries.\n\n#[derive(Debug, Shrinkwrap)]\n\npub struct AnyModelProvider<E>(Rc<dyn ModelProvider<E>>);\n\n\n\nimpl<E> Clone for AnyModelProvider<E> {\n\n fn clone(&self) -> Self {\n\n Self(self.0.clone())\n", "file_path": "lib/rust/ensogl/component/list-view/src/entry.rs", "rank": 63, "score": 307229.05518880265 }, { "content": "/// The default content of the newly created initial main module file.\n\npub fn default_main_module_code() -> String {\n\n default_main_method_code()\n\n}\n\n\n", "file_path": "app/gui/src/controller/project.rs", "rank": 64, "score": 307150.46723834303 }, { "content": "/// The code with definition of the default `main` method.\n\npub fn default_main_method_code() -> String {\n\n format!(r#\"{} = \"Hello, World!\"\"#, MAIN_DEFINITION_NAME)\n\n}\n\n\n", "file_path": "app/gui/src/controller/project.rs", "rank": 65, "score": 307137.0363539817 }, { "content": "pub fn mk_out_name<S: Str>(s: S) -> String {\n\n format!(\"output_{}\", s.as_ref())\n\n}\n", "file_path": "lib/rust/ensogl/core/src/display/symbol/gpu/shader/builder.rs", "rank": 66, "score": 306110.8845597971 }, { "content": " def makeArchive(root: File, rootDir: String, target: File): Unit = {\n\n val exitCode = if (target.getName.endsWith(\"zip\")) {\n\n Process(\n\n Seq(\n\n \"zip\",\n\n \"-9\",\n\n \"-q\",\n\n \"-r\",\n\n target.toPath.toAbsolutePath.normalize.toString,\n\n rootDir\n\n ),\n\n cwd = Some(root)\n\n ).!\n\n } else {\n\n Process(\n\n Seq(\n\n \"tar\",\n\n \"--use-compress-program=gzip -9\",\n\n \"-cf\",\n\n target.toPath.toAbsolutePath.normalize.toString,\n", "file_path": "project/DistributionPackage.scala", "rank": 67, "score": 305596.3087805727 }, { "content": " def writeFile(name: String, contents: String): Unit = {\n\n Files.write(mkFile(name).toPath, contents.getBytes): Unit\n\n }\n\n\n", "file_path": "engine/polyglot-api/src/test/scala/org/enso/polyglot/ModuleManagementTest.scala", "rank": 68, "score": 305596.1998410636 }, { "content": " def fileName: String = path.getFileName\n\n}\n\n\n\n/** Represents a copyright mention extracted from a file.\n\n *\n\n * The copyright mention may come from comments in the source code, for\n\n * example.\n\n *\n\n * Equal copyright mentions are merged, so a single mention may contain\n\n * multiple contexts and origins if it was a result of merging from different\n\n * mentions.\n\n *\n\n * @param content cleaned content of the line that was extracted\n\n * @param contexts contexts (surrounding lines) that are associated with the mention\n\n * @param origins paths to the files that the mention comes from\n\n */\n\ncase class CopyrightMention(\n\n content: String,\n\n contexts: Seq[String],\n\n origins: Seq[PortablePath]\n\n) extends Attachment {\n\n\n\n /** @inheritdoc\n\n */\n\n override def toString: String = s\"CopyrightMention('$content')\"\n\n}\n\n\n", "file_path": "project/src/main/scala/licenses/Attachment.scala", "rank": 69, "score": 303165.24343050446 }, { "content": "/// The default metric, recommended by this library\n\npub fn default() -> impl Metric {\n\n SubsequentLettersBonus::default().sum(CaseMatchBonus::default())\n\n}\n\n\n\n\n\n\n\n// =======================\n\n// === Implementations ===\n\n// =======================\n\n\n\n// === Sum ===\n\n\n\n/// The structure representing the sum of two metrics\n\n#[derive(Copy, Clone, Debug, Default)]\n\npub struct Sum<Metrics1, Metrics2>(Metrics1, Metrics2);\n\n\n\nimpl<M1: Metric, M2: Metric> Metric for Sum<M1, M2> {\n\n fn measure_vertex(&self, vertex: subsequence_graph::Vertex, text: &str, pattern: &str) -> f32 {\n\n let Self(left, right) = self;\n\n let left = left.measure_vertex(vertex, text, pattern);\n", "file_path": "lib/rust/fuzzly/src/metric.rs", "rank": 70, "score": 302675.72022644547 }, { "content": "pub fn mk_fragment_name<S: Str>(s: S) -> String {\n\n format!(\"input_{}\", s.as_ref())\n\n}\n", "file_path": "lib/rust/ensogl/core/src/display/symbol/gpu/shader/builder.rs", "rank": 71, "score": 302634.0101743319 }, { "content": "pub fn mk_uniform_name<S: Str>(s: S) -> String {\n\n format!(\"input_{}\", s.as_ref())\n\n}\n", "file_path": "lib/rust/ensogl/core/src/display/symbol/gpu/shader/builder.rs", "rank": 72, "score": 302634.0101743319 }, { "content": "pub fn mk_vertex_name<S: Str>(s: S) -> String {\n\n format!(\"vertex_{}\", s.as_ref())\n\n}\n", "file_path": "lib/rust/ensogl/core/src/display/symbol/gpu/shader/builder.rs", "rank": 73, "score": 302634.0101743319 }, { "content": " def findRelativePath(path: File)(implicit\n\n ec: ExecutionContext\n\n ): Future[Option[Path]]\n\n}\n", "file_path": "engine/language-server/src/main/scala/org/enso/languageserver/filemanager/ContentRootManager.scala", "rank": 74, "score": 302348.29005362454 }, { "content": "/// Checks if the given Meta name-val pair defines user-provided bounds.\n\npub fn is_custom_bound(name_val: &MetaNameValue) -> bool {\n\n path_matching_ident(&name_val.path, BOUND_NAME)\n\n}\n\n\n", "file_path": "lib/rust/shapely/macros/src/derive_clone_ref.rs", "rank": 75, "score": 301955.2126949813 }, { "content": "/// Provides reference to the content of this value.\n\npub fn content<T: ContentRef>(t: &T) -> &T::Content {\n\n T::content(t)\n\n}\n\n\n", "file_path": "lib/rust/prelude/src/wrapper.rs", "rank": 76, "score": 297127.61390854977 }, { "content": " def writeTextFile(relativePath: String, content: String): Unit = {\n\n val bytes = content.getBytes(StandardCharsets.UTF_8)\n\n val entry = new TarArchiveEntry(relativePath)\n\n entry.setSize(bytes.size.toLong)\n\n archive.putArchiveEntry(entry)\n\n archive.write(bytes)\n\n archive.closeArchiveEntry()\n\n }\n\n\n\n /** Adds a file from the filesystem to the archive.\n\n *\n\n * @param relativePath path of the file in the archive\n\n * @param filePath a path to a file on the filesystem that will be read and\n\n * put into the archive\n\n * @return returns the number of bytes that were transferrred from the input\n\n * file\n\n */\n", "file_path": "lib/scala/downloader/src/main/scala/org/enso/downloader/archive/TarGzWriter.scala", "rank": 77, "score": 296471.50807263516 }, { "content": " def writeInSrcDir(moduleName: String, contents: String): File = {\n\n val file = new File(pkg.sourceDir, s\"$moduleName.enso\")\n\n Files.write(file.toPath, contents.getBytes).toFile\n\n }\n\n\n", "file_path": "engine/runtime/src/test/scala/org/enso/interpreter/test/instrument/RuntimeStdlibTest.scala", "rank": 78, "score": 296387.16610486 }, { "content": " def writeInSrcDir(moduleName: String, contents: String): File = {\n\n val file = new File(pkg.sourceDir, s\"$moduleName.enso\")\n\n Files.write(file.toPath, contents.getBytes).toFile\n\n }\n\n\n", "file_path": "engine/runtime/src/test/scala/org/enso/interpreter/test/instrument/RuntimeVisualisationsTest.scala", "rank": 79, "score": 296387.16610486 }, { "content": " def writeInSrcDir(moduleName: String, contents: String): File = {\n\n val file = new File(pkg.sourceDir, s\"$moduleName.enso\")\n\n Files.write(file.toPath, contents.getBytes).toFile\n\n }\n\n\n", "file_path": "engine/runtime/src/test/scala/org/enso/interpreter/test/instrument/RuntimeErrorsTest.scala", "rank": 80, "score": 296387.16610486 }, { "content": " def writeInSrcDir(moduleName: String, contents: String): File = {\n\n val file = new File(pkg.sourceDir, s\"$moduleName.enso\")\n\n Files.write(file.toPath, contents.getBytes).toFile\n\n }\n\n\n", "file_path": "engine/runtime/src/test/scala/org/enso/interpreter/test/instrument/RuntimeInstrumentTest.scala", "rank": 81, "score": 296387.16610486 }, { "content": " def writeInSrcDir(moduleName: String, contents: String): File = {\n\n val file = new File(pkg.sourceDir, s\"$moduleName.enso\")\n\n Files.write(file.toPath, contents.getBytes).toFile\n\n }\n\n\n", "file_path": "engine/runtime/src/test/scala/org/enso/interpreter/test/instrument/RuntimeServerTest.scala", "rank": 82, "score": 296387.16610486 }, { "content": "/// If given Ast is a specific infix operator application, returns it.\n\npub fn to_specific_infix(ast: &Ast, name: &str) -> Option<known::Infix> {\n\n let infix = known::Infix::try_from(ast).ok()?;\n\n is_opr_named(&infix.opr, name).then_some(infix)\n\n}\n\n\n", "file_path": "app/gui/language/ast/impl/src/opr.rs", "rank": 83, "score": 294818.24287493684 }, { "content": "/// Execute the provided benchmark for each of the [`SIZES`] above.\n\npub fn run_bench_sizes(name: &str, input: &str, add_newline: bool, c: &mut Criterion) {\n\n let mut group = c.benchmark_group(name);\n\n SIZES.iter().for_each(|(size, size_name)| {\n\n group.throughput(Throughput::Bytes(*size as u64));\n\n let input = replicate_to_size(input, *size, add_newline);\n\n group.bench_function(*size_name, |b| {\n\n b.iter(|| {\n\n lexer::run(black_box(input.as_str()));\n\n })\n\n });\n\n })\n\n}\n\n\n", "file_path": "lib/rust/parser/lexer/generation/benches/lexer_bench_sources.rs", "rank": 84, "score": 293987.4541413936 }, { "content": " def writeInSrcDir(moduleName: String, contents: String): File = {\n\n val file = new File(pkg.sourceDir, s\"$moduleName.enso\")\n\n Files.write(file.toPath, contents.getBytes).toFile\n\n }\n\n\n", "file_path": "engine/runtime/src/test/scala/org/enso/interpreter/test/instrument/RuntimeSuggestionUpdatesTest.scala", "rank": 85, "score": 293484.7749121061 }, { "content": "pub fn expression_mock_string(label: &str) -> Expression {\n\n let pattern = Some(label.to_string());\n\n let code = format!(\"\\\"{}\\\"\", label);\n\n let parser = Parser::new_or_panic();\n\n let parameters = vec![];\n\n let ast = parser.parse_line_ast(&code).unwrap();\n\n let invocation_info = span_tree::generate::context::CalledMethodInfo { parameters };\n\n let ctx = span_tree::generate::MockContext::new_single(ast.id.unwrap(), invocation_info);\n\n let output_span_tree = span_tree::SpanTree::default();\n\n let input_span_tree = span_tree::SpanTree::new(&ast, &ctx).unwrap();\n\n let whole_expression_id = default();\n\n Expression { pattern, code, whole_expression_id, input_span_tree, output_span_tree }\n\n}\n\n\n", "file_path": "app/gui/view/debug_scene/interface/src/lib.rs", "rank": 86, "score": 292623.88832944754 }, { "content": " def relativePath(file: Path): String = {\n\n val normalized = file.toAbsolutePath.normalize\n\n if (!normalized.startsWith(normalizedBase)) {\n\n throw new IllegalArgumentException(\n\n \"TarGzWriter precondition failure: \" +\n\n \"Files should all be inside of the provided basePath.\"\n\n )\n\n }\n\n normalizedBase.relativize(normalized).toString\n\n }\n\n\n\n val taskProgress = new TaskProgressImplementation[Unit]()\n\n\n", "file_path": "lib/scala/downloader/src/main/scala/org/enso/downloader/archive/TarGzWriter.scala", "rank": 87, "score": 292407.23148575926 }, { "content": "/// Trait which enables `Sized` super-bound on the `Content` type.\n\npub trait HasSizedContent = HasContent where Content<Self>: Sized;\n\n\n", "file_path": "lib/rust/prelude/src/wrapper.rs", "rank": 88, "score": 291109.27262370166 }, { "content": " def newProject(name: String, path: Path, version: SemVer): Unit = {\n\n PackageManager.Default.create(\n\n root = path.toFile,\n\n name = name,\n\n edition = Some(Config.makeCompatibilityEditionFromVersion(version))\n\n )\n\n }\n\n}\n", "file_path": "lib/scala/runtime-version-manager-test/src/main/scala/org/enso/runtimeversionmanager/test/RuntimeVersionManagerTest.scala", "rank": 90, "score": 286168.64831685333 }, { "content": "pub fn create_element(name: &str) -> Element {\n\n try_create_element(name).unwrap()\n\n}\n\n\n", "file_path": "lib/rust/web/src/lib.rs", "rank": 91, "score": 286100.0476393307 }, { "content": " def mask(path: Path): String = MaskedPath(path).applyMasking()\n\n\n\n logger.trace(\n\n s\"Local library search paths = ${localLibrarySearchPaths.map(mask)}\"\n\n )\n\n logger.trace(\n\n s\"Primary library cache = ${mask(cacheRoot)}\"\n\n )\n\n logger.trace(\n\n s\"Auxiliary (bundled) library caches = \" +\n\n s\"${additionalCacheLocations.map(mask)}\"\n\n )\n\n\n\n LibraryLocations(\n\n localLibrarySearchPaths = localLibrarySearchPaths,\n\n primaryCacheRoot = cacheRoot,\n\n additionalCacheRoots = additionalCacheLocations\n\n )\n\n }\n\n}\n", "file_path": "lib/scala/library-manager/src/main/scala/org/enso/librarymanager/LibraryLocations.scala", "rank": 92, "score": 284519.90120077354 }, { "content": "#[wasm_bindgen]\n\n#[allow(dead_code)]\n\npub fn entry_point_ide() {\n\n web::forward_panic_hook_to_error();\n\n\n\n ensogl_text_msdf_sys::run_once_initialized(|| {\n\n // Logging of build information.\n\n #[cfg(debug_assertions)]\n\n analytics::remote_log_value(\n\n \"debug_mode\",\n\n \"debug_mode_is_active\",\n\n analytics::AnonymousData(true),\n\n );\n\n #[cfg(not(debug_assertions))]\n\n analytics::remote_log_value(\n\n \"debug_mode\",\n\n \"debug_mode_is_active\",\n\n analytics::AnonymousData(false),\n\n );\n\n\n\n let config =\n\n crate::config::Startup::from_web_arguments().expect(\"Failed to read configuration.\");\n\n crate::ide::Initializer::new(config).start_and_forget();\n\n });\n\n}\n", "file_path": "app/gui/src/lib.rs", "rank": 93, "score": 283855.22807402984 }, { "content": " def readTxtFile(path: Path): String = {\n\n val buffer = Source.fromFile(path.toFile)\n\n val content = buffer.getLines().mkString\n\n buffer.close()\n\n content\n\n }\n\n\n", "file_path": "engine/language-server/src/test/scala/org/enso/languageserver/filemanager/FileSystemSpec.scala", "rank": 94, "score": 283047.4986723595 }, { "content": "/// Formats the type for the user-facing output.\n\npub fn type_display<T: TypeDisplay>() -> String {\n\n <T as TypeDisplay>::type_display()\n\n}\n\n\n\n\n\n// =============\n\n// === Value ===\n\n// =============\n\n\n", "file_path": "lib/rust/prelude/src/tp.rs", "rank": 95, "score": 282607.1758675398 }, { "content": "#[wasm_bindgen]\n\npub fn entry_point_panic() {\n\n forward_panic_hook_to_error();\n\n panic!();\n\n}\n\n\n\n\n\n/// Common traits.\n\npub mod traits {\n\n pub use super::NodeInserter;\n\n pub use super::NodeRemover;\n\n}\n\n\n\n/// Sleeps for the specified amount of time.\n\n///\n\n/// This function might sleep for slightly longer than the specified duration but never less.\n\n///\n\n/// This function is an async version of std::thread::sleep, its timer starts just after the\n\n/// function call.\n\n#[cfg(target_arch = \"wasm32\")]\n\npub async fn sleep(duration: Duration) {\n\n use gloo_timers::future::TimeoutFuture;\n\n\n\n TimeoutFuture::new(duration.as_millis() as u32).await\n\n}\n\n\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\npub use async_std::task::sleep;\n\n\n", "file_path": "lib/rust/web/src/lib.rs", "rank": 96, "score": 280258.4415848936 }, { "content": " def keyForFile(file: AttachedFile): String = file.path.toString\n\n val keys = files.map(keyForFile)\n\n for {\n\n ignore <- readExpectedLines(Paths.filesIgnore, keys, packageRoot)\n\n keep <- readExpectedLines(Paths.filesKeep, keys, packageRoot)\n\n } yield {\n", "file_path": "project/src/main/scala/licenses/report/Review.scala", "rank": 97, "score": 280231.64019586926 }, { "content": "/// Wrap the provided implementation items into an `impl` block for the provided `state_name` type.\n\npub fn wrap_in_impl_for(\n\n state_name: impl Into<String>,\n\n body: Vec<ImplItem>,\n\n) -> Result<ItemImpl, GenError> {\n\n let state_name: Ident = str_to_ident(state_name.into().as_str())?;\n\n let mut tree: ItemImpl = parse_quote! {\n\n #[allow(missing_docs,dead_code,clippy::all)]\n\n impl #state_name {}\n\n };\n\n tree.items.extend(body);\n\n Ok(tree)\n\n}\n\n\n", "file_path": "lib/rust/parser/flexer/src/generate.rs", "rank": 98, "score": 280179.7522604452 }, { "content": " def validateName(name: String): Either[InvalidNameError, String] =\n\n if (name.isEmpty) {\n\n Left(InvalidNameError.Empty)\n\n } else if (!name.head.isLetter || !name.head.isUpper) {\n\n Left(InvalidNameError.ShouldStartWithCapitalLetter)\n\n } else if (!name.forall(isAllowedNameCharacter)) {\n\n val invalidCharacters = name.filterNot(isAllowedNameCharacter)\n\n Left(\n\n InvalidNameError.ContainsInvalidCharacters(\n\n ListSet(invalidCharacters: _*)\n\n )\n\n )\n\n } else if (name != toUpperSnakeCase(name)) {\n\n Left(InvalidNameError.ShouldBeUpperSnakeCase(toUpperSnakeCase(name)))\n\n } else {\n\n Right(name)\n\n }\n\n\n\n /** Checks if a character is allowed in a project name.\n\n *\n", "file_path": "lib/scala/pkg/src/main/scala/org/enso/pkg/validation/NameValidation.scala", "rank": 99, "score": 280065.32671948906 } ]
Rust
hsp3-analyzer-mini/ham-core/src/assists/completion.rs
vain0x/hsp3-ginger
c5924b60686d4bf1769569c013c8110f7636732c
use super::*; use crate::{ analysis::{HspSymbolKind, LocalScope, Scope, SymbolRc}, assists::from_document_position, lang_service::docs::Docs, parse::{p_param_ty::PParamCategory, PToken}, source::*, token::TokenKind, }; use lsp_types::{CompletionItem, CompletionItemKind, CompletionList, Documentation, Position, Url}; use serde::{Deserialize, Serialize}; use serde_json::Value; use std::collections::HashSet; pub(crate) enum ACompletionItem { Symbol(SymbolRc), } pub(crate) fn in_str_or_comment(pos: Pos16, tokens: &[PToken]) -> bool { let i = match tokens.binary_search_by_key(&pos, |t| Pos16::from(t.ahead().range.start())) { Ok(i) | Err(i) => i.saturating_sub(1), }; tokens[i..] .iter() .take_while(|t| t.ahead().start() <= pos) .flat_map(|t| t.iter()) .filter(|t| t.loc.range.contains_inclusive(pos)) .any(|t| match t.kind { TokenKind::Str => t.loc.range.start() < pos && pos < t.loc.range.end(), TokenKind::Comment => t.loc.range.start() < pos, _ => false, }) } pub(crate) fn in_preproc(pos: Pos16, tokens: &[PToken]) -> bool { let mut i = match tokens.binary_search_by_key(&pos, |token| token.body_pos16()) { Ok(i) | Err(i) => i, }; loop { match tokens.get(i).map(|t| (t.kind(), t.body_pos())) { Some((TokenKind::Hash, p)) if p <= pos => return true, Some((TokenKind::Eos, p)) if p < pos => return false, _ if i == 0 => return false, _ => i -= 1, } } } fn collect_local_completion_items( symbols: &[SymbolRc], local: &LocalScope, completion_items: &mut Vec<ACompletionItem>, ) { for s in symbols { let scope = match &s.scope_opt { Some(it) => it, None => continue, }; if scope.is_visible_to(local) { completion_items.push(ACompletionItem::Symbol(s.clone())); } } } fn collect_global_completion_items( symbols: &[SymbolRc], completion_items: &mut Vec<ACompletionItem>, ) { for s in symbols { if let Some(Scope::Global) = s.scope_opt { completion_items.push(ACompletionItem::Symbol(s.clone())); } } } pub(crate) fn collect_symbols_as_completion_items( doc: DocId, scope: LocalScope, doc_symbols: &[(DocId, &[SymbolRc])], completion_items: &mut Vec<ACompletionItem>, ) { if let Some((_, symbols)) = doc_symbols.iter().find(|&&(d, _)| d == doc) { collect_local_completion_items(symbols, &scope, completion_items); } if scope.is_outside_module() { for &(d, symbols) in doc_symbols { if d == doc { continue; } collect_local_completion_items(symbols, &scope, completion_items); } } for &(_, symbols) in doc_symbols { collect_global_completion_items(symbols, completion_items); } } fn to_completion_symbol_kind(kind: HspSymbolKind) -> CompletionItemKind { use CompletionItemKind as K; match kind { HspSymbolKind::Unresolved | HspSymbolKind::Unknown => K::TEXT, HspSymbolKind::Label => K::VALUE, HspSymbolKind::StaticVar => K::VARIABLE, HspSymbolKind::Const => K::CONSTANT, HspSymbolKind::Enum => K::ENUM_MEMBER, HspSymbolKind::Macro { ctype: false } => K::VALUE, HspSymbolKind::Macro { ctype: true } => K::FUNCTION, HspSymbolKind::DefFunc => K::METHOD, HspSymbolKind::DefCFunc => K::FUNCTION, HspSymbolKind::ModFunc => K::METHOD, HspSymbolKind::ModCFunc => K::FUNCTION, HspSymbolKind::Param(None) => K::VARIABLE, HspSymbolKind::Param(Some(param)) => match param.category() { PParamCategory::ByValue => K::VALUE, PParamCategory::ByRef => K::PROPERTY, PParamCategory::Local => K::VARIABLE, PParamCategory::Auto => K::TEXT, }, HspSymbolKind::Module => K::MODULE, HspSymbolKind::Field => K::FIELD, HspSymbolKind::LibFunc => K::FUNCTION, HspSymbolKind::PluginCmd => K::KEYWORD, HspSymbolKind::ComInterface => K::INTERFACE, HspSymbolKind::ComFunc => K::METHOD, } } fn to_lsp_completion_item(symbol: &SymbolRc) -> CompletionItem { let details = symbol.compute_details(); let detail = details.desc.map(|s| s.to_string()); let documentation = if details.docs.is_empty() { None } else { Some(Documentation::String(details.docs.join("\r\n\r\n"))) }; let sort_text = { let sort_prefix = match (&symbol.scope_opt, symbol.kind) { (Some(Scope::Local(local)), _) => match (&local.module_opt, local.deffunc_opt) { (Some(_), Some(_)) => 'a', (Some(_), None) => 'b', (None, None) => 'c', (None, Some(_)) => 'd', }, (_, HspSymbolKind::Module) => 'f', (Some(Scope::Global), _) => 'e', (None, _) => 'g', }; Some(format!("{}{}", sort_prefix, symbol.name)) }; CompletionItem { kind: Some(to_completion_symbol_kind(symbol.kind)), label: symbol.name.to_string(), detail, documentation, sort_text, ..CompletionItem::default() } } fn new_completion_list(items: Vec<CompletionItem>) -> CompletionList { CompletionList { is_incomplete: false, items, } } pub(crate) fn incomplete_completion_list() -> CompletionList { CompletionList { is_incomplete: true, items: vec![], } } fn do_completion( uri: &Url, position: Position, docs: &Docs, wa: &mut WorkspaceAnalysis, ) -> Option<CompletionList> { let mut items = vec![]; let (doc, pos) = from_document_position(uri, position, docs)?; if wa.in_str_or_comment(doc, pos).unwrap_or(true) { return None; } if wa.in_preproc(doc, pos).unwrap_or(false) { wa.require_project_for_doc(doc) .collect_preproc_completion_items(&mut items); return Some(new_completion_list(items)); } let mut completion_items = vec![]; let p = wa.require_project_for_doc(doc); p.collect_completion_items(doc, pos, &mut completion_items); for item in completion_items { match item { ACompletionItem::Symbol(symbol) => { if symbol.linked_symbol_opt.borrow().is_some() { continue; } items.push(to_lsp_completion_item(&symbol)); } } } p.collect_hsphelp_completion_items(&mut items); if let Some(i) = items.iter().position(|item| item.label == "__hspdef__") { items.swap_remove(i); } { let mut set = HashSet::new(); let retain = items .iter() .map(|item| set.insert(item.label.as_str())) .collect::<Vec<_>>(); let mut i = 0; items.retain(|_| { i += 1; retain[i - 1] }); } Some(new_completion_list(items)) } #[derive(Serialize, Deserialize)] struct CompletionData { uri: Url, position: Position, data_opt: Option<Value>, } pub(crate) fn completion( uri: Url, position: Position, docs: &Docs, wa: &mut WorkspaceAnalysis, ) -> Option<CompletionList> { let mut completion_list = do_completion(&uri, position, docs, wa)?; for item in &mut completion_list.items { if item.documentation.is_none() && item.data.is_none() { continue; } item.documentation = None; let data_opt = item.data.take(); let data = CompletionData { uri: uri.clone(), position, data_opt, }; item.data = Some(serde_json::to_value(&data).unwrap()); } Some(completion_list) } pub(crate) fn completion_resolve( mut resolved_item: CompletionItem, docs: &Docs, wa: &mut WorkspaceAnalysis, ) -> Option<CompletionItem> { let data: CompletionData = match resolved_item .data .take() .and_then(|data| serde_json::from_value(data).ok()) { Some(it) => it, None => { return Some(resolved_item); } }; let CompletionData { uri, position, data_opt, } = data; let list = do_completion(&uri, position, docs, wa)?; let item = list .items .into_iter() .find(|i| i.label == resolved_item.label)?; resolved_item.documentation = item.documentation; resolved_item.data = data_opt; Some(resolved_item) }
use super::*; use crate::{ analysis::{HspSymbolKind, LocalScope, Scope, SymbolRc}, assists::from_document_position, lang_service::docs::Docs, parse::{p_param_ty::PParamCategory, PToken}, source::*, token::TokenKind, }; use lsp_types::{CompletionItem, CompletionItemKind, CompletionList, Documentation, Position, Url}; use serde::{Deserialize, Serialize}; use serde_json::Value; use std::collections::HashSet; pub(crate) enum ACompletionItem { Symbol(SymbolRc), } pub(crate) fn in_str_or_comment(pos: Pos16, tokens: &[PToken]) -> bool { let i = match tokens.binary_search_by_key(&pos, |t| Pos16::from(t.ahead().range.start())) { Ok(i) | Err(i) => i.saturating_sub(1), }; tokens[i..] .iter() .take_while(|t| t.ahead().start() <= pos) .flat_map(|t| t.iter()) .filter(|t| t.loc.range.contains_inclusive(pos)) .any(|t| match t.kind { TokenKind::Str => t.loc.range.start() < pos && pos < t.loc.range.end(), TokenKind::Comment => t.loc.range.start() < pos, _ => false, }) } pub(crate) fn in_preproc(pos: Pos16, tokens: &[PToken]) -> bool { let mut i = match tokens.binary_search_by_key(&pos, |token| token.body_pos16()) { Ok(i) | Err(i) => i, }; loop { match tokens.get(i).map(|t| (t.kind(), t.body_pos())) { Some((TokenKind::Hash, p)) if p <= pos => return true, Some((TokenKind::Eos, p)) if p < pos => return false, _ if i == 0 => return false, _ => i -= 1, } } } fn collect_local_completion_items( symbols: &[SymbolRc], local: &LocalScope, completion_items: &mut Vec<ACompletionItem>, ) { for s in symbols { let scope = match &s.scope_opt { Some(it) => it, None => continue, }; if scope.is_visible_to(local) { completion_items.push(ACompletionItem::Symbol(s.clone())); } } } fn collect_global_completion_items( symbols: &[SymbolRc], completion_items: &mut Vec<ACompletionItem>, ) { for s in symbols { if let Some(Scope::Global) = s.scope_opt { completion_items.push(ACompletionItem::Symbol(s.clone())); } } } pub(crate) fn collect_symbols_as_completion_items( doc: DocId, scope: LocalScope, doc_symbols: &[(DocId, &[SymbolRc])], completion_items: &mut Vec<ACompletionItem>, ) { if let Some((_, symbols)) = doc_symbols.iter().find(|&&(d, _)| d == doc) { collect_local_completion_items(symbols, &scope, completion_items); } if scope.is_outside_module() { for &(d, symbols) in doc_symbols { if d == doc { continue; } collect_local_completion_items(symbols, &scope, completion_items); } } for &(_, symbols) in doc_symbols { collect_global_completion_items(symbols, completion_items); } } fn to_completion_symbol_kind(kind: HspSymbolKind) -> CompletionItemKind { use CompletionItemKind as K; match kind { HspSymbolKind::Unresolved | HspSymbolKind::Unknown => K::TEXT, HspSymbolKind::Label => K::VALUE, HspSymbolKind::StaticVar => K::VARIABLE, HspSymbolKind::Const => K::CONSTANT, HspSymbolKind::Enum => K::ENUM_MEMBER, HspSymbolKind::Macro { ctype: false } => K::VALUE, HspSymbolKind::Macro { ctype: true } => K::FUNCTION, HspSymbolKind::DefFunc => K::METHOD, HspSymbolKind::DefCFunc => K::FUNCTION, HspSymbolKind::ModFunc => K::METHOD, HspSymbolKind::ModCFunc => K::FUNCTION, HspSymbolKind::Param(None) => K::VARIABLE, HspSymbolKind::Param(Some(param)) => match param.category() { PParamCategory::ByValue => K::VALUE, PParamCategory::ByRef => K::PROPERTY, PParamCategory::Local => K::VARIABLE, PParamCategory::Auto => K::TEXT, }, HspSymbolKind::Module => K::MODULE, HspSymbolKind::Field => K::FIELD, HspSymbolKind::LibFunc => K::FUNCTION, HspSymbolKind::PluginCmd => K::KEYWORD, HspSymbolKind::ComInterface => K::INTERFACE, HspSymbolKind::ComFunc => K::METHOD, } } fn to_lsp_completion_item(symbol: &SymbolRc) -> CompletionItem { let details = symbol.compute_details(); let detail = details.desc.map(|s| s.to_string()); let documentation = if details.docs.is_empty() { None } else { Some(Documentation::String(details.docs.join("\r\n\r\n"))) }; let sort_text = { let sort_prefix = match (&symbol.scope_opt, symbol.kind) { (Some(Scope::Local(local)), _) => match (&local.module_opt, local.deffunc_opt) { (Some(_), Some(_)) => 'a', (Some(_), None) => 'b', (None, None) => 'c', (None, Some(_)) => 'd', }, (_, HspSymbolKind::Module) => 'f', (Some(Scope::Global), _) => 'e', (None, _) => 'g', }; Some(format!("{}{}", sort_prefix, symbol.name)) }; CompletionItem { kind: Some(to_completion_symbol_kind(symbol.kind)), label: symbol.name.to_string(), detail, documentation, sort_text, ..CompletionItem::default() } } fn new_completion_list(items: Vec<CompletionItem>) -> CompletionList { CompletionList { is_incomplete: false, items, } } pub(crate) fn incomp
items.swap_remove(i); } { let mut set = HashSet::new(); let retain = items .iter() .map(|item| set.insert(item.label.as_str())) .collect::<Vec<_>>(); let mut i = 0; items.retain(|_| { i += 1; retain[i - 1] }); } Some(new_completion_list(items)) } #[derive(Serialize, Deserialize)] struct CompletionData { uri: Url, position: Position, data_opt: Option<Value>, } pub(crate) fn completion( uri: Url, position: Position, docs: &Docs, wa: &mut WorkspaceAnalysis, ) -> Option<CompletionList> { let mut completion_list = do_completion(&uri, position, docs, wa)?; for item in &mut completion_list.items { if item.documentation.is_none() && item.data.is_none() { continue; } item.documentation = None; let data_opt = item.data.take(); let data = CompletionData { uri: uri.clone(), position, data_opt, }; item.data = Some(serde_json::to_value(&data).unwrap()); } Some(completion_list) } pub(crate) fn completion_resolve( mut resolved_item: CompletionItem, docs: &Docs, wa: &mut WorkspaceAnalysis, ) -> Option<CompletionItem> { let data: CompletionData = match resolved_item .data .take() .and_then(|data| serde_json::from_value(data).ok()) { Some(it) => it, None => { return Some(resolved_item); } }; let CompletionData { uri, position, data_opt, } = data; let list = do_completion(&uri, position, docs, wa)?; let item = list .items .into_iter() .find(|i| i.label == resolved_item.label)?; resolved_item.documentation = item.documentation; resolved_item.data = data_opt; Some(resolved_item) }
lete_completion_list() -> CompletionList { CompletionList { is_incomplete: true, items: vec![], } } fn do_completion( uri: &Url, position: Position, docs: &Docs, wa: &mut WorkspaceAnalysis, ) -> Option<CompletionList> { let mut items = vec![]; let (doc, pos) = from_document_position(uri, position, docs)?; if wa.in_str_or_comment(doc, pos).unwrap_or(true) { return None; } if wa.in_preproc(doc, pos).unwrap_or(false) { wa.require_project_for_doc(doc) .collect_preproc_completion_items(&mut items); return Some(new_completion_list(items)); } let mut completion_items = vec![]; let p = wa.require_project_for_doc(doc); p.collect_completion_items(doc, pos, &mut completion_items); for item in completion_items { match item { ACompletionItem::Symbol(symbol) => { if symbol.linked_symbol_opt.borrow().is_some() { continue; } items.push(to_lsp_completion_item(&symbol)); } } } p.collect_hsphelp_completion_items(&mut items); if let Some(i) = items.iter().position(|item| item.label == "__hspdef__") {
random
[ { "content": "fn add_symbol(kind: HspSymbolKind, name: &PToken, def_site: bool, ctx: &mut Ctx) {\n\n let NameScopeNsTriple {\n\n basename,\n\n scope_opt,\n\n ns_opt,\n\n } = resolve_name_scope_ns_for_def(\n\n &name.body.text,\n\n ImportMode::Local,\n\n &ctx.scope,\n\n ctx.module_map,\n\n );\n\n\n\n let symbol = DefInfo::Name {\n\n kind,\n\n name: name.clone(),\n\n basename: basename.clone(),\n\n scope_opt: scope_opt.clone(),\n\n ns_opt: ns_opt.clone(),\n\n }\n\n .into_symbol();\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/var.rs", "rank": 0, "score": 364484.3138519664 }, { "content": "fn on_symbol_use(name: &PToken, is_var: bool, ctx: &mut Ctx) {\n\n match resolve_implicit_symbol(\n\n &name.body.text,\n\n &ctx.scope,\n\n &ctx.public_env,\n\n &ctx.ns_env,\n\n &ctx.local_env,\n\n &ctx.module_map,\n\n ) {\n\n Some(symbol) => {\n\n ctx.public_use_sites.push((symbol, name.body.loc));\n\n }\n\n None => {\n\n let kind = if is_var {\n\n HspSymbolKind::StaticVar\n\n } else {\n\n HspSymbolKind::Unresolved\n\n };\n\n add_symbol(kind, name, USE_SITE, ctx);\n\n }\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/var.rs", "rank": 1, "score": 342508.070559758 }, { "content": "fn from_document_position(uri: &Url, position: Position, docs: &Docs) -> Option<(DocId, Pos16)> {\n\n let uri = CanonicalUri::from_url(uri);\n\n let doc = docs.find_by_uri(&uri)?;\n\n\n\n let pos = {\n\n let row = position.line as u32;\n\n let column = position.character as u32;\n\n Pos16::new(row, column)\n\n };\n\n\n\n Some((doc, pos))\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/assists.rs", "rank": 2, "score": 331093.73646300694 }, { "content": "fn symbol_kind_is_definitely_rval(kind: HspSymbolKind) -> bool {\n\n match kind {\n\n HspSymbolKind::Label\n\n | HspSymbolKind::Const\n\n | HspSymbolKind::Enum\n\n | HspSymbolKind::DefFunc\n\n | HspSymbolKind::DefCFunc\n\n | HspSymbolKind::ModFunc\n\n | HspSymbolKind::ModCFunc\n\n | HspSymbolKind::ComInterface\n\n | HspSymbolKind::ComFunc => true,\n\n HspSymbolKind::Param(Some(param)) => match param {\n\n PParamTy::Var | PParamTy::Array | PParamTy::Modvar | PParamTy::Local => false,\n\n _ => true,\n\n },\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/sema.rs", "rank": 4, "score": 287287.84750533337 }, { "content": "fn trailing_is_all_blank(token: &PToken) -> bool {\n\n token.trailing.iter().all(|t| t.kind == TokenKind::Blank)\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/assists/formatting.rs", "rank": 5, "score": 275834.60246895533 }, { "content": "// completion, workspace/symbol も参照\n\nfn to_lsp_symbol_kind(kind: HspSymbolKind) -> Option<lsp_types::SymbolKind> {\n\n use lsp_types::SymbolKind as K;\n\n let it = match kind {\n\n HspSymbolKind::Unresolved => return None,\n\n HspSymbolKind::Module => K::MODULE,\n\n HspSymbolKind::StaticVar => K::VARIABLE,\n\n HspSymbolKind::Unknown\n\n | HspSymbolKind::Label\n\n | HspSymbolKind::Const\n\n | HspSymbolKind::Enum\n\n | HspSymbolKind::Macro { ctype: false }\n\n | HspSymbolKind::Param(None)\n\n | HspSymbolKind::PluginCmd => K::CONSTANT,\n\n HspSymbolKind::Macro { ctype: true }\n\n | HspSymbolKind::DefFunc\n\n | HspSymbolKind::DefCFunc\n\n | HspSymbolKind::LibFunc => K::FUNCTION,\n\n HspSymbolKind::ModFunc | HspSymbolKind::ModCFunc | HspSymbolKind::ComFunc => K::METHOD,\n\n HspSymbolKind::Param(Some(param)) => match param.category() {\n\n PParamCategory::ByValue => K::CONSTANT,\n", "file_path": "hsp3-analyzer-mini/ham-core/src/assists/document_symbol.rs", "rank": 6, "score": 275358.9380161527 }, { "content": "fn leading_in_same_line_is_all_blank(token: &PToken) -> bool {\n\n token\n\n .leading\n\n .iter()\n\n .rev()\n\n .take_while(|t| t.kind != TokenKind::Newlines)\n\n .all(|t| t.kind == TokenKind::Blank)\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/assists/formatting.rs", "rank": 7, "score": 272716.0604786229 }, { "content": "fn parse_include_stmt(hash: PToken, kind: PIncludeKind, px: &mut Px) -> PIncludeStmt {\n\n let keyword = px.bump();\n\n let file_path_opt = px.eat(TokenKind::Str);\n\n parse_end_of_preproc(px);\n\n\n\n PIncludeStmt {\n\n hash,\n\n keyword,\n\n kind,\n\n file_path_opt,\n\n }\n\n}\n\n\n\npub(crate) fn parse_preproc_stmt(px: &mut Px) -> Option<PStmt> {\n\n let hash = px.eat(TokenKind::Hash)?;\n\n\n\n let stmt = match px.next_token().body_text() {\n\n \"const\" => PStmt::Const(parse_const_stmt(hash, px)),\n\n \"enum\" => PStmt::Enum(parse_enum_stmt(hash, px)),\n\n \"define\" => PStmt::Define(parse_define_stmt(hash, px)),\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 8, "score": 269945.830588881 }, { "content": "fn parse_enum_stmt(hash: PToken, px: &mut Px) -> PEnumStmt {\n\n assert_eq!(px.next_token().body_text(), \"enum\");\n\n let keyword = px.bump();\n\n\n\n let privacy_opt = parse_privacy(px);\n\n let name_opt = px.eat(TokenKind::Ident);\n\n\n\n let equal_opt = px.eat(TokenKind::Equal);\n\n let init_opt = parse_expr(px);\n\n parse_end_of_preproc(px);\n\n\n\n PEnumStmt {\n\n hash,\n\n keyword,\n\n privacy_opt,\n\n name_opt,\n\n equal_opt,\n\n init_opt,\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 9, "score": 267954.91458337917 }, { "content": "fn to_symbol_kind(kind: PDefFuncKind) -> HspSymbolKind {\n\n match kind {\n\n PDefFuncKind::DefFunc => HspSymbolKind::DefFunc,\n\n PDefFuncKind::DefCFunc => HspSymbolKind::DefCFunc,\n\n PDefFuncKind::ModInit | PDefFuncKind::ModTerm | PDefFuncKind::ModFunc => {\n\n HspSymbolKind::ModFunc\n\n }\n\n PDefFuncKind::ModCFunc => HspSymbolKind::ModCFunc,\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/preproc.rs", "rank": 10, "score": 266438.8250778093 }, { "content": "fn tokenize_other(pp: bool, t: &mut TokenizeContext) -> bool {\n\n if !t.at_eof() && char_is_other_first(pp, t.next()) {\n\n while !t.at_eof() && char_is_other_first(pp, t.next()) {\n\n t.bump();\n\n }\n\n\n\n t.commit(Token::Other);\n\n return true;\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 11, "score": 266337.174354995 }, { "content": "fn tokenize_number(pp: bool, t: &mut TokenizeContext) -> bool {\n\n if t.eat(\"0b\") {\n\n t.commit(Token::ZeroB);\n\n tokenize_binary(t);\n\n return true;\n\n }\n\n\n\n if (pp && t.eat(\"%%\")) || (!pp && t.eat(\"%\")) {\n\n t.commit(Token::Percent);\n\n tokenize_binary(t);\n\n return true;\n\n }\n\n\n\n if t.eat(\"0x\") {\n\n t.commit(Token::ZeroX);\n\n tokenize_hex(t);\n\n return true;\n\n }\n\n\n\n if t.eat(\"$\") {\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 12, "score": 263630.3233146109 }, { "content": "fn tokenize_space(pp: bool, t: &mut TokenizeContext) -> bool {\n\n // 改行エスケープ\n\n if pp && t.next() == '\\\\' && char_is_eol(t.nth(1)) {\n\n t.eat(\"\\\\\");\n\n if !t.eat(\"\\r\\n\") {\n\n t.eat(\"\\n\");\n\n }\n\n\n\n t.commit(Token::Space);\n\n return true;\n\n }\n\n\n\n let mut ok = false;\n\n\n\n while char_is_space(t.next()) {\n\n t.bump();\n\n ok = true;\n\n }\n\n\n\n if ok {\n\n t.commit(Token::Space);\n\n true\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 13, "score": 263630.323314611 }, { "content": "fn on_symbol_def(name: &PToken, ctx: &mut Ctx) {\n\n match resolve_implicit_symbol(\n\n &name.body.text,\n\n &ctx.scope,\n\n &ctx.public_env,\n\n &ctx.ns_env,\n\n &ctx.local_env,\n\n ctx.module_map,\n\n ) {\n\n Some(symbol) => {\n\n ctx.public_def_sites.push((symbol, name.body.loc));\n\n }\n\n None => add_symbol(HspSymbolKind::StaticVar, name, DEF_SITE, ctx),\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/var.rs", "rank": 14, "score": 262670.9653966576 }, { "content": "fn parse_deffunc_like_stmt(hash: PToken, kind: PDefFuncKind, px: &mut Px) -> PDefFuncStmt {\n\n assert!(DEFFUNC_LIKE_KEYWORDS.contains(&px.next_token().body_text()));\n\n\n\n let keyword = px.bump();\n\n\n\n let privacy_opt = parse_privacy(px);\n\n let name_opt = if kind.is_anonymous() {\n\n None\n\n } else {\n\n px.eat(TokenKind::Ident)\n\n };\n\n\n\n let onexit_opt = eat_ident(\"onexit\", px);\n\n let params = parse_deffunc_params(px);\n\n parse_end_of_preproc(px);\n\n\n\n let mut stmts = vec![];\n\n let behind = loop {\n\n match px.next() {\n\n TokenKind::Eof => break px.next_token().behind(),\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 15, "score": 262126.73412679983 }, { "content": "fn tokenize_pun(t: &mut TokenizeContext) -> bool {\n\n for &(token, pun_text) in PUN_TABLE {\n\n if t.eat(pun_text) {\n\n t.commit(token);\n\n return true;\n\n }\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 16, "score": 256090.08767492545 }, { "content": "fn tokenize_eol(t: &mut TokenizeContext) -> bool {\n\n if t.is_followed_by(\"\\r\\n\") || t.next() == '\\n' {\n\n t.commit(Token::Semi);\n\n\n\n loop {\n\n while char_is_space(t.next()) {\n\n t.bump();\n\n }\n\n\n\n if t.eat(\"\\r\\n\") || t.eat(\"\\n\") {\n\n continue;\n\n }\n\n\n\n break;\n\n }\n\n\n\n t.commit(Token::Eol);\n\n return true;\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 17, "score": 256090.08767492545 }, { "content": "fn tokenize_str(t: &mut TokenizeContext) -> bool {\n\n if t.eat(\"\\\"\") {\n\n t.commit(Token::StrStart);\n\n\n\n tokenize_char_or_str_content(t, '\"');\n\n\n\n if t.eat(\"\\\"\") {\n\n t.commit(Token::StrEnd);\n\n }\n\n\n\n return true;\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 18, "score": 256090.08767492545 }, { "content": "fn tokenize_char(t: &mut TokenizeContext) -> bool {\n\n if t.eat(\"'\") {\n\n t.commit(Token::CharStart);\n\n\n\n tokenize_char_or_str_content(t, '\\'');\n\n\n\n if t.eat(\"'\") {\n\n t.commit(Token::CharEnd);\n\n }\n\n\n\n return true;\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 19, "score": 256090.08767492545 }, { "content": "fn tokenize_ident(t: &mut TokenizeContext) -> bool {\n\n if char_is_ident_first(t.next()) || t.next() == '@' {\n\n while char_is_ident(t.next()) {\n\n t.bump();\n\n }\n\n\n\n let token = Token::parse_keyword(t.current_text()).unwrap_or(Token::Ident);\n\n t.commit(token);\n\n\n\n // モジュール名を指定する `@` と直後の名前を字句解析する。(念のため `a@b@c` みたいなのも解釈する。)\n\n loop {\n\n if t.eat(\"@\") {\n\n t.commit(Token::IdentAtSign);\n\n continue;\n\n }\n\n\n\n if char_is_ident_first(t.next()) {\n\n while char_is_ident(t.next()) {\n\n t.bump();\n\n }\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 20, "score": 256090.08767492545 }, { "content": "fn tokenize_comment(t: &mut TokenizeContext) -> bool {\n\n if t.eat(\";\") || t.eat(\"//\") {\n\n while !t.at_eof() && !char_is_eol(t.next()) {\n\n t.bump();\n\n }\n\n t.commit(Token::Comment);\n\n return true;\n\n }\n\n\n\n if t.eat(\"/*\") {\n\n while !t.at_eof() && !t.eat(\"*/\") {\n\n t.bump();\n\n }\n\n t.commit(Token::Comment);\n\n return true;\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 21, "score": 256090.08767492545 }, { "content": "fn tokenize_multiline_str(t: &mut TokenizeContext) -> bool {\n\n if t.eat(\"{\\\"\") {\n\n t.commit(Token::StrStart);\n\n\n\n // FIXME: 各行の最初のタブ文字は文字列リテラルの値に含まれないので、Token::Space にする。\n\n while !t.at_eof() && !t.is_followed_by(\"\\\"}\") {\n\n t.bump();\n\n }\n\n t.commit(Token::StrVerbatim);\n\n\n\n if t.eat(\"\\\"}\") {\n\n t.commit(Token::StrEnd);\n\n }\n\n\n\n return true;\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 22, "score": 253181.655961693 }, { "content": "fn parse_uselib_stmt(hash: PToken, px: &mut Px) -> PUseLibStmt {\n\n assert_eq!(px.next_token().body_text(), \"uselib\");\n\n\n\n let keyword = px.bump();\n\n let file_path_opt = px.eat(TokenKind::Str);\n\n parse_end_of_preproc(px);\n\n\n\n PUseLibStmt {\n\n hash,\n\n keyword,\n\n file_path_opt,\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 23, "score": 251065.5091614254 }, { "content": "fn tokenize_spaces_comments(pp: bool, t: &mut TokenizeContext) {\n\n while tokenize_space(pp, t) || tokenize_comment(t) {\n\n // Pass.\n\n }\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 24, "score": 244522.75777272027 }, { "content": "// SemanticTokensLegend を参照\n\nfn to_semantic_token_kind(symbol: &SymbolRc) -> Option<(u32, u32)> {\n\n let (ty, modifiers) = match symbol.kind {\n\n HspSymbolKind::Param(Some(param)) => match param.category() {\n\n PParamCategory::ByValue => (1, 0b01), // readonly variable\n\n PParamCategory::ByRef => (0, 0), // parameter\n\n PParamCategory::Local => (1, 0), // variable\n\n PParamCategory::Auto => return None,\n\n },\n\n HspSymbolKind::StaticVar => (1, 0b10), // static variable\n\n HspSymbolKind::Const | HspSymbolKind::Enum => (1, 0b01), // readonly variable\n\n HspSymbolKind::DefFunc\n\n | HspSymbolKind::DefCFunc\n\n | HspSymbolKind::ModFunc\n\n | HspSymbolKind::ModCFunc\n\n | HspSymbolKind::LibFunc\n\n | HspSymbolKind::ComFunc => (2, 0), // function\n\n HspSymbolKind::Macro { .. } => (3, 0), // macro\n\n HspSymbolKind::Module => (4, 0), // namespace\n\n HspSymbolKind::PluginCmd => (5, 0), // keyword\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/assists/semantic_tokens.rs", "rank": 25, "score": 242132.82407897458 }, { "content": "// completion, textDocument/documentSymbol も参照\n\nfn to_lsp_symbol_kind(kind: HspSymbolKind) -> Option<lsp_types::SymbolKind> {\n\n use lsp_types::SymbolKind as K;\n\n let it = match kind {\n\n // パラメータなどの単一ファイルにだけ属するシンボルはworkspace/symbolリクエストの結果には含めない。\n\n HspSymbolKind::Unresolved\n\n | HspSymbolKind::Unknown\n\n | HspSymbolKind::Param(_)\n\n | HspSymbolKind::Module\n\n | HspSymbolKind::Field => return None,\n\n\n\n HspSymbolKind::StaticVar => K::VARIABLE,\n\n HspSymbolKind::Label\n\n | HspSymbolKind::Const\n\n | HspSymbolKind::Enum\n\n | HspSymbolKind::Macro { ctype: false }\n\n | HspSymbolKind::PluginCmd => K::CONSTANT,\n\n HspSymbolKind::Macro { ctype: true }\n\n | HspSymbolKind::DefFunc\n\n | HspSymbolKind::DefCFunc\n\n | HspSymbolKind::LibFunc => K::FUNCTION,\n", "file_path": "hsp3-analyzer-mini/ham-core/src/assists/workspace_symbol.rs", "rank": 27, "score": 236840.00540840672 }, { "content": "fn parse_args_in_paren(px: &mut Px) -> Option<(PToken, Vec<PArg>, Option<PToken>)> {\n\n let left_paren = px.eat(TokenKind::LeftParen)?;\n\n let args = parse_args(px);\n\n let right_paren_opt = px.eat(TokenKind::RightParen);\n\n Some((left_paren, args, right_paren_opt))\n\n}\n\n\n\npub(crate) fn parse_compound(px: &mut Px) -> Option<PCompound> {\n\n let name = px.eat(TokenKind::Ident)?;\n\n\n\n match px.next() {\n\n TokenKind::Dot => {\n\n let mut args = vec![];\n\n while let Some(dot) = px.eat(TokenKind::Dot) {\n\n let expr_opt = parse_expr(px);\n\n args.push(PDotArg { dot, expr_opt });\n\n }\n\n Some(PCompound::Dots(PNameDot { name, args }))\n\n }\n\n TokenKind::LeftParen => {\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_expr.rs", "rank": 28, "score": 228338.27554329057 }, { "content": "fn eat_arbitrary_tokens(px: &mut Px) -> Vec<PToken> {\n\n let mut tokens = vec![];\n\n while !px.next().is_end_of_preproc() {\n\n tokens.push(px.bump());\n\n }\n\n tokens\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 29, "score": 228306.56876821545 }, { "content": "/// 文字が解釈不能か?\n\nfn char_is_other_first(pp: bool, c: char) -> bool {\n\n if !pp && c == '#' {\n\n return true;\n\n }\n\n\n\n !char_is_eol(c)\n\n && !char_is_space(c)\n\n && !char_is_comment_first(c)\n\n && !c.is_ascii_digit()\n\n && !char_is_ident_first(c)\n\n && !char_is_pun_first(c)\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 30, "score": 221009.58112196287 }, { "content": "fn parse_privacy(px: &mut Px) -> Option<(PPrivacy, PToken)> {\n\n if px.next() != TokenKind::Ident {\n\n return None;\n\n }\n\n\n\n let privacy = PPrivacy::parse(px.next_token().body_text())?;\n\n let token = px.bump();\n\n Some((privacy, token))\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 31, "score": 220847.31744945393 }, { "content": "fn to_pos16(p: Position) -> Pos16 {\n\n Pos16::new(p.line as u32, p.character as u32)\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/tests.rs", "rank": 32, "score": 220461.7339406807 }, { "content": "fn parse_const_ty(px: &mut Px) -> Option<(PConstTy, PToken)> {\n\n if px.next() != TokenKind::Ident {\n\n return None;\n\n }\n\n\n\n let const_ty = PConstTy::parse(px.next_token().body_text())?;\n\n let token = px.bump();\n\n Some((const_ty, token))\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 33, "score": 215649.669109967 }, { "content": "fn parse_module_stmt(hash: PToken, px: &mut Px) -> PModuleStmt {\n\n assert_eq!(px.next_token().body_text(), \"module\");\n\n\n\n let keyword = px.bump();\n\n\n\n let name_opt = match px.next() {\n\n TokenKind::Ident | TokenKind::Str => Some(px.bump()),\n\n _ => None,\n\n };\n\n\n\n let fields = parse_deffunc_params(px);\n\n parse_end_of_preproc(px);\n\n\n\n let mut stmts = vec![];\n\n let (global_opt, behind) = loop {\n\n match px.next() {\n\n TokenKind::Eof => break (None, px.next_token().behind()),\n\n TokenKind::Eos | TokenKind::LeftBrace | TokenKind::RightBrace | TokenKind::Colon => {\n\n px.skip();\n\n }\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 34, "score": 215649.669109967 }, { "content": "fn parse_define_stmt(hash: PToken, px: &mut Px) -> PDefineStmt {\n\n assert_eq!(px.next_token().body_text(), \"define\");\n\n let keyword = px.bump();\n\n\n\n let privacy_opt = parse_privacy(px);\n\n let ctype_opt = eat_ident(\"ctype\", px);\n\n\n\n let name_opt = px.eat(TokenKind::Ident);\n\n let has_params = {\n\n // マクロ名と `(` の間にスペースがないときだけパラメータリストとみなす。\n\n px.next() == TokenKind::LeftParen\n\n && name_opt.as_ref().map_or(false, |name| {\n\n name.body.loc.end() == px.next_token().body.loc.start()\n\n })\n\n };\n\n let (left_paren_opt, params, right_paren_opt) = if has_params {\n\n let left_paren_opt = px.eat(TokenKind::LeftParen);\n\n let params = parse_macro_params(px);\n\n let right_paren_opt = px.eat(TokenKind::RightParen);\n\n (left_paren_opt, params, right_paren_opt)\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 35, "score": 215649.669109967 }, { "content": "fn parse_cmd_stmt(hash: PToken, px: &mut Px) -> PCmdStmt {\n\n assert_eq!(px.next_token().body_text(), \"cmd\");\n\n\n\n let keyword = px.bump();\n\n let privacy_opt = parse_privacy(px);\n\n let name_opt = px.eat(TokenKind::Ident);\n\n let command_id_opt = px.eat(TokenKind::Number);\n\n parse_end_of_preproc(px);\n\n\n\n PCmdStmt {\n\n hash,\n\n keyword,\n\n privacy_opt,\n\n name_opt,\n\n command_id_opt,\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 36, "score": 215649.669109967 }, { "content": "fn parse_global_stmt(hash: PToken, px: &mut Px) -> PGlobalStmt {\n\n assert_eq!(px.next_token().body_text(), \"global\");\n\n\n\n let keyword = px.bump();\n\n parse_end_of_preproc(px);\n\n\n\n PGlobalStmt { hash, keyword }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 37, "score": 215649.669109967 }, { "content": "fn parse_const_stmt(hash: PToken, px: &mut Px) -> PConstStmt {\n\n assert_eq!(px.next_token().body_text(), \"const\");\n\n let keyword = px.bump();\n\n\n\n let privacy_opt = parse_privacy(px);\n\n let ty_opt = parse_const_ty(px);\n\n let name_opt = px.eat(TokenKind::Ident);\n\n let init_opt = parse_expr(px);\n\n parse_end_of_preproc(px);\n\n\n\n PConstStmt {\n\n hash,\n\n keyword,\n\n privacy_opt,\n\n ty_opt,\n\n name_opt,\n\n init_opt,\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 38, "score": 215649.669109967 }, { "content": "fn parse_jump_modifier(px: &mut Px) -> Option<(PJumpModifier, PToken)> {\n\n if px.next() != TokenKind::Ident {\n\n return None;\n\n }\n\n\n\n let jump_modifier = PJumpModifier::parse(px.next_token().body_text())?;\n\n let token = px.bump();\n\n Some((jump_modifier, token))\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_stmt.rs", "rank": 39, "score": 215649.669109967 }, { "content": "fn parse_param_ty(px: &mut Px) -> Option<(PParamTy, PToken)> {\n\n if px.next() != TokenKind::Ident {\n\n return None;\n\n }\n\n\n\n let param_ty = PParamTy::parse(px.next_token().body_text())?;\n\n let token = px.bump();\n\n Some((param_ty, token))\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 40, "score": 215649.669109967 }, { "content": "/// 文字が改行ではない空白か?\n\nfn char_is_space(c: char) -> bool {\n\n c == ' ' || c == '\\t' || c == ' '\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 41, "score": 214820.9972855167 }, { "content": "/// 文字が識別子の一部になるか?\n\nfn char_is_ident(c: char) -> bool {\n\n c.is_ascii_alphanumeric() || c == '_'\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 42, "score": 214820.9972855167 }, { "content": "fn char_is_binary(c: char) -> bool {\n\n c == '0' || c == '1'\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 43, "score": 214820.9972855167 }, { "content": "/// 文字が改行か?\n\nfn char_is_eol(c: char) -> bool {\n\n c == '\\r' || c == '\\n'\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 44, "score": 214820.9972855167 }, { "content": "fn parse_regcmd_stmt(hash: PToken, px: &mut Px) -> PRegCmdStmt {\n\n assert_eq!(px.next_token().body_text(), \"regcmd\");\n\n\n\n let keyword = px.bump();\n\n let args = parse_args(px);\n\n parse_end_of_preproc(px);\n\n\n\n PRegCmdStmt {\n\n hash,\n\n keyword,\n\n args,\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 45, "score": 213169.7159538091 }, { "content": "fn char_is_comment_first(c: char) -> bool {\n\n c == ';' || c == '/'\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 46, "score": 212349.69076278998 }, { "content": "/// 文字が識別子の先頭になるか?\n\nfn char_is_ident_first(c: char) -> bool {\n\n char_is_ident(c) && !c.is_ascii_digit()\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 47, "score": 212349.69076278998 }, { "content": "/// 文字が約物の先頭になるか?\n\nfn char_is_pun_first(c: char) -> bool {\n\n \"!\\\"#$%&'()-=^\\\\|{}+*:,.<>/\".contains(c)\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 48, "score": 212349.69076278998 }, { "content": "fn parse_lib_func_stmt(hash: PToken, px: &mut Px) -> PLibFuncStmt {\n\n let keyword = px.bump();\n\n\n\n let privacy_opt = parse_privacy(px);\n\n let name_opt = px.eat(TokenKind::Ident);\n\n let onexit_opt = eat_ident(\"onexit\", px);\n\n\n\n let func_name_opt = match px.next() {\n\n TokenKind::Ident | TokenKind::Str => Some(px.bump()),\n\n _ => None,\n\n };\n\n let type_id_opt = px.eat(TokenKind::Number);\n\n let params = parse_deffunc_params(px);\n\n parse_end_of_preproc(px);\n\n\n\n PLibFuncStmt {\n\n hash,\n\n keyword,\n\n privacy_opt,\n\n name_opt,\n\n onexit_opt,\n\n func_name_opt,\n\n type_id_opt,\n\n params,\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 49, "score": 210764.15207952342 }, { "content": "fn leading_blank_range(token: &PToken) -> Range {\n\n let e = token.body_pos();\n\n let mut s = e;\n\n\n\n for t in token.leading.iter().rev() {\n\n match t.kind {\n\n TokenKind::Blank => {\n\n s = s.min(t.loc.range.start());\n\n }\n\n TokenKind::Newlines => {\n\n let last = t.text.rfind('\\n').unwrap() + 1;\n\n s = s.min(t.loc.range.start() + Pos::from(&t.text[..last]));\n\n break;\n\n }\n\n _ => break,\n\n }\n\n }\n\n\n\n Range::from(s..e)\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/assists/formatting.rs", "rank": 50, "score": 207647.73804746434 }, { "content": "fn trailing_blank_range(token: &PToken) -> Range {\n\n let s = token.body.loc.end();\n\n let e = token\n\n .trailing\n\n .iter()\n\n .take_while(|t| t.kind == TokenKind::Blank)\n\n .map(|t| t.loc.range.end())\n\n .last()\n\n .unwrap_or(s);\n\n Range::from(s..e)\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/assists/formatting.rs", "rank": 51, "score": 207647.73804746434 }, { "content": "fn eat_ident(pattern: &str, px: &mut Px) -> Option<PToken> {\n\n if px.next() == TokenKind::Ident && px.next_token().body_text() == pattern {\n\n Some(px.bump())\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 52, "score": 203197.80906680325 }, { "content": "fn on_compound_use(compound: &PCompound, ctx: &mut Ctx) {\n\n match compound {\n\n PCompound::Name(name) => on_symbol_use(name, true, ctx),\n\n PCompound::Paren(PNameParen { name, args, .. }) => {\n\n on_symbol_use(name, true, ctx);\n\n\n\n for arg in args {\n\n on_expr_opt(arg.expr_opt.as_ref(), ctx);\n\n }\n\n }\n\n PCompound::Dots(PNameDot { name, args }) => {\n\n on_symbol_use(name, true, ctx);\n\n\n\n for arg in args {\n\n on_expr_opt(arg.expr_opt.as_ref(), ctx);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/var.rs", "rank": 53, "score": 203077.32306493196 }, { "content": "fn convert_symbol(hs_symbol: HsSymbol) -> (SymbolRc, CompletionItem) {\n\n let kind = CompletionItemKind::FUNCTION;\n\n let HsSymbol {\n\n name,\n\n description,\n\n documentation,\n\n params_opt,\n\n builtin,\n\n } = hs_symbol;\n\n\n\n let name_rc = RcStr::from(name.clone());\n\n\n\n let signature_opt = params_opt.map(|params| {\n\n let params = params\n\n .into_iter()\n\n .map(|p| (None, Some(p.name.into()), p.details_opt))\n\n .collect();\n\n\n\n Rc::new(SignatureData {\n\n name: name_rc.clone(),\n", "file_path": "hsp3-analyzer-mini/ham-core/src/lang_service/search_hsphelp.rs", "rank": 54, "score": 201402.43425524104 }, { "content": "/// 小数部を字句解析する。(`3.14` などの `14` の部分)\n\nfn tokenize_fraction(t: &mut TokenizeContext) {\n\n assert!(t.next().is_ascii_digit());\n\n\n\n while t.next().is_ascii_digit() {\n\n t.bump();\n\n }\n\n t.commit(Token::Fraction);\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 55, "score": 199663.50826604984 }, { "content": "/// プリプロセッサ命令における改行のエスケープや、\n\n/// 複数行コメントや複数行文字列リテラルの中に改行を\n\nfn tokenize_segment(t: &mut TokenizeContext) {\n\n // この時点で t は行頭に位置する。\n\n // 行頭のスペースやコメントを除去する。(複数行コメントの中に改行があっても1行とみなす。)\n\n tokenize_spaces_comments(false, t);\n\n\n\n let pp = if t.eat(\"#\") {\n\n t.commit(Token::Hash);\n\n true\n\n } else {\n\n false\n\n };\n\n\n\n while !t.at_eof() && !tokenize_eol(t) {\n\n let ok = tokenize_space(pp, t)\n\n || tokenize_comment(t)\n\n || tokenize_number(pp, t)\n\n || tokenize_char(t)\n\n || tokenize_str(t)\n\n || tokenize_multiline_str(t)\n\n || tokenize_ident(t)\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 56, "score": 199663.50826604984 }, { "content": "fn tokenize_binary(t: &mut TokenizeContext) {\n\n while char_is_binary(t.next()) {\n\n t.bump();\n\n }\n\n t.commit(Token::Binary);\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 57, "score": 199663.50826604984 }, { "content": "/// 指数部を字句解析する。(`1e+9` などの `e+9` の部分)\n\nfn tokenize_exponent(t: &mut TokenizeContext) {\n\n assert!(t.next() == 'e' || t.next() == 'E');\n\n\n\n t.bump();\n\n t.commit(Token::ExpChar);\n\n\n\n // 指数部の符号\n\n let exp_sign = t.eat(\"+\") || t.eat(\"-\");\n\n if exp_sign {\n\n t.commit(Token::ExpSign);\n\n }\n\n\n\n // 指数部の数値\n\n if !exp_sign && !t.next().is_ascii_digit() {\n\n return;\n\n }\n\n\n\n while t.next().is_ascii_digit() {\n\n t.bump();\n\n }\n\n t.commit(Token::ExpDigit);\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 58, "score": 199663.50826604984 }, { "content": "fn tokenize_hex(t: &mut TokenizeContext) {\n\n while t.next().is_ascii_hexdigit() {\n\n t.bump();\n\n }\n\n t.commit(Token::Hex);\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 59, "score": 199663.50826604984 }, { "content": "fn parse_label_stmt(p: &mut Px) {\n\n assert_eq!(p.next(), Token::Star);\n\n\n\n p.start_node();\n\n\n\n parse_label_literal(p);\n\n parse_end_of_stmt(p);\n\n\n\n p.end_node(NodeKind::LabelStmt);\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_stmt.rs", "rank": 60, "score": 199403.5412396546 }, { "content": "/// 10進数の数字の直後にある、小数部や指数部を字句解析する。\n\nfn tokenize_digit_suffix(tx: &mut TokenizeContext) {\n\n // 小数部\n\n if tx.eat(\".\") {\n\n eat_digits(tx);\n\n }\n\n\n\n // 指数部\n\n if let 'e' | 'E' = tx.next() {\n\n tx.bump();\n\n\n\n if let '+' | '-' = tx.next() {\n\n tx.bump();\n\n }\n\n\n\n eat_digits(tx);\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/token/tokenize_rules.rs", "rank": 61, "score": 193369.9381633494 }, { "content": "fn resolve_scope_at(module_map: &ModuleMap, deffunc_map: &DefFuncMap, pos: Pos16) -> LocalScope {\n\n let mut scope = LocalScope::default();\n\n\n\n scope.module_opt = module_map.iter().find_map(|(&m, module_data)| {\n\n if range_is_touched(&module_data.content_loc.range, pos) {\n\n Some(m.clone())\n\n } else {\n\n None\n\n }\n\n });\n\n\n\n scope.deffunc_opt = deffunc_map.iter().find_map(|(&d, deffunc_data)| {\n\n if range_is_touched(&deffunc_data.content_loc.range, pos) {\n\n Some(d)\n\n } else {\n\n None\n\n }\n\n });\n\n\n\n scope\n\n}\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/project_analysis.rs", "rank": 62, "score": 190037.98195768005 }, { "content": "/// 改行でない空白文字を読み飛ばす。\n\nfn eat_blank(tx: &mut Tx) {\n\n loop {\n\n match tx.next() {\n\n ' ' | '\\t' | '\\u{3000}' => {\n\n tx.bump();\n\n }\n\n '\\r' => match tx.nth(1) {\n\n '\\n' => break,\n\n _ => tx.bump(),\n\n },\n\n '\\n' => break,\n\n c if c.is_whitespace() => {\n\n tx.bump();\n\n }\n\n _ => break,\n\n }\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/token/tokenize_rules.rs", "rank": 63, "score": 187438.38862486498 }, { "content": "fn eat_digits(tx: &mut Tx) {\n\n while tx.next().is_ascii_digit() {\n\n tx.bump();\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/token/tokenize_rules.rs", "rank": 64, "score": 187438.38862486498 }, { "content": "/// 行末まで読み飛ばす。改行自体は読まない。\n\nfn eat_line(tx: &mut Tx) {\n\n match tx.find(\"\\n\") {\n\n Some(mut len) => {\n\n // CRLF の LF が見つかったときは CR の前に戻る。\n\n if len >= 1 && tx.nth_byte(len - 1) == b'\\r' {\n\n len -= 1;\n\n }\n\n\n\n tx.bump_many(len)\n\n }\n\n\n\n // 改行が見つからない場合は、いま最終行なので、ファイルの末尾まで読む。\n\n None => tx.bump_all(),\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/token/tokenize_rules.rs", "rank": 65, "score": 187438.38862486498 }, { "content": "/// すべての空白を読み飛ばす。\n\nfn eat_spaces(tx: &mut Tx) {\n\n loop {\n\n match tx.next() {\n\n ' ' | '\\n' | '\\r' | '\\t' | '\\u{3000}' => tx.bump(),\n\n c if c.is_whitespace() => tx.bump(),\n\n _ => break,\n\n }\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/token/tokenize_rules.rs", "rank": 66, "score": 187438.38862486498 }, { "content": "fn look_ahead_stmt(p: &mut Px) -> StmtKind {\n\n assert_eq!(p.next(), Token::Ident);\n\n\n\n let second = p.nth(1);\n\n\n\n if second == Token::Minus && p.nth(2).at_end_of_stmt() {\n\n return StmtKind::Assign;\n\n }\n\n\n\n if second == Token::Minus || second == Token::Star {\n\n // 曖昧な文。notes.md を参照。\n\n return StmtKind::Command;\n\n }\n\n\n\n // mes \"hello\" のように識別子の直後に原子式があるケースは、代入文ではない。\n\n // また `on goto ...` のように jump modifier があるケースは命令文に確定。\n\n if (second != Token::LeftParen && second.is_atom_expr_first()) || second.is_jump_modifier() {\n\n return StmtKind::Command;\n\n }\n\n\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_stmt.rs", "rank": 67, "score": 187126.6444661566 }, { "content": "fn eat_hex_digits(tx: &mut Tx) {\n\n while tx.next().is_ascii_hexdigit() {\n\n tx.bump();\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/token/tokenize_rules.rs", "rank": 68, "score": 185133.14831994742 }, { "content": "fn eat_binary_digits(tx: &mut Tx) {\n\n while let '0' | '1' = tx.next() {\n\n tx.bump();\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/token/tokenize_rules.rs", "rank": 69, "score": 185133.14831994742 }, { "content": "fn ident_to_kind(s: &str) -> TokenKind {\n\n match s {\n\n \"if\" => TokenKind::If,\n\n \"else\" => TokenKind::Else,\n\n _ => TokenKind::Ident,\n\n }\n\n}\n\n\n\npub(crate) fn do_tokenize(tx: &mut Tx) {\n\n loop {\n\n match lookahead(tx) {\n\n Lookahead::Eof => break,\n\n Lookahead::Cr => {\n\n tx.bump();\n\n\n\n eat_blank(tx);\n\n tx.commit(TokenKind::Blank);\n\n }\n\n Lookahead::CrLf => {\n\n tx.bump_many(2);\n", "file_path": "hsp3-analyzer-mini/ham-core/src/token/tokenize_rules.rs", "rank": 70, "score": 184912.53079397962 }, { "content": "fn tokenize_char_or_str_content(t: &mut TokenizeContext, quote: char) {\n\n while !t.at_eof() && !char_is_eol(t.next()) && t.next() != quote {\n\n // \\ の直後が行末やファイル末尾のときはエスケープとみなさない。\n\n if t.eat(\"\\\\\") && !t.at_eof() && !char_is_eol(t.next()) {\n\n t.bump();\n\n t.commit(Token::StrEscape);\n\n continue;\n\n }\n\n\n\n let mut ok = false;\n\n\n\n while !t.at_eof() && !char_is_eol(t.next()) && t.next() != quote && t.next() != '\\\\' {\n\n t.bump();\n\n ok = true;\n\n }\n\n\n\n if ok {\n\n t.commit(Token::StrVerbatim);\n\n continue;\n\n }\n\n\n\n break;\n\n }\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 71, "score": 182673.3527962851 }, { "content": "/// 何文字か先読みして、次の字句を決定する。\n\nfn lookahead(tx: &mut Tx) -> Lookahead {\n\n match tx.next() {\n\n '\\0' => Lookahead::Eof,\n\n '\\r' => match tx.nth(1) {\n\n '\\n' => Lookahead::CrLf,\n\n _ => Lookahead::Cr,\n\n },\n\n '\\n' => Lookahead::Lf,\n\n ' ' | '\\t' | '\\u{3000}' => {\n\n // U+3000: 全角空白\n\n Lookahead::Blank\n\n }\n\n '0' => match tx.nth(1) {\n\n 'b' | 'B' => Lookahead::ZeroB,\n\n 'x' | 'X' => Lookahead::ZeroX,\n\n _ => Lookahead::Digit,\n\n },\n\n '$' => Lookahead::Dollar,\n\n '\\'' => Lookahead::SingleQuote,\n\n '\"' => Lookahead::DoubleQuote,\n", "file_path": "hsp3-analyzer-mini/ham-core/src/token/tokenize_rules.rs", "rank": 72, "score": 182595.21490047488 }, { "content": "fn str_is_whitespace(s: &str) -> bool {\n\n s.chars().all(|c| c.is_whitespace())\n\n}\n\n\n\n/// ヘルプソースファイルを解析してシンボル情報を集める。\n\npub(crate) fn parse_for_symbols(\n\n content: &str,\n\n symbols: &mut Vec<HsSymbol>,\n\n warnings: &mut Vec<String>,\n\n) {\n\n // セクションに分割する:\n\n\n\n let mut sections = vec![];\n\n {\n\n let mut section = vec![];\n\n let mut in_html = false;\n\n\n\n for line in content.lines() {\n\n if line.starts_with(\";\") {\n\n continue;\n", "file_path": "hsp3-analyzer-mini/ham-core/src/help_source.rs", "rank": 73, "score": 179902.73475647936 }, { "content": "fn create_global_env(symbols: &Symbols, env: &mut Env) {\n\n for symbol in symbols.iter() {\n\n if symbols.kind(&symbol) == SymbolKind::Param {\n\n continue;\n\n }\n\n\n\n if let Some(name) = symbols.unqualified_name(&symbol) {\n\n env.insert(name.to_string(), symbol.clone());\n\n }\n\n }\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/analysis/name_resolution.rs", "rank": 74, "score": 174409.38047321278 }, { "content": "/// ロガーを使った処理を行う。\n\nfn with_logger<F>(f: F)\n\nwhere\n\n F: Fn(&mut FileLogger),\n\n{\n\n (|| {\n\n // NOTE: static mut 変数へのアクセスは unsafe 。\n\n let logger_mutex: &sync::Mutex<_> = unsafe { LOGGER.as_ref() }?;\n\n\n\n // ロガーの所有権を一時的に借用する。\n\n let mut logger_lock = logger_mutex.lock().ok()?;\n\n\n\n // 初めてロガーを使用するときのみ、初期化を行う。\n\n if let LazyInit::Uninit = *logger_lock {\n\n let logger = match FileLogger::create(&log_file_path()) {\n\n Ok(logger) => LazyInit::Value(logger),\n\n Err(_) => LazyInit::Deinit,\n\n };\n\n *logger_lock = logger;\n\n }\n\n\n\n match *logger_lock {\n\n LazyInit::Uninit => unreachable!(),\n\n LazyInit::Deinit => {}\n\n LazyInit::Value(ref mut l) => f(l),\n\n }\n\n\n\n Some(())\n\n })();\n\n}\n\n\n", "file_path": "hsp3-debug-ginger/adapter/src/logger.rs", "rank": 75, "score": 172509.44260556618 }, { "content": "/// グローバル変数を使って処理を行う。\n\nfn with_globals<F>(f: F)\n\nwhere\n\n F: FnOnce(&mut Globals),\n\n{\n\n if let Some(cell) = unsafe { GLOBALS.as_ref() } {\n\n let globals = unsafe { &mut *cell.get() };\n\n f(globals)\n\n }\n\n}\n\n\n", "file_path": "hsp3-debug-ginger/adapter/src/lib.rs", "rank": 76, "score": 172509.44260556618 }, { "content": "/// エスケープシーケンスを含む引用符の中身を読み進める。`quote` が出てきたら終わり。\n\nfn eat_escaped_text(quote: char, tx: &mut Tx) {\n\n loop {\n\n match tx.next() {\n\n '\\0' | '\\n' | '\\r' => break,\n\n '\\\\' => {\n\n tx.bump();\n\n tx.bump();\n\n }\n\n c if c == quote => break,\n\n _ => tx.bump(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/token/tokenize_rules.rs", "rank": 77, "score": 172017.42476809112 }, { "content": "fn lookahead_after_paren(mut i: usize, px: &mut Px) -> ExprLikeStmtKind {\n\n let mut balance = 1;\n\n\n\n loop {\n\n let kind = px.nth(i);\n\n i += 1;\n\n\n\n match kind {\n\n TokenKind::LeftParen => balance += 1,\n\n TokenKind::RightParen => match balance {\n\n 0 | 1 => break,\n\n _ => balance -= 1,\n\n },\n\n TokenKind::Comma if balance == 1 => {\n\n // カッコの直下にカンマがあるなら添字のカッコなので、代入文で確定。\n\n return ExprLikeStmtKind::Assign;\n\n }\n\n TokenKind::SlimArrow => {\n\n return ExprLikeStmtKind::Invoke;\n\n }\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_stmt.rs", "rank": 79, "score": 170283.1682381958 }, { "content": "fn go(node: SyntaxNode, gsc: &mut GlobalSymbolCollection) {\n\n for child in node.child_nodes() {\n\n match child.kind() {\n\n NodeKind::Ident => {\n\n let name = AName::cast(&child).unwrap();\n\n gsc.name_context.set_enclosures(\n\n name,\n\n gsc.current_deffunc_opt.clone(),\n\n gsc.current_module_opt.clone(),\n\n );\n\n }\n\n NodeKind::LabelStmt => {\n\n // gsc.symbols.push(GlobalSymbol::Label {\n\n // label_stmt: Rc::new(child.clone()),\n\n // module_stmt_opt: gsc.current_module_opt.clone(),\n\n // });\n\n }\n\n NodeKind::Param => {\n\n let enclosing_deffunc = gsc.current_deffunc_opt.clone();\n\n gsc.symbols\n", "file_path": "hsp3-forgery/hf_core/src/analysis/get_global_symbols.rs", "rank": 80, "score": 170156.94350598287 }, { "content": "fn parse_block(px: &mut Px) -> PBlock {\n\n let mut block = PBlock::default();\n\n\n\n // outer_stmtsをパースしながら `{` または `else` を探す。\n\n let left = loop {\n\n match px.next() {\n\n TokenKind::Eof | TokenKind::Eos | TokenKind::RightBrace | TokenKind::Else => {\n\n return block\n\n }\n\n TokenKind::LeftBrace => break px.bump(),\n\n TokenKind::Colon => {\n\n px.skip();\n\n block.outer_stmts.extend(parse_stmt(px));\n\n }\n\n _ => px.skip(),\n\n }\n\n };\n\n block.left_opt = Some(left);\n\n\n\n // inner_stmtsをパースしながら `}` を探す。\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_stmt.rs", "rank": 81, "score": 164911.54926880496 }, { "content": "type HspMsgFunc = Option<unsafe extern \"C\" fn(*mut hspsdk::HSPCTX)>;\n\n\n\n/// グローバル変数をまとめたもの。\n\n/// `debug_notice` などの関数に状態をもたせるために使う。\n\npub(crate) struct Globals {\n\n app_sender: app::Sender,\n\n hsprt_receiver: mpsc::Receiver<Action>,\n\n hsp_debug: *mut hspsdk::HSP3DEBUG,\n\n default_msgfunc: HspMsgFunc,\n\n #[allow(unused)]\n\n join_handles: Vec<thread::JoinHandle<()>>,\n\n}\n\n\n\nimpl Globals {\n\n /// 初期化処理を行い、各グローバル変数の初期値を設定して `Globals` を構築する。\n\n fn create(hsp_debug: *mut hspsdk::HSP3DEBUG) -> Self {\n\n debug!(\"debugini\");\n\n\n\n // msgfunc に操作を送信するチャネルを生成する。\n\n let (sender, hsprt_receiver) = mpsc::channel();\n", "file_path": "hsp3-debug-ginger/adapter/src/lib.rs", "rank": 82, "score": 164534.26189837037 }, { "content": "fn on_stmt(stmt: &PStmt, ctx: &mut Ctx) {\n\n match stmt {\n\n PStmt::Label(PLabel { name_opt, .. }) => {\n\n if let Some(name) = name_opt {\n\n add_symbol(HspSymbolKind::Label, name, DEF_SITE, ctx);\n\n }\n\n }\n\n PStmt::Assign(PAssignStmt {\n\n left,\n\n op_opt: _,\n\n args,\n\n }) => {\n\n // FIXME: def/use は演算子の種類による\n\n on_compound_def(left, ctx);\n\n on_args(args, ctx);\n\n }\n\n PStmt::Command(PCommandStmt { command, args, .. }) => {\n\n on_symbol_use(command, false, ctx);\n\n\n\n static COMMANDS: &[&str] = &[\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/var.rs", "rank": 83, "score": 163364.68169683317 }, { "content": "fn on_block(block: &PBlock, ctx: &mut Ctx) {\n\n for stmt in &block.outer_stmts {\n\n on_stmt(stmt, ctx);\n\n }\n\n for stmt in &block.inner_stmts {\n\n on_stmt(stmt, ctx);\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/preproc.rs", "rank": 84, "score": 163364.68169683317 }, { "content": "fn on_stmt(stmt: &PStmt, ctx: &mut Sema) {\n\n match stmt {\n\n PStmt::Label(_) => {}\n\n PStmt::Assign(_) => {}\n\n PStmt::Command(stmt) => {\n\n let loc = stmt.command.body.loc;\n\n let symbol = match ctx.symbol(loc) {\n\n Some(it) => it,\n\n None => {\n\n ctx.diagnostics.push((Diagnostic::Undefined, loc));\n\n return;\n\n }\n\n };\n\n\n\n if let Some(signature_data) = symbol.signature_opt() {\n\n for (arg, _) in stmt\n\n .args\n\n .iter()\n\n .zip(&signature_data.params)\n\n .filter(|(_, (param, _, _))| param.map_or(false, |p| p.is_by_ref()))\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/sema.rs", "rank": 85, "score": 163364.68169683317 }, { "content": "fn on_expr(expr: &PExpr, ctx: &mut Ctx) {\n\n match expr {\n\n PExpr::Literal(_) => {}\n\n PExpr::Label(PLabel { star: _, name_opt }) => {\n\n if let Some(name) = name_opt {\n\n on_symbol_use(name, false, ctx);\n\n }\n\n }\n\n PExpr::Compound(compound) => on_compound_use(compound, ctx),\n\n PExpr::Paren(PParenExpr { body_opt, .. }) => on_expr_opt(body_opt.as_deref(), ctx),\n\n PExpr::Prefix(PPrefixExpr { prefix: _, arg_opt }) => on_expr_opt(arg_opt.as_deref(), ctx),\n\n PExpr::Infix(PInfixExpr {\n\n infix: _,\n\n left,\n\n right_opt,\n\n }) => {\n\n on_expr(left, ctx);\n\n on_expr_opt(right_opt.as_deref(), ctx);\n\n }\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/var.rs", "rank": 86, "score": 163364.68169683317 }, { "content": "fn on_args(args: &[PArg], ctx: &mut Ctx) {\n\n for arg in args {\n\n on_expr_opt(arg.expr_opt.as_ref(), ctx);\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/var.rs", "rank": 87, "score": 163364.68169683317 }, { "content": "fn on_stmt(stmt: &PStmt, ctx: &mut Ctx) {\n\n match stmt {\n\n PStmt::Label(PLabel { star, name_opt }) => {\n\n if let Some(name) = name_opt {\n\n ctx.add_symbol(HspSymbolKind::Label, star, name, ImportMode::Local);\n\n }\n\n }\n\n PStmt::Assign(_) | PStmt::Command(_) | PStmt::Invoke(_) => {}\n\n PStmt::If(stmt) => {\n\n on_block(&stmt.body, ctx);\n\n on_block(&stmt.alt, ctx);\n\n }\n\n PStmt::Const(PConstStmt {\n\n hash,\n\n privacy_opt,\n\n name_opt,\n\n ..\n\n }) => {\n\n if let Some(name) = name_opt {\n\n let scope = ctx.privacy_scope_or_local(privacy_opt);\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/preproc.rs", "rank": 88, "score": 163364.68169683317 }, { "content": "type UseSiteMap = HashMap<(DocId, Pos), SymbolRc>;\n\n\n\npub(crate) struct Sema {\n\n pub(crate) use_site_map: UseSiteMap,\n\n pub(crate) diagnostics: Vec<(Diagnostic, Loc)>,\n\n}\n\n\n\nimpl Sema {\n\n pub(crate) fn on_root(&mut self, root: &PRoot) {\n\n for stmt in &root.stmts {\n\n on_stmt(stmt, self)\n\n }\n\n }\n\n\n\n fn symbol(&self, loc: Loc) -> Option<SymbolRc> {\n\n self.use_site_map.get(&(loc.doc, loc.start())).cloned()\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/sema.rs", "rank": 89, "score": 161327.90010550758 }, { "content": "fn close_module(node_opt: Option<&SyntaxNode>, gsc: &mut GlobalSymbolCollection) {\n\n let module_symbol = match gsc.current_module_opt.take() {\n\n None => return,\n\n Some(x) => x,\n\n };\n\n\n\n gsc.symbols.define_module(&module_symbol, node_opt.cloned());\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/analysis/get_global_symbols.rs", "rank": 90, "score": 161073.70787912613 }, { "content": "fn close_deffunc(node_opt: Option<&SyntaxNode>, gsc: &mut GlobalSymbolCollection) {\n\n let deffunc_symbol = match gsc.current_deffunc_opt.take() {\n\n None => return,\n\n Some(x) => x,\n\n };\n\n\n\n gsc.symbols\n\n .define_deffunc(&deffunc_symbol, node_opt.cloned());\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/analysis/get_global_symbols.rs", "rank": 91, "score": 161073.70787912613 }, { "content": "fn on_compound_def(compound: &PCompound, ctx: &mut Ctx) {\n\n match compound {\n\n PCompound::Name(name) => on_symbol_def(name, ctx),\n\n PCompound::Paren(PNameParen { name, args, .. }) => {\n\n on_symbol_def(name, ctx);\n\n\n\n for arg in args {\n\n on_expr_opt(arg.expr_opt.as_ref(), ctx);\n\n }\n\n }\n\n PCompound::Dots(PNameDot { name, args }) => {\n\n on_symbol_def(name, ctx);\n\n\n\n for arg in args {\n\n on_expr_opt(arg.expr_opt.as_ref(), ctx);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/var.rs", "rank": 92, "score": 161026.21269136315 }, { "content": "fn lookahead_stmt(px: &mut Px) -> ExprLikeStmtKind {\n\n match px.nth(1) {\n\n TokenKind::LeftParen => lookahead_after_paren(2, px),\n\n TokenKind::Dot => ExprLikeStmtKind::Assign,\n\n TokenKind::SlimArrow => ExprLikeStmtKind::Invoke,\n\n TokenKind::Plus | TokenKind::Minus if px.nth(2).is_end_of_stmt() => {\n\n ExprLikeStmtKind::Assign\n\n }\n\n second => match second.to_op_kind() {\n\n None | Some(POpKind::Infix) | Some(POpKind::PrefixOrInfixOrAssign) => {\n\n ExprLikeStmtKind::Command\n\n }\n\n Some(POpKind::InfixOrAssign) | Some(POpKind::Assign) => ExprLikeStmtKind::Assign,\n\n },\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_stmt.rs", "rank": 93, "score": 160516.9560310088 }, { "content": "fn arg_is_definitely_rval(arg: &PArg, ctx: &Sema) -> bool {\n\n let mut expr_opt = arg.expr_opt.as_ref();\n\n while let Some(expr) = expr_opt {\n\n match expr {\n\n PExpr::Compound(compound) => {\n\n let name = &compound.name().body;\n\n\n\n let symbol = match ctx.symbol(name.loc) {\n\n Some(it) => it,\n\n _ => return false,\n\n };\n\n\n\n return symbol_kind_is_definitely_rval(symbol.kind);\n\n }\n\n PExpr::Paren(expr) => expr_opt = expr.body_opt.as_deref(),\n\n _ => return true,\n\n }\n\n }\n\n false\n\n}\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/sema.rs", "rank": 94, "score": 158968.33551065286 }, { "content": "fn parse_if_stmt(px: &mut Px) -> Option<PIfStmt> {\n\n let command = px.bump();\n\n let cond_opt = parse_expr(px);\n\n let body = parse_block(px);\n\n\n\n let else_opt = match (px.next(), px.nth(1)) {\n\n // elseの直前は1個だけ改行が認められる。\n\n (TokenKind::Eos, TokenKind::Else) => {\n\n px.skip();\n\n Some(px.bump()) // else\n\n }\n\n (TokenKind::Else, _) => Some(px.bump()),\n\n _ => None,\n\n };\n\n let alt = if else_opt.is_some() {\n\n parse_block(px)\n\n } else {\n\n PBlock::default()\n\n };\n\n parse_end_of_stmt(px);\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_stmt.rs", "rank": 95, "score": 158771.68163945046 }, { "content": "fn to_lsp_range(range: crate::source::Range) -> lsp_types::Range {\n\n lsp_types::Range::new(to_position(range.start()), to_position(range.end()))\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/assists.rs", "rank": 96, "score": 158114.99004562604 }, { "content": "fn parse_params(p: &mut Px) {\n\n // 引数の省略がある parse_args とは異なる方法でカンマや構文エラーを処理する。\n\n\n\n loop {\n\n // エラー回復\n\n if !p.at_eof() && p.next() != Token::Ident && !p.next().at_end_of_pp() {\n\n p.start_node();\n\n while !p.at_eof() && p.next() != Token::Ident && !p.next().at_end_of_pp() {\n\n p.bump();\n\n }\n\n p.end_node(NodeKind::Other);\n\n }\n\n\n\n if p.next() != Token::Ident {\n\n break;\n\n }\n\n\n\n p.start_node();\n\n parse_param_type(p);\n\n\n\n if p.next() == Token::Ident {\n\n parse_name(p);\n\n }\n\n\n\n p.eat(Token::Comma);\n\n p.end_node(NodeKind::Param);\n\n }\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_pp.rs", "rank": 97, "score": 156651.16212708794 }, { "content": "fn parse_stmt(p: &mut Px) {\n\n match p.next() {\n\n Token::Ident => parse_ambiguous_stmt(p),\n\n Token::Star => parse_label_stmt(p),\n\n Token::Hash => parse_pp_stmt(p),\n\n _ if p.next().is_control_keyword() => parse_command_stmt(p),\n\n _ => {\n\n // assert!(p.next().at_end_of_stmt(), \"is_stmt_first/at_end_of_stmt bug\");\n\n parse_end_of_stmt(p);\n\n }\n\n }\n\n}\n\n\n\npub(crate) fn parse_root(p: &mut Px) {\n\n while !p.at_eof() {\n\n // エラー回復\n\n if !p.next().is_stmt_first() && !p.next().at_end_of_stmt() {\n\n p.start_node();\n\n\n\n while !p.at_eof() && !p.next().is_stmt_first() {\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_stmt.rs", "rank": 98, "score": 156651.16212708794 }, { "content": "fn parse_factor(p: &mut Px) {\n\n match p.next() {\n\n Token::Ident => parse_call_expr(p),\n\n Token::LeftParen => parse_group_expr(p),\n\n Token::CharStart => parse_char_literal(p),\n\n Token::FloatInt => parse_double_literal(p),\n\n Token::Minus => parse_unary_expr(p),\n\n Token::Star => parse_label_literal(p),\n\n Token::StrStart => parse_str_literal(p),\n\n _ if p.next().is_int_literal_first() => parse_int_literal(p),\n\n _ => unreachable!(\"is_expr_first\"),\n\n }\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_expr.rs", "rank": 99, "score": 156651.16212708794 } ]
Rust
muse/src/instrument.rs
khonsulabs/muse
26aadbc56b3c2029b5d1740da7aa3f9873cfd66d
use crate::{ envelope::PlayingState, manager::{Device, PlayingHandle}, node::{Instantiatable, LoadedInstrument}, note::Note, sampler::PreparedSampler, }; use crossbeam::atomic::AtomicCell; use std::{ sync::{Arc, RwLock}, time::Duration, }; #[cfg(feature = "serialization")] pub mod serialization; pub struct GeneratedTone<T> { pub source: T, pub control: ControlHandle, } pub type ControlHandle = Arc<AtomicCell<PlayingState>>; #[derive(Debug, Default)] pub struct ControlHandles(Arc<RwLock<Vec<ControlHandle>>>); impl ControlHandles { pub fn new() -> Self { Self(Arc::new(RwLock::new(Vec::new()))) } pub fn push(&self, value: ControlHandle) { let mut vec = self.0.write().unwrap(); vec.push(value); } pub fn is_playing(&self) -> bool { let vec = self.0.read().unwrap(); for control in vec.iter() { if let PlayingState::Playing = control.load() { return true; } } false } fn stop(&self) { let vec = self.0.read().unwrap(); for control in vec.iter() { control.store(PlayingState::Stopping); } } fn stopped(&self) -> bool { let control_handles = self.0.read().unwrap(); control_handles .iter() .map(|control| control.load()) .all(|state| state == PlayingState::Stopped) } fn sustain(&self) { let vec = self.0.read().unwrap(); for control in vec.iter() { control.store(PlayingState::Sustaining); } } pub fn new_handle(&self) -> ControlHandle { let handle = Arc::new(AtomicCell::new(PlayingState::Playing)); let mut vec = self.0.write().unwrap(); vec.push(handle.clone()); handle } } #[derive(Debug)] pub struct InstrumentController<T> { pub control_handles: ControlHandles, _tone_generator: std::marker::PhantomData<T>, } impl<T> Default for InstrumentController<T> { fn default() -> Self { Self { control_handles: ControlHandles::new(), _tone_generator: std::marker::PhantomData::default(), } } } impl<T> InstrumentController<T> where T: ToneGenerator, T::CustomNodes: Instantiatable + Clone + 'static, { pub fn instantiate( &mut self, sampler: &LoadedInstrument<T::CustomNodes>, note: Note, ) -> Result<PreparedSampler, serialization::Error> { Ok(sampler.instantiate(&note, &self.control_handles)) } } pub trait ToneGenerator: Sized { type CustomNodes: Instantiatable + Clone + 'static; fn generate_tone( &mut self, note: Note, control: &mut InstrumentController<Self>, ) -> Result<PreparedSampler, anyhow::Error>; } pub struct PlayingNote<T> { note: Note, handle: Option<PlayingHandle>, controller: InstrumentController<T>, } impl<T> PlayingNote<T> { fn is_playing(&self) -> bool { self.controller.control_handles.is_playing() } fn stop(&self) { self.controller.control_handles.stop() } fn sustain(&self) { self.controller.control_handles.sustain() } } impl<T> Drop for PlayingNote<T> { fn drop(&mut self) { self.stop(); let handle = std::mem::take(&mut self.handle); let control_handles = std::mem::take(&mut self.controller.control_handles); std::thread::spawn(move || loop { { if control_handles.stopped() { println!("Sound stopping"); drop(handle); return; } } std::thread::sleep(Duration::from_millis(10)); }); } } #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] pub enum Loudness { Fortissimo, MezzoForte, Pianissimo, } pub struct VirtualInstrument<T> { playing_notes: Vec<PlayingNote<T>>, device: Device, sustain: bool, tone_generator: T, } impl<T> VirtualInstrument<T> where T: ToneGenerator, { pub fn new(device: Device, tone_generator: T) -> Self { Self { device, tone_generator, playing_notes: Vec::new(), sustain: false, } } pub fn new_with_default_output(tone_generator: T) -> Result<Self, anyhow::Error> { let device = Device::default_output()?; Ok(Self::new(device, tone_generator)) } pub fn play_note(&mut self, note: Note) -> Result<(), anyhow::Error> { self.playing_notes .retain(|n| n.note.step() as u8 != note.step() as u8); let mut controller = InstrumentController::default(); let source = self.tone_generator.generate_tone(note, &mut controller)?; let handle = Some(self.device.play(source, note)?); self.playing_notes.push(PlayingNote { note, handle, controller, }); Ok(()) } pub fn stop_note(&mut self, step: u8) { if self.sustain { if let Some(existing_note) = self .playing_notes .iter_mut() .find(|pn| pn.note.step() as u8 == step) { existing_note.sustain(); } } else { self.playing_notes.retain(|pn| pn.note.step() as u8 != step); } } pub fn set_sustain(&mut self, active: bool) { self.sustain = active; if !active { self.playing_notes.retain(|n| n.is_playing()); } } }
use crate::{ envelope::PlayingState, manager::{Device, PlayingHandle}, node::{Instantiatable, LoadedInstrument}, note::Note, sampler::PreparedSampler, }; use crossbeam::atomic::AtomicCell; use std::{ sync::{Arc, RwLock}, time::Duration, }; #[cfg(feature = "serialization")] pub mod serialization; pub struct GeneratedTone<T> { pub source: T, pub control: ControlHandle, } pub type ControlHandle = Arc<AtomicCell<PlayingState>>; #[derive(Debug, Default)] pub struct ControlHandles(Arc<RwLock<Vec<ControlHandle>>>); impl ControlHandles { pub fn new() -> Self { Self(Arc::new(RwLock::new(Vec::new()))) } pub fn push(&self, value: ControlHandle) { let mut vec = self.0.write().unwrap(); vec.push(value); } pub fn is_playing(&self) -> bool { let vec = self.0.read().unwrap(); for control in vec.iter() { if let PlayingState::Playing = control.load() { return true; } } false } fn stop(&self) { let vec = self.0.read().unwrap(); for control in vec.iter() { control.store(PlayingState::Stopping); } } fn stopped(&self) -> bool { let control_handles = self.0.read().unwrap(); control_handles .iter() .map(|control| control.load()) .all(|state| state == PlayingState::Stopped) } fn sustain(&self) { let vec = self.0.read().unwrap(); for control in vec.iter() { control.store(PlayingState::Sustaining); } } pub fn new_handle(&self) -> ControlHandle { let handle = Arc::new(AtomicCell::new(PlayingState::Playing)); let mut vec = self.0.write().unwrap(); vec.push(handle.clone()); handle } } #[derive(Debug)] pub struct InstrumentController<T> { pub control_handles: ControlHandles, _tone_generator: std::marker::PhantomData<T>, } impl<T> Default for InstrumentController<T> { fn default() -> Self { Self { control_handles: ControlHandles::new(), _tone_generator: std::marker::PhantomData::default(), } } } impl<T> InstrumentController<T> where T: ToneGenerator, T::CustomNodes: Instantiatable + Clone + 'static, { pub fn instantiate( &mut self, sampler: &LoadedInstrument<T::CustomNodes>, note: Note, ) -> Result<PreparedSampler, serialization::Error> { Ok(sampler.instantiate(&note, &self.control_handles)) } } pub trait ToneGenerator: Sized { type CustomNodes: Instantiatable + Clone + 'static; fn generate_tone( &mut self, note: Note, control: &mut InstrumentController<Self>, ) -> Result<PreparedSampler, anyhow::Error>; } pub struct PlayingNote<T> { note: Note, handle: Option<PlayingHandle>, controller: InstrumentController<T>, } impl<T> PlayingNote<T> { fn is_playing(&self) -> bool { self.controller.control_handles.is_playing() } fn stop(&self) {
self.playing_notes.retain(|pn| pn.note.step() as u8 != step); } } pub fn set_sustain(&mut self, active: bool) { self.sustain = active; if !active { self.playing_notes.retain(|n| n.is_playing()); } } }
self.controller.control_handles.stop() } fn sustain(&self) { self.controller.control_handles.sustain() } } impl<T> Drop for PlayingNote<T> { fn drop(&mut self) { self.stop(); let handle = std::mem::take(&mut self.handle); let control_handles = std::mem::take(&mut self.controller.control_handles); std::thread::spawn(move || loop { { if control_handles.stopped() { println!("Sound stopping"); drop(handle); return; } } std::thread::sleep(Duration::from_millis(10)); }); } } #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] pub enum Loudness { Fortissimo, MezzoForte, Pianissimo, } pub struct VirtualInstrument<T> { playing_notes: Vec<PlayingNote<T>>, device: Device, sustain: bool, tone_generator: T, } impl<T> VirtualInstrument<T> where T: ToneGenerator, { pub fn new(device: Device, tone_generator: T) -> Self { Self { device, tone_generator, playing_notes: Vec::new(), sustain: false, } } pub fn new_with_default_output(tone_generator: T) -> Result<Self, anyhow::Error> { let device = Device::default_output()?; Ok(Self::new(device, tone_generator)) } pub fn play_note(&mut self, note: Note) -> Result<(), anyhow::Error> { self.playing_notes .retain(|n| n.note.step() as u8 != note.step() as u8); let mut controller = InstrumentController::default(); let source = self.tone_generator.generate_tone(note, &mut controller)?; let handle = Some(self.device.play(source, note)?); self.playing_notes.push(PlayingNote { note, handle, controller, }); Ok(()) } pub fn stop_note(&mut self, step: u8) { if self.sustain { if let Some(existing_note) = self .playing_notes .iter_mut() .find(|pn| pn.note.step() as u8 == step) { existing_note.sustain(); } } else {
random
[ { "content": "/// A mapping from a Rust type to its Muse [`Type`].\n\npub trait CustomType: Send + Sync + Debug + 'static {\n\n /// Returns the Muse type for this Rust type.\n\n fn muse_type(&self) -> &TypeRef;\n\n}\n\n\n", "file_path": "src/runtime/value.rs", "rank": 0, "score": 193705.00259367825 }, { "content": "/// A [`CustomType`] that can be type-erased.\n\npub trait DynamicValue: CustomType {\n\n /// Returns `self` as an [`Any`].\n\n fn as_any(&self) -> &dyn Any;\n\n /// Returns `self` as a mut [`Any`].\n\n fn as_any_mut(&mut self) -> &mut dyn Any;\n\n}\n\n\n\nimpl<T> DynamicValue for T\n\nwhere\n\n T: CustomType,\n\n{\n\n fn as_any(&self) -> &dyn Any {\n\n self\n\n }\n\n\n\n fn as_any_mut(&mut self) -> &mut dyn Any {\n\n self\n\n }\n\n}\n\n\n", "file_path": "src/runtime/value.rs", "rank": 1, "score": 181693.5153382311 }, { "content": "trait RustFn<T>: Send + Sync + 'static\n\nwhere\n\n T: CustomType + Trace,\n\n{\n\n fn invoke(&self, vm: &mut VmContext<'_, '_>, this: &Rooted<T>) -> Result<Value, Fault>;\n\n}\n\n\n\nimpl<T, F> RustFn<T> for F\n\nwhere\n\n F: Fn(&mut VmContext<'_, '_>, &Rooted<T>) -> Result<Value, Fault> + Send + Sync + 'static,\n\n T: CustomType + Trace,\n\n{\n\n fn invoke(&self, vm: &mut VmContext<'_, '_>, this: &Rooted<T>) -> Result<Value, Fault> {\n\n self(vm, this)\n\n }\n\n}\n\n\n", "file_path": "src/runtime/value.rs", "rank": 2, "score": 156906.96724799005 }, { "content": "type ArcRustFn = Arc<dyn Fn(&mut VmContext<'_, '_>, Arity) -> Result<Value, Fault> + Send + Sync>;\n\n\n\n/// A Rust function that can be stored in a [`Value`] and called.\n\n#[derive(Clone)]\n\npub struct RustFunction(ArcRustFn);\n\n\n\nimpl RustFunction {\n\n /// Returns a new function that invokes `function` when called.\n\n pub fn new<F>(function: F) -> Self\n\n where\n\n F: Fn(&mut VmContext<'_, '_>, Arity) -> Result<Value, Fault> + Send + Sync + 'static,\n\n {\n\n Self(Arc::new(function))\n\n }\n\n}\n\n\n\nimpl Debug for RustFunction {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_tuple(\"RustFunction\")\n\n .field(&std::ptr::addr_of!(self.0).cast::<()>())\n", "file_path": "src/runtime/value.rs", "rank": 3, "score": 141030.9451669968 }, { "content": "pub trait VmUi: Sized {\n\n fn with_ui(self, guard: &mut CollectionGuard<'_>) -> Self;\n\n}\n\n\n\nimpl VmUi for Vm {\n\n fn with_ui(self, guard: &mut CollectionGuard<'_>) -> Self {\n\n install(&self, guard);\n\n self\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum MuseWidget {\n\n FloatSlider(Slider<f64>),\n\n IntSlider(Slider<i64>),\n\n Expand(Expand),\n\n}\n\n\n\nimpl MakeWidget for &'_ MuseWidget {\n\n fn make_widget(self) -> WidgetInstance {\n", "file_path": "muse-ui/src/lib.rs", "rank": 4, "score": 137526.24208322572 }, { "content": "/// A function that can be used as a macro in a [`Compiler`].\n\npub trait MacroFn: Send + Sync {\n\n /// Returns a series of tokens from the given tokens.\n\n fn transform(&mut self, tokens: VecDeque<Ranged<Token>>) -> VecDeque<Ranged<Token>>;\n\n}\n\n\n\nimpl<F> MacroFn for F\n\nwhere\n\n F: FnMut(VecDeque<Ranged<Token>>) -> VecDeque<Ranged<Token>> + Send + Sync,\n\n{\n\n fn transform(&mut self, tokens: VecDeque<Ranged<Token>>) -> VecDeque<Ranged<Token>> {\n\n self(tokens)\n\n }\n\n}\n\n\n", "file_path": "src/compiler.rs", "rank": 5, "score": 136490.1795170001 }, { "content": "pub trait Instruction: Send + Sync + Debug + 'static {\n\n fn execute(&self, vm: &mut VmContext<'_, '_>) -> Result<ControlFlow<()>, Fault>;\n\n fn as_op(&self, guard: &CollectionGuard<'_>) -> Op;\n\n}\n\n\n\n#[derive(Debug, Hash, Eq, PartialEq, Clone, Copy)]\n\npub struct Return;\n\n\n\nimpl Instruction for Return {\n\n fn execute(&self, _vm: &mut VmContext<'_, '_>) -> Result<ControlFlow<()>, Fault> {\n\n Ok(ControlFlow::Break(()))\n\n }\n\n\n\n fn as_op(&self, _guard: &CollectionGuard<'_>) -> Op {\n\n Op::Return\n\n }\n\n}\n\n\n\n#[derive(Debug, Hash, Eq, PartialEq, Clone, Copy)]\n\npub struct Throw(pub FaultKind);\n", "file_path": "src/vm/dispatched.rs", "rank": 6, "score": 135410.87879473792 }, { "content": "/// A function that can be used as an infix macro in a [`Compiler`].\n\npub trait InfixMacroFn: Send + Sync {\n\n /// Returns a series of tokens from the given expression and tokens.\n\n fn transform(\n\n &mut self,\n\n expression: &Ranged<Expression>,\n\n tokens: VecDeque<Ranged<Token>>,\n\n ) -> VecDeque<Ranged<Token>>;\n\n}\n\n\n\nimpl<F> InfixMacroFn for F\n\nwhere\n\n F: FnMut(&Ranged<Expression>, VecDeque<Ranged<Token>>) -> VecDeque<Ranged<Token>> + Send + Sync,\n\n{\n\n fn transform(\n\n &mut self,\n\n expression: &Ranged<Expression>,\n\n tokens: VecDeque<Ranged<Token>>,\n\n ) -> VecDeque<Ranged<Token>> {\n\n self(expression, tokens)\n\n }\n\n}\n\n\n", "file_path": "src/compiler.rs", "rank": 7, "score": 132552.00776737096 }, { "content": "struct Return;\n\n\n\nimpl Parselet for Return {\n\n fn token(&self) -> Option<Token> {\n\n Some(Token::Identifier(Symbol::return_symbol().clone()))\n\n }\n\n}\n\n\n\nimpl PrefixParselet for Return {\n\n fn parse_prefix(\n\n &self,\n\n r#return: Ranged<Token>,\n\n tokens: &mut TokenReader<'_>,\n\n config: &ParserConfig<'_>,\n\n ) -> Result<Ranged<Expression>, Ranged<ParseError>> {\n\n let value = if tokens\n\n .peek_token()\n\n .map_or(false, |token| !token.is_likely_end())\n\n {\n\n config.parse_expression(tokens)?\n", "file_path": "src/compiler/syntax.rs", "rank": 8, "score": 130393.17395998606 }, { "content": "struct Mod;\n\n\n\nimpl Parselet for Mod {\n\n fn token(&self) -> Option<Token> {\n\n None\n\n }\n\n\n\n fn matches(&self, token: &Token, tokens: &mut TokenReader<'_>) -> bool {\n\n matches!(token, Token::Identifier(ident) if ident == Symbol::mod_symbol())\n\n && tokens\n\n .peek_token()\n\n .map_or(false, |t| matches!(t, Token::Identifier(_)))\n\n }\n\n}\n\n\n\nimpl PrefixParselet for Mod {\n\n fn parse_prefix(\n\n &self,\n\n token: Ranged<Token>,\n\n tokens: &mut TokenReader<'_>,\n", "file_path": "src/compiler/syntax.rs", "rank": 9, "score": 130393.17395998606 }, { "content": "struct Pub;\n\n\n\nimpl Parselet for Pub {\n\n fn token(&self) -> Option<Token> {\n\n Some(Token::Identifier(Symbol::pub_symbol().clone()))\n\n }\n\n}\n\n\n\nimpl PrefixParselet for Pub {\n\n fn parse_prefix(\n\n &self,\n\n pub_token: Ranged<Token>,\n\n tokens: &mut TokenReader<'_>,\n\n config: &ParserConfig<'_>,\n\n ) -> Result<Ranged<Expression>, Ranged<ParseError>> {\n\n let keyword_token = tokens.next(ParseError::ExpectedDeclaration)?;\n\n let Token::Identifier(keyword) = &keyword_token.0 else {\n\n return Err(keyword_token.map(|_| ParseError::ExpectedDeclaration));\n\n };\n\n\n", "file_path": "src/compiler/syntax.rs", "rank": 10, "score": 130093.20876054 }, { "content": "trait Source: Send + Sync + Debug + 'static {\n\n fn load(&self, vm: &VmContext<'_, '_>) -> Result<Value, Fault>;\n\n fn as_source(&self, guard: &CollectionGuard<'_>) -> ValueOrSource;\n\n}\n\n\n\nimpl Source for Value {\n\n fn load(&self, _vm: &VmContext<'_, '_>) -> Result<Value, Fault> {\n\n Ok(*self)\n\n }\n\n\n\n fn as_source(&self, guard: &CollectionGuard<'_>) -> ValueOrSource {\n\n match self {\n\n Value::Nil => ValueOrSource::Nil,\n\n Value::Bool(value) => ValueOrSource::Bool(*value),\n\n Value::Int(value) => ValueOrSource::Int(*value),\n\n Value::UInt(value) => ValueOrSource::UInt(*value),\n\n Value::Float(value) => ValueOrSource::Float(*value),\n\n Value::Symbol(value) => value\n\n .upgrade(guard)\n\n .map_or(ValueOrSource::Nil, ValueOrSource::Symbol),\n", "file_path": "src/vm/dispatched.rs", "rank": 11, "score": 129307.46745454487 }, { "content": "struct Fn;\n\n\n\nimpl Fn {\n\n fn parse_function(\n\n publish: Option<Ranged<Token>>,\n\n r#fn: Ranged<Token>,\n\n tokens: &mut TokenReader<'_>,\n\n config: &ParserConfig<'_>,\n\n ) -> Result<Ranged<Expression>, Ranged<ParseError>> {\n\n let name = if let Some(Token::Identifier(name)) = tokens.peek_token() {\n\n Some(tokens.next_or_eof()?.map(|_| name))\n\n } else {\n\n None\n\n };\n\n\n\n let pattern: Ranged<Pattern> = match tokens.peek_token() {\n\n Some(Token::Open(Paired::Paren)) => {\n\n let start = tokens.next_or_eof()?;\n\n\n\n parse_tuple_destructure_pattern(start, Paired::Paren, tokens)?.into()\n", "file_path": "src/compiler/syntax.rs", "rank": 12, "score": 129233.77685341547 }, { "content": "pub fn install(vm: &Vm, guard: &mut CollectionGuard<'_>) {\n\n vm.declare(\n\n \"Dynamic\",\n\n Value::dynamic(\n\n RustFunction::new(|vm: &mut VmContext<'_, '_>, arity| {\n\n if arity == 1 {\n\n Ok(Value::dynamic(\n\n DynamicValue(Dynamic::new(vm[Register(0)].take())),\n\n vm,\n\n ))\n\n } else {\n\n Err(Fault::IncorrectNumberOfArguments)\n\n }\n\n }),\n\n &guard,\n\n ),\n\n guard,\n\n )\n\n .unwrap();\n\n}\n\n\n\nstatic APP: Mutex<Option<App>> = Mutex::new(None);\n\n\n", "file_path": "muse-ui/src/lib.rs", "rank": 13, "score": 118993.37541496765 }, { "content": "/// Converts a value into [`Token`]s.\n\npub trait TokenizeInto {\n\n /// Tokenize `self` into `tokens`.\n\n fn tokenize_into(&self, tokens: &mut VecDeque<Ranged<Token>>);\n\n\n\n /// Returns a list of tokens that could be re-parsed to produce this value.\n\n fn to_tokens(&self) -> VecDeque<Ranged<Token>> {\n\n let mut tokens = VecDeque::new();\n\n self.tokenize_into(&mut tokens);\n\n tokens\n\n }\n\n}\n\n\n\nimpl<T> TokenizeInto for Ranged<T>\n\nwhere\n\n T: TokenizeRanged,\n\n{\n\n fn tokenize_into(&self, tokens: &mut VecDeque<Ranged<Token>>) {\n\n self.0.tokenize_ranged(self.range(), tokens);\n\n }\n\n}\n\n\n", "file_path": "src/compiler/syntax.rs", "rank": 14, "score": 118736.72067162927 }, { "content": "/// A set of arguments that can be loaded into a virtual machine when invoking a\n\n/// function.\n\npub trait InvokeArgs {\n\n /// Loads the arguments into `vm`.\n\n fn load(self, vm: &mut VmContext<'_, '_>) -> Result<Arity, ExecutionError>;\n\n}\n\n\n\nimpl<T, const N: usize> InvokeArgs for [T; N]\n\nwhere\n\n T: Into<Value>,\n\n{\n\n fn load(self, vm: &mut VmContext<'_, '_>) -> Result<Arity, ExecutionError> {\n\n let arity = Arity::try_from(N)\n\n .map_err(|_| ExecutionError::Exception(Fault::InvalidArity.as_exception(vm)))?;\n\n\n\n for (arg, register) in self.into_iter().zip(0..arity.0) {\n\n vm[Register(register)] = arg.into();\n\n }\n\n\n\n Ok(arity)\n\n }\n\n}\n", "file_path": "src/vm.rs", "rank": 15, "score": 118732.14956182896 }, { "content": "/// Summarizes an error's kind.\n\npub trait ErrorKind {\n\n /// Returns the summary of the error being raised.\n\n fn kind(&self) -> &'static str;\n\n}\n\n\n\n/// One or more errors raised during compilation or execution.\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum Error {\n\n /// A list of compilation errors.\n\n Compilation(Vec<Ranged<compiler::Error>>),\n\n /// An execution error.\n\n Execution(vm::ExecutionError),\n\n}\n\n\n\nimpl From<Vec<Ranged<compiler::Error>>> for Error {\n\n fn from(value: Vec<Ranged<compiler::Error>>) -> Self {\n\n Self::Compilation(value)\n\n }\n\n}\n\n\n\nimpl From<vm::ExecutionError> for Error {\n\n fn from(value: vm::ExecutionError) -> Self {\n\n Self::Execution(value)\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 16, "score": 118732.14956182896 }, { "content": "/// A type that contains a list of symbols.\n\npub trait SymbolList {\n\n /// The iterator used for [`into_symbols`](Self::into_symbols).\n\n type Iterator: Iterator<Item = Symbol>;\n\n\n\n /// Returns `self` as an iterator over its contained symbols.\n\n fn into_symbols(self) -> Self::Iterator;\n\n}\n\n\n\n/// An iterator over an array of types that implement [`Into<Symbol>`].\n\npub struct ArraySymbolsIntoIter<T: Into<Symbol>, const N: usize>(array::IntoIter<T, N>);\n\n\n\nimpl<T: Into<Symbol>, const N: usize> Iterator for ArraySymbolsIntoIter<T, N> {\n\n type Item = Symbol;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.0.next().map(T::into)\n\n }\n\n}\n\n\n\nimpl<T: Into<Symbol>, const N: usize> SymbolList for [T; N] {\n", "file_path": "src/runtime/symbol.rs", "rank": 17, "score": 115650.35836070034 }, { "content": "/// A type that can be optionally be converted to a [`Symbol`].\n\npub trait IntoOptionSymbol {\n\n /// Returns this type as an optional symbol.\n\n fn into_symbol(self) -> Option<Symbol>;\n\n}\n\n\n\nimpl<T> IntoOptionSymbol for T\n\nwhere\n\n T: Into<Symbol>,\n\n{\n\n fn into_symbol(self) -> Option<Symbol> {\n\n Some(self.into())\n\n }\n\n}\n\n\n\nimpl IntoOptionSymbol for Option<Symbol> {\n\n fn into_symbol(self) -> Option<Symbol> {\n\n self\n\n }\n\n}\n\n\n", "file_path": "src/runtime/symbol.rs", "rank": 18, "score": 115650.28846947796 }, { "content": "/// Converts a value into a series of [`Token`]s with the provided enclosing\n\n/// range.\n\npub trait TokenizeRanged {\n\n /// Tokenize `self` into `tokens` within the enclosing `range`.\n\n fn tokenize_ranged(&self, range: SourceRange, tokens: &mut VecDeque<Ranged<Token>>);\n\n}\n\n\n\nimpl<T> TokenizeRanged for T\n\nwhere\n\n T: TokenizeInto,\n\n{\n\n fn tokenize_ranged(&self, _range: SourceRange, tokens: &mut VecDeque<Ranged<Token>>) {\n\n self.tokenize_into(tokens);\n\n }\n\n}\n\n\n\n/// A Muse expression.\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum Expression {\n\n /// A reference to the root module (`$`).\n\n RootModule,\n\n /// A literal value.\n", "file_path": "src/compiler/syntax.rs", "rank": 19, "score": 115649.74985107599 }, { "content": "/// Parses a series of tokens into an expression.\n\npub fn parse_tokens(\n\n source: VecDeque<Ranged<Token>>,\n\n) -> Result<Ranged<Expression>, Ranged<ParseError>> {\n\n parse_from_reader(TokenReader::from(source))\n\n}\n\n\n", "file_path": "src/compiler/syntax.rs", "rank": 20, "score": 114928.06518844346 }, { "content": "fn print_errors(errs: Vec<Ranged<muse::compiler::Error>>, sources: &Sources) -> String {\n\n let mut text = Vec::new();\n\n for (index, err) in errs.into_iter().enumerate() {\n\n if index > 0 {\n\n text.push(b'\\n');\n\n }\n\n ariadne::Report::<MuseSpan>::build(\n\n ariadne::ReportKind::Error,\n\n err.1.source_id,\n\n err.1.start,\n\n )\n\n .with_message(err.kind())\n\n .with_label(Label::new(MuseSpan(err.1)).with_message(err.0.to_string()))\n\n .finish()\n\n .write_for_stdout(SourceCache(sources, HashMap::default()), &mut text)\n\n .expect(\"error building report\");\n\n }\n\n String::from_utf8(text).expect(\"invalid utf-8 in error report\")\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 21, "score": 114695.78380058399 }, { "content": "/// Parses source code into an expression.\n\npub fn parse<'a>(\n\n source: impl Into<SourceCode<'a>>,\n\n) -> Result<Ranged<Expression>, Ranged<ParseError>> {\n\n parse_from_reader(TokenReader::new(source))\n\n}\n\n\n", "file_path": "src/compiler/syntax.rs", "rank": 22, "score": 111759.59771852527 }, { "content": "#[test]\n\nfn functions() {\n\n let mut guard = CollectionGuard::acquire();\n\n let func = Value::Dynamic(AnyDynamic::new(\n\n RustFunction::new(|_vm: &mut VmContext<'_, '_>, _arity| Ok(Value::Int(1))),\n\n &guard,\n\n ));\n\n let runtime = crate::vm::Vm::new(&guard);\n\n let Value::Int(i) = func\n\n .call(&mut VmContext::new(&runtime, &mut guard), 0)\n\n .unwrap()\n\n else {\n\n unreachable!()\n\n };\n\n assert_eq!(i, 1);\n\n}\n", "file_path": "src/runtime/value.rs", "rank": 23, "score": 103168.34847080303 }, { "content": "#[test]\n\nfn dynamic() {\n\n impl CustomType for usize {\n\n fn muse_type(&self) -> &TypeRef {\n\n static TYPE: RustType<usize> = RustType::new(\"usize\", RustTypeBuilder::with_clone);\n\n &TYPE\n\n }\n\n }\n\n let guard = CollectionGuard::acquire();\n\n let dynamic = AnyDynamic::new(1_usize, &guard);\n\n assert_eq!(dynamic.downcast_ref::<usize>(&guard), Some(&1));\n\n let dynamic2 = dynamic;\n\n assert_eq!(dynamic, dynamic2);\n\n}\n\n\n", "file_path": "src/runtime/value.rs", "rank": 24, "score": 103168.34847080303 }, { "content": "struct ValueFuture(ArcFuture);\n\n\n\nimpl Clone for ValueFuture {\n\n fn clone(&self) -> Self {\n\n Self(self.0.clone())\n\n }\n\n}\n\n\n\nimpl CustomType for ValueFuture {\n\n fn muse_type(&self) -> &TypeRef {\n\n static TYPE: StaticType = StaticType::new(|| Type::new(\"AsyncValue\"));\n\n &TYPE\n\n }\n\n}\n\n\n\nimpl ContainsNoRefs for ValueFuture {}\n\n\n\nimpl Debug for ValueFuture {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_struct(\"ValueFuture\").finish_non_exhaustive()\n\n }\n\n}\n\n\n", "file_path": "src/runtime/value.rs", "rank": 25, "score": 100929.38822171045 }, { "content": "#[derive(Default, Debug, Clone, Serialize, Deserialize, Eq, PartialEq)]\n\nstruct SourceMapData {\n\n instructions: Vec<InstructionRange>,\n\n}\n\n\n\nimpl SourceMap {\n\n /// Record a new instruction with `range` as its source.\n\n pub(crate) fn push(&mut self, range: SourceRange) {\n\n let map = Arc::make_mut(&mut self.0);\n\n if let Some(inst) = map\n\n .instructions\n\n .last_mut()\n\n .filter(|inst| inst.range == range)\n\n {\n\n inst.instructions.end += 1;\n\n } else {\n\n let instruction = map\n\n .instructions\n\n .last()\n\n .map_or(0, |inst| inst.instructions.end);\n\n\n", "file_path": "src/compiler.rs", "rank": 26, "score": 100841.00896612124 }, { "content": "#[derive(Debug)]\n\nstruct TestMap(Vec<(TestOutput, TestOutput)>);\n\n\n\nimpl PartialEq for TestMap {\n\n fn eq(&self, other: &Self) -> bool {\n\n if self.0.len() != other.0.len() {\n\n return false;\n\n }\n\n\n\n 'outer: for (keya, valuea) in &self.0 {\n\n for (keyb, valueb) in &self.0 {\n\n if keya == keyb {\n\n if valuea == valueb {\n\n continue 'outer;\n\n }\n\n return false;\n\n }\n\n }\n\n }\n\n\n\n true\n", "file_path": "tests/harness.rs", "rank": 27, "score": 96237.08230787165 }, { "content": "pub fn initialize(app: &PendingApp) {\n\n *APP.lock().expect(\"poisoned\") = Some(app.as_app());\n\n}\n\n\n", "file_path": "muse-ui/src/lib.rs", "rank": 28, "score": 96139.45512552347 }, { "content": "trait Destination: Send + Sync + Debug + 'static {\n\n fn store(&self, vm: &mut VmContext<'_, '_>, value: Value) -> Result<(), Fault>;\n\n fn as_dest(&self) -> OpDestination;\n\n}\n\n\n\nimpl Destination for () {\n\n fn store(&self, _vm: &mut VmContext<'_, '_>, _value: Value) -> Result<(), Fault> {\n\n Ok(())\n\n }\n\n\n\n fn as_dest(&self) -> OpDestination {\n\n OpDestination::Void\n\n }\n\n}\n\n\n\nimpl Source for Stack {\n\n fn load(&self, vm: &VmContext<'_, '_>) -> Result<Value, Fault> {\n\n vm.current_frame()\n\n .get(self.0)\n\n .copied()\n", "file_path": "src/vm/dispatched.rs", "rank": 29, "score": 95653.81636299606 }, { "content": "struct MuseSpan(SourceRange);\n\n\n\nimpl Span for MuseSpan {\n\n type SourceId = SourceId;\n\n\n\n fn source(&self) -> &Self::SourceId {\n\n &self.0.source_id\n\n }\n\n\n\n fn start(&self) -> usize {\n\n self.0.start\n\n }\n\n\n\n fn end(&self) -> usize {\n\n self.0.end()\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 30, "score": 93390.93019055661 }, { "content": "struct Precedented<T>(Vec<SharedPredence<T>>);\n\n\n\nimpl<T> Precedented<T> {\n\n pub const fn new() -> Self {\n\n Self(Vec::new())\n\n }\n\n\n\n pub fn push(&mut self, precedence: usize, multi: Vec<T>)\n\n where\n\n T: Parselet,\n\n {\n\n let mut by_token = AHashMap::new();\n\n let mut wildcard = Vec::new();\n\n\n\n for t in multi {\n\n if let Some(token) = t.token() {\n\n by_token.insert(token.clone(), t);\n\n } else {\n\n wildcard.push(t);\n\n }\n\n }\n\n\n\n self.0.push(SharedPredence {\n\n precedence,\n\n by_token,\n\n wildcard,\n\n });\n\n }\n\n}\n\n\n", "file_path": "src/compiler/syntax.rs", "rank": 31, "score": 92642.23185142365 }, { "content": "type ArcAsyncFunction = Arc<\n\n dyn Fn(\n\n &mut VmContext<'_, '_>,\n\n Arity,\n\n ) -> Pin<Box<dyn Future<Output = Result<Value, Fault>> + Send + Sync>>\n\n + Send\n\n + Sync,\n\n>;\n\n\n\n/// An asynchronous Rust function that can be stored in a [`Value`] and called.\n\n#[derive(Clone)]\n\npub struct AsyncFunction(ArcAsyncFunction);\n\n\n\nimpl AsyncFunction {\n\n /// Returns a new function that invokes `function` and awaits the returned\n\n /// future when called.\n\n pub fn new<F, Fut>(function: F) -> Self\n\n where\n\n F: Fn(&mut VmContext<'_, '_>, Arity) -> Fut + Send + Sync + 'static,\n\n Fut: Future<Output = Result<Value, Fault>> + Send + Sync + 'static,\n", "file_path": "src/runtime/value.rs", "rank": 32, "score": 90542.56200279809 }, { "content": "struct SourceCache<'a>(&'a Sources, HashMap<SourceId, ariadne::Source<String>>);\n\n\n\nimpl Cache<SourceId> for SourceCache<'_> {\n\n type Storage = String;\n\n\n\n fn fetch(\n\n &mut self,\n\n id: &SourceId,\n\n ) -> Result<&ariadne::Source<Self::Storage>, Box<dyn std::fmt::Debug + '_>> {\n\n Ok(self.1.entry(*id).or_insert_with(|| {\n\n ariadne::Source::from(self.0.get(*id).expect(\"missing source\").source.clone())\n\n }))\n\n }\n\n\n\n fn display<'a>(&self, id: &'a SourceId) -> Option<Box<dyn std::fmt::Display + 'a>> {\n\n Some(Box::new(id.get().map_or(0, NonZeroUsize::get)))\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 33, "score": 90467.75381035802 }, { "content": "struct Custom<T>(T);\n\n\n\nimpl<T> Trace for Custom<T>\n\nwhere\n\n T: Trace,\n\n{\n\n const MAY_CONTAIN_REFERENCES: bool = T::MAY_CONTAIN_REFERENCES;\n\n\n\n fn trace(&self, tracer: &mut refuse::Tracer) {\n\n self.0.trace(tracer);\n\n }\n\n}\n\n\n\nimpl<T> MapAs for Custom<T>\n\nwhere\n\n T: CustomType,\n\n{\n\n type Target = dyn CustomType;\n\n\n\n fn map_as(&self) -> &Self::Target {\n", "file_path": "src/runtime/value.rs", "rank": 34, "score": 89276.18006932948 }, { "content": "#[derive(Debug)]\n\nstruct Declare<Value, Dest> {\n\n name: Symbol,\n\n mutable: bool,\n\n access: Access,\n\n declaration: Value,\n\n dest: Dest,\n\n}\n\n\n\nimpl<Value, Dest> Instruction for Declare<Value, Dest>\n\nwhere\n\n Value: Source,\n\n Dest: Destination,\n\n{\n\n fn execute(&self, vm: &mut VmContext<'_, '_>) -> Result<ControlFlow<()>, Fault> {\n\n let value = self.declaration.load(vm)?;\n\n vm.declare_inner(self.name.downgrade(), value, self.mutable, self.access)?;\n\n\n\n self.dest.store(vm, value)?;\n\n\n\n Ok(ControlFlow::Continue(()))\n", "file_path": "src/vm/dispatched.rs", "rank": 35, "score": 89276.18006932948 }, { "content": "fn print_exception(exception: &Exception, sources: &Sources) -> String {\n\n let mut text = Vec::new();\n\n\n\n let last_range = exception\n\n .backtrace()\n\n .iter()\n\n .rev()\n\n .find_map(StackFrame::source_range)\n\n .expect(\"missing instruction range\");\n\n let mut report = ariadne::Report::<MuseSpan>::build(\n\n ariadne::ReportKind::Error,\n\n last_range.source_id,\n\n last_range.start,\n\n )\n\n .with_message(format!(\"Exception: {:?}\", exception.value()));\n\n\n\n for range in exception\n\n .backtrace()\n\n .iter()\n\n .rev()\n", "file_path": "src/main.rs", "rank": 36, "score": 88960.06316847651 }, { "content": "struct VmMemory(Mutex<VmState>);\n\n\n\nimpl refuse::Trace for VmMemory {\n\n const MAY_CONTAIN_REFERENCES: bool = true;\n\n\n\n fn trace(&self, tracer: &mut refuse::Tracer) {\n\n let state = self.0.lock();\n\n for register in state\n\n .registers\n\n .iter()\n\n .chain(state.stack[0..state.frames[state.current_frame].end].iter())\n\n .filter_map(Value::as_any_dynamic)\n\n {\n\n tracer.mark(register.0);\n\n }\n\n\n\n for frame in &state.frames {\n\n for key in frame.variables.keys() {\n\n key.trace(tracer);\n\n }\n", "file_path": "src/vm.rs", "rank": 37, "score": 87506.52265096921 }, { "content": "fn linked_dynamic_value<R>(\n\n value: &Value,\n\n guard: &CollectionGuard,\n\n mut map_to: impl FnMut(&Value) -> R + Send + 'static,\n\n map_from: impl FnMut(&R) -> Value + Send + 'static,\n\n) -> CushyValue<R>\n\nwhere\n\n R: PartialEq + Send + 'static,\n\n{\n\n if let Some(dynamic) = value.as_downcast_ref::<DynamicValue>(guard) {\n\n return dynamic.0.linked(map_to, map_from).into_value();\n\n }\n\n\n\n CushyValue::Constant(map_to(value))\n\n}\n\n\n\n// fn map_each_dynamic_value<R>(\n\n// value: &Value,\n\n// mut map: impl FnMut(&Value) -> R + Send + 'static,\n\n// ) -> CushyValue<R>\n", "file_path": "muse-ui/src/lib.rs", "rank": 38, "score": 87067.8087354782 }, { "content": "fn map_dynamic_value<R>(\n\n value: &Value,\n\n guard: &CollectionGuard,\n\n map: impl FnOnce(&Value) -> R,\n\n) -> R {\n\n if let Some(dynamic) = value.as_downcast_ref::<DynamicValue>(guard) {\n\n return dynamic.0.map_ref(map);\n\n }\n\n map(value)\n\n}\n\n\n", "file_path": "muse-ui/src/lib.rs", "rank": 39, "score": 87067.8087354782 }, { "content": "struct Macro(Box<dyn MacroFn>);\n\n\n\nimpl Debug for Macro {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_struct(\"Macro\").finish_non_exhaustive()\n\n }\n\n}\n\n\n", "file_path": "src/compiler.rs", "rank": 40, "score": 84932.11969034142 }, { "content": "fn compile_declare_function<Value, Dest>(\n\n _dest: &OpDestination,\n\n code: &mut CodeData,\n\n _guard: &CollectionGuard<'_>,\n\n f: Value,\n\n name: &Symbol,\n\n mutable: bool,\n\n access: Access,\n\n dest: Dest,\n\n) where\n\n Value: Source,\n\n Dest: Destination,\n\n{\n\n code.push_dispatched(Declare {\n\n name: name.clone(),\n\n mutable,\n\n access,\n\n declaration: f,\n\n dest,\n\n });\n\n}\n\n\n\ndecode_sd!(match_resolve, compile_resolve);\n\n\n", "file_path": "src/vm/dispatched.rs", "rank": 41, "score": 83979.91706575974 }, { "content": "struct InfixMacro(Box<dyn InfixMacroFn>);\n\n\n\nimpl Debug for InfixMacro {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_struct(\"InfixMacro\").finish_non_exhaustive()\n\n }\n\n}\n\n\n\n/// A declaration made within a block.\n\n#[derive(Debug, Clone, Copy)]\n\npub struct BlockDeclaration {\n\n /// The stack location of the value.\n\n pub stack: Stack,\n\n /// If true, this declaration can be reassigned to.\n\n pub mutable: bool,\n\n}\n\n\n", "file_path": "src/compiler.rs", "rank": 42, "score": 79893.30270037078 }, { "content": "fn numeric_kind(value: &Value, guard: &CollectionGuard) -> NumericKind {\n\n map_dynamic_value(value, guard, |value| match value {\n\n Value::Int(_) => NumericKind::Int,\n\n Value::Float(_) => NumericKind::Float,\n\n _ => NumericKind::Unknown,\n\n })\n\n}\n\n\n", "file_path": "muse-ui/src/lib.rs", "rank": 43, "score": 79717.82895852784 }, { "content": "#[allow(clippy::cast_precision_loss)]\n\nfn powf64_u64(base: f64, exp: u64) -> f64 {\n\n if let Ok(exp) = i32::try_from(exp) {\n\n base.powi(exp)\n\n } else {\n\n base.powf(exp as f64)\n\n }\n\n}\n\n\n\nimpl_from!(Value, f32, Float);\n\nimpl_from!(Value, f64, Float);\n\nimpl_from!(Value, i8, Int);\n\nimpl_from!(Value, i16, Int);\n\nimpl_from!(Value, i32, Int);\n\nimpl_from!(Value, i64, Int);\n\nimpl_from!(Value, u8, UInt);\n\nimpl_from!(Value, u16, UInt);\n\nimpl_from!(Value, u32, UInt);\n\nimpl_from!(Value, u64, UInt);\n\nimpl_from!(Value, bool, Bool);\n\nimpl_from!(Value, Symbol, Symbol);\n", "file_path": "src/runtime/value.rs", "rank": 44, "score": 73870.26025830985 }, { "content": "#[allow(clippy::cast_precision_loss)]\n\nfn powf64_i64(base: f64, exp: i64) -> f64 {\n\n if let Ok(exp) = i32::try_from(exp) {\n\n base.powi(exp)\n\n } else {\n\n base.powf(exp as f64)\n\n }\n\n}\n\n\n", "file_path": "src/runtime/value.rs", "rank": 45, "score": 73870.26025830985 }, { "content": "type ArcFuture = Arc<Mutex<Pin<Box<dyn Future<Output = Result<Value, Fault>> + Send + Sync>>>>;\n\n\n", "file_path": "src/runtime/value.rs", "rank": 46, "score": 68537.74385843622 }, { "content": "#[test]\n\nfn macros() {\n\n let mut guard = CollectionGuard::acquire();\n\n let code = Compiler::default()\n\n .with_macro(\"$test\", |mut tokens: VecDeque<Ranged<Token>>| {\n\n assert_eq!(tokens[0].0, Token::Open(Paired::Paren));\n\n tokens.insert(2, Ranged::new(SourceRange::default(), Token::Char('+')));\n\n assert_eq!(tokens[4].0, Token::Close(Paired::Paren));\n\n dbg!(tokens)\n\n })\n\n .with(\n\n r\"\n\n let hello = 5;\n\n let world = 3;\n\n $test(hello world)\n\n \",\n\n )\n\n .build(&guard)\n\n .unwrap();\n\n let vm = Vm::new(&guard);\n\n let result = vm.execute(&code, &mut guard).unwrap().as_u64();\n\n assert_eq!(result, Some(8));\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 47, "score": 66502.68740790956 }, { "content": "fn main() {\n\n let filter = std::env::args().nth(1).unwrap_or_default();\n\n // let filter = String::from(\"mod_multi\");\n\n for entry in std::fs::read_dir(\"tests/cases\").unwrap() {\n\n let entry = entry.unwrap().path();\n\n if entry.extension().map_or(false, |ext| ext == \"rsn\") {\n\n run_test_cases(&entry, filter.trim());\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/harness.rs", "rank": 48, "score": 66502.68740790956 }, { "content": "fn main() {\n\n let mut editor: Editor<Muse, DefaultHistory> = Editor::new().unwrap();\n\n let config_dir =\n\n dirs::config_local_dir().map_or_else(|| PathBuf::from(\".muse\"), |dir| dir.join(\"muse\"));\n\n let _err = std::fs::create_dir_all(&config_dir);\n\n let history_path = config_dir.join(\"history\");\n\n let _err = editor.load_history(&history_path);\n\n editor.set_auto_add_history(true);\n\n editor.set_helper(Some(Muse));\n\n let mut guard = CollectionGuard::acquire();\n\n let vm = Vm::new(&guard);\n\n let mut sources = Sources::default();\n\n let mut compiler = Compiler::default();\n\n loop {\n\n let line_num = sources.len() + 1;\n\n match editor.readline(&format!(\"{line_num}> \")) {\n\n Ok(line) => {\n\n let _err = editor.append_history(&history_path);\n\n let source = sources.push(line_num.to_string(), line);\n\n compiler.push(source);\n", "file_path": "src/main.rs", "rank": 49, "score": 66502.68740790956 }, { "content": "fn main() {\n\n let mut guard = CollectionGuard::acquire();\n\n dbg!(Vm::new(&guard)\n\n .compile_and_execute(include_str!(\"fib.muse\"), &mut guard)\n\n .unwrap());\n\n}\n", "file_path": "examples/fib.rs", "rank": 50, "score": 66502.68740790956 }, { "content": "#[test]\n\nfn invoke() {\n\n let mut guard = CollectionGuard::acquire();\n\n let code = Compiler::compile(\n\n r\"\n\n pub fn test(n) => n * 2;\n\n fn private(n) => n * 2;\n\n \",\n\n &guard,\n\n )\n\n .unwrap();\n\n let vm = Vm::new(&guard);\n\n vm.execute(&code, &mut guard).unwrap();\n\n\n\n let Value::Int(result) = vm.invoke(\"test\", [Value::Int(3)], &mut guard).unwrap() else {\n\n unreachable!()\n\n };\n\n assert_eq!(result, 6);\n\n let ExecutionError::Exception(exception) = vm\n\n .invoke(\"private\", [Value::Int(3)], &mut guard)\n\n .unwrap_err()\n", "file_path": "src/tests.rs", "rank": 51, "score": 66502.68740790956 }, { "content": "fn main() {\n\n let mut guard = CollectionGuard::acquire();\n\n let vm = Vm::new(&guard);\n\n let mut sources = Sources::default();\n\n let source = sources.push(\n\n \"definition\",\n\n r\"\n\n pub fn muse_function(n) {\n\n n * 2\n\n }\n\n \",\n\n );\n\n if let Err(err) = vm.compile_and_execute(source, &mut guard) {\n\n let mut formatted = String::new();\n\n sources\n\n .format_error(err, &mut vm.context(&mut guard), &mut formatted)\n\n .unwrap();\n\n eprintln!(\"{formatted}\");\n\n return;\n\n }\n\n\n\n assert_eq!(\n\n vm.invoke(\"muse_function\", [21], &mut guard)\n\n .unwrap()\n\n .as_i64(),\n\n Some(42)\n\n );\n\n}\n", "file_path": "examples/invoke.rs", "rank": 52, "score": 66502.68740790956 }, { "content": "fn main() {\n\n let (input_sender, input_receiver) = flume::unbounded();\n\n let (output_sender, output_receiver) = flume::unbounded();\n\n let mut guard = CollectionGuard::acquire();\n\n\n\n std::thread::spawn(move || {\n\n while let Ok(input) = input_receiver.recv() {\n\n output_sender.send(input + 1).unwrap();\n\n }\n\n });\n\n\n\n let async_func = AsyncFunction::new(move |vm: &mut VmContext<'_, '_>, _arity: Arity| {\n\n input_sender\n\n .send(vm[Register(0)].as_i64().expect(\"invalid arg\"))\n\n .unwrap();\n\n let output_receiver = output_receiver.clone();\n\n async move { Ok::<_, Fault>(Value::Int(output_receiver.recv_async().await.unwrap())) }\n\n });\n\n\n\n let code = Compiler::compile(\n", "file_path": "examples/async.rs", "rank": 53, "score": 66502.68740790956 }, { "content": "fn main() {\n\n let filter = std::env::args().nth(1).unwrap_or_default();\n\n // let filter = String::from(\"raw_format\");\n\n for entry in std::fs::read_dir(\"tests/cases\").unwrap() {\n\n let entry = entry.unwrap().path();\n\n if entry.extension().map_or(false, |ext| ext == \"muse\") {\n\n run_test_cases(&entry, filter.trim());\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/hosted.rs", "rank": 54, "score": 66502.68740790956 }, { "content": "#[test]\n\nfn budgeting() {\n\n const COUNT_TO: i64 = 42;\n\n\n\n let mut guard = CollectionGuard::acquire();\n\n let mut code = BitcodeBlock::default();\n\n for value in 0..=COUNT_TO {\n\n code.copy(value, Register(0));\n\n }\n\n let code = code.to_code(&guard);\n\n let vm = Vm::new(&guard);\n\n // Turn on budgeting, but don't give any budget.\n\n vm.set_steps_per_charge(1);\n\n vm.increase_budget(0);\n\n assert_eq!(\n\n vm.execute(&code, &mut guard).unwrap_err(),\n\n ExecutionError::NoBudget\n\n );\n\n for value in 0..=COUNT_TO {\n\n // Step through by allowing one op at a time.\n\n vm.increase_budget(1);\n\n assert_eq!(vm.resume(&mut guard).unwrap_err(), ExecutionError::NoBudget);\n\n assert_eq!(vm.register(Register(0)).as_i64(), Some(value));\n\n }\n\n vm.increase_budget(1);\n\n assert_eq!(vm.resume(&mut guard).unwrap().as_i64(), Some(COUNT_TO));\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 55, "score": 66502.68740790956 }, { "content": "#[derive(Default, Debug, Clone)]\n\nstruct Frame {\n\n start: usize,\n\n end: usize,\n\n instruction: usize,\n\n code: Option<CodeIndex>,\n\n variables: Map<SymbolRef, BlockDeclaration>,\n\n module: usize,\n\n loading_module: Option<NonZeroUsize>,\n\n exception_handler: Option<NonZeroUsize>,\n\n}\n\n\n\nimpl Frame {\n\n fn clear(&mut self) {\n\n self.variables.clear();\n\n self.instruction = usize::MAX;\n\n self.code = None;\n\n self.module = 0;\n\n self.loading_module = None;\n\n self.exception_handler = None;\n\n }\n", "file_path": "src/vm.rs", "rank": 56, "score": 66390.20824984447 }, { "content": "struct If;\n\n\n\nimpl Parselet for If {\n\n fn token(&self) -> Option<Token> {\n\n Some(Token::Identifier(Symbol::if_symbol().clone()))\n\n }\n\n}\n\n\n\nimpl PrefixParselet for If {\n\n fn parse_prefix(\n\n &self,\n\n r#if: Ranged<Token>,\n\n tokens: &mut TokenReader<'_>,\n\n config: &ParserConfig<'_>,\n\n ) -> Result<Ranged<Expression>, Ranged<ParseError>> {\n\n let condition = config.parse_expression(tokens)?;\n\n let brace_or_then = tokens.next(ParseError::ExpectedThenOrBrace)?;\n\n let (then, when_true) = match &brace_or_then.0 {\n\n Token::Open(Paired::Brace) => {\n\n (None, Braces.parse_prefix(brace_or_then, tokens, config)?)\n", "file_path": "src/compiler/syntax.rs", "rank": 57, "score": 66380.45339590893 }, { "content": "struct For;\n\n\n\nimpl Parselet for For {\n\n fn token(&self) -> Option<Token> {\n\n None\n\n }\n\n\n\n fn matches(&self, token: &Token, tokens: &mut TokenReader<'_>) -> bool {\n\n matches!(token, Token::Identifier(ident) if ident == Symbol::for_symbol())\n\n && tokens.peek_token().map_or(false, |t| {\n\n matches!(t, Token::Open(_) | Token::Identifier(_))\n\n })\n\n }\n\n}\n\n\n\nimpl PrefixParselet for For {\n\n fn parse_prefix(\n\n &self,\n\n for_token: Ranged<Token>,\n\n tokens: &mut TokenReader<'_>,\n", "file_path": "src/compiler/syntax.rs", "rank": 58, "score": 66380.45339590893 }, { "content": "struct While;\n\n\n\nimpl Parselet for While {\n\n fn token(&self) -> Option<Token> {\n\n Some(Token::Identifier(Symbol::while_symbol().clone()))\n\n }\n\n}\n\n\n\nimpl PrefixParselet for While {\n\n fn parse_prefix(\n\n &self,\n\n token: Ranged<Token>,\n\n tokens: &mut TokenReader<'_>,\n\n config: &ParserConfig<'_>,\n\n ) -> Result<Ranged<Expression>, Ranged<ParseError>> {\n\n let condition = config.parse_expression(tokens)?;\n\n\n\n let block = parse_block(tokens, config)?;\n\n Ok(tokens.ranged(\n\n token.range().start..,\n\n Expression::Loop(Box::new(LoopExpression {\n\n token,\n\n kind: LoopKind::While(condition),\n\n block,\n\n })),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/compiler/syntax.rs", "rank": 59, "score": 66380.45339590893 }, { "content": "struct Muse;\n\n\n\nimpl Helper for Muse {}\n\n\n\nimpl Validator for Muse {\n\n fn validate(&self, ctx: &mut ValidationContext) -> rustyline::Result<ValidationResult> {\n\n let mut sources = Sources::default();\n\n let source = sources.push(\"\", ctx.input().to_string());\n\n match parse(source) {\n\n Ok(_) => Ok(ValidationResult::Valid(None)),\n\n Err(Ranged(\n\n muse::compiler::syntax::ParseError::UnexpectedEof\n\n | muse::compiler::syntax::ParseError::MissingEnd(_),\n\n _,\n\n )) => Ok(ValidationResult::Incomplete),\n\n Err(err) => {\n\n let mut errors = print_errors(vec![err.into()], &sources);\n\n errors.insert(0, '\\n');\n\n Ok(ValidationResult::Invalid(Some(errors)))\n\n }\n", "file_path": "src/main.rs", "rank": 60, "score": 66380.45339590893 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Case {\n\n pub src: String,\n\n pub output: TestOutput,\n\n #[serde(default)]\n\n pub ignore: bool,\n\n}\n\n\n\nimpl Case {\n\n fn run(&self) -> (Option<Code>, TestOutput) {\n\n let mut guard = CollectionGuard::acquire();\n\n match Compiler::compile(&self.src, &guard) {\n\n Ok(code) => match Vm::new(&guard).execute(&code, &mut guard) {\n\n Ok(value) => (Some(code), TestOutput::from(value)),\n\n Err(fault) => (Some(code), TestOutput::Fault(VmFault::from(fault))),\n\n },\n\n Err(err) => (None, TestOutput::Error(err)),\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/harness.rs", "rank": 61, "score": 66380.45339590893 }, { "content": "trait Parselet {\n\n fn token(&self) -> Option<Token>;\n\n\n\n #[allow(unused_variables)]\n\n fn matches(&self, token: &Token, tokens: &mut TokenReader<'_>) -> bool {\n\n matches!(self.token(), Some(token))\n\n }\n\n}\n\n\n\nimpl<T> Parselet for Box<T>\n\nwhere\n\n T: Parselet + ?Sized,\n\n{\n\n fn matches(&self, token: &Token, tokens: &mut TokenReader<'_>) -> bool {\n\n T::matches(self, token, tokens)\n\n }\n\n\n\n fn token(&self) -> Option<Token> {\n\n T::token(self)\n\n }\n\n}\n\n\n", "file_path": "src/compiler/syntax.rs", "rank": 62, "score": 65518.37439294952 }, { "content": "fn main() {\n\n let mut guard = CollectionGuard::acquire();\n\n let code = Compiler::compile(\"is_even(42)\", &guard).unwrap();\n\n\n\n let vm = Vm::new(&guard);\n\n let mut context = VmContext::new(&vm, &mut guard);\n\n context\n\n .declare(\n\n Symbol::from(\"is_even\"),\n\n Value::dynamic(\n\n RustFunction::new(|vm: &mut VmContext<'_, '_>, arity| {\n\n assert_eq!(arity, 1);\n\n\n\n let arg = &vm[Register(0)];\n\n println!(\"Called with {arg:?}\");\n\n\n\n if let Some(int) = arg.as_i64() {\n\n Ok(Value::Bool(int % 2 == 0))\n\n } else {\n\n Err(Fault::UnsupportedOperation)\n\n }\n\n }),\n\n context.guard(),\n\n ),\n\n )\n\n .unwrap();\n\n assert_eq!(context.execute(&code).unwrap(), Value::Bool(true));\n\n}\n", "file_path": "examples/rust-function.rs", "rank": 63, "score": 64677.75802553304 }, { "content": "#[test]\n\nfn recursive_macros() {\n\n let mut guard = CollectionGuard::acquire();\n\n let code = Compiler::default()\n\n .with_macro(\"$inner\", |mut tokens: VecDeque<Ranged<Token>>| {\n\n assert_eq!(tokens[0].0, Token::Open(Paired::Paren));\n\n tokens.insert(2, Ranged::new(SourceRange::default(), Token::Char('+')));\n\n assert_eq!(tokens[4].0, Token::Close(Paired::Paren));\n\n dbg!(tokens)\n\n })\n\n .with_macro(\"$test\", |mut tokens: VecDeque<Ranged<Token>>| {\n\n tokens.insert(\n\n 0,\n\n Ranged::new(SourceRange::default(), Token::Sigil(Symbol::from(\"$inner\"))),\n\n );\n\n tokens\n\n })\n\n .with(\n\n r\"\n\n let hello = 5;\n\n let world = 3;\n\n $test(hello world)\n\n \",\n\n )\n\n .build(&guard)\n\n .unwrap();\n\n let vm = Vm::new(&guard);\n\n let result = vm.execute(&code, &mut guard).unwrap().as_u64();\n\n assert_eq!(result, Some(8));\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 64, "score": 64677.75802553304 }, { "content": "fn main() {\n\n let mut guard = CollectionGuard::acquire();\n\n let vm = Vm::new(&guard);\n\n let mut fib = BitcodeBlock::default();\n\n\n\n // Special case two or less\n\n let two_or_less = fib.new_label();\n\n fib.compare(LessThanOrEqual, R(0), 2, two_or_less);\n\n\n\n // Calculate n - 1, store in R0\n\n fib.sub(R(0), 1, R(0));\n\n // Calculate n - 2, store on the stack.\n\n let temporary = fib.new_variable();\n\n fib.sub(R(0), 1, temporary);\n\n // Recurse, calculating fib(n - 1), storing in R1.\n\n fib.call((), 1);\n\n fib.copy(R(0), R(1));\n\n // Move n - 2 into R0\n\n fib.copy(temporary, R(0));\n\n // Move fib(n - 1) into temporary.\n", "file_path": "examples/fib-vm.rs", "rank": 65, "score": 64677.75802553304 }, { "content": "fn main() {\n\n let mut guard = CollectionGuard::acquire();\n\n let runtime = Dynamic::new(Runtime {\n\n vm: Vm::new(&guard).with_ui(&mut guard),\n\n compiler: Compiler::default(),\n\n sources: Sources::default(),\n\n });\n\n let history = Dynamic::<Vec<History>>::default();\n\n let history_list = history.map_each({\n\n let runtime = runtime.clone();\n\n move |history| {\n\n history\n\n .iter()\n\n .map(|i| i.make_widget(&runtime))\n\n .collect::<WidgetList>()\n\n }\n\n });\n\n\n\n let input = Dynamic::<String>::default();\n\n let parsed = input\n", "file_path": "amuse/src/main.rs", "rank": 66, "score": 64677.75802553304 }, { "content": "#[test]\n\nfn module_budgeting() {\n\n const MAX_OPS: usize = 24;\n\n let mut guard = CollectionGuard::acquire();\n\n let code = Compiler::compile(\n\n r\"\n\n mod foo {\n\n pub var a = 1;\n\n a = a + 1;\n\n a = a + 1;\n\n a = a + 1;\n\n a = a + 1;\n\n };\n\n\n\n foo.a\n\n \",\n\n &guard,\n\n )\n\n .unwrap();\n\n let vm = Vm::new(&guard);\n\n // Turn on budgeting, but don't give any budget.\n", "file_path": "src/tests.rs", "rank": 67, "score": 64677.75802553304 }, { "content": "#[test]\n\nfn infix_macros() {\n\n let mut guard = CollectionGuard::acquire();\n\n let code = Compiler::default()\n\n .with_infix_macro(\n\n \"$test\",\n\n |expr: &Ranged<Expression>, mut tokens: VecDeque<Ranged<Token>>| {\n\n let mut expr = expr.to_tokens();\n\n\n\n assert_eq!(tokens[0].0, Token::Open(Paired::Paren));\n\n assert_eq!(tokens[1].0, Token::Close(Paired::Paren));\n\n\n\n let close = tokens.pop_back().unwrap();\n\n\n\n tokens.append(&mut expr);\n\n\n\n tokens.push_back(Ranged::new(SourceRange::default(), Token::Char('+')));\n\n tokens.push_back(Ranged::new(SourceRange::default(), Token::Int(1)));\n\n tokens.push_back(close);\n\n dbg!(tokens)\n\n },\n", "file_path": "src/tests.rs", "rank": 68, "score": 64677.75802553304 }, { "content": "#[derive(Default, Debug, Clone)]\n\nstruct CodeData {\n\n instructions: Vec<Inst>,\n\n labels: Vec<usize>,\n\n regexes: Vec<PrecompiledRegex>,\n\n stack_requirement: usize,\n\n symbols: Vec<Symbol>,\n\n known_symbols: AHashMap<Symbol, usize>,\n\n functions: Vec<BitcodeFunction>,\n\n modules: Vec<BitcodeModule>,\n\n map: SourceMap,\n\n}\n\n\n\nimpl CodeData {\n\n #[allow(clippy::too_many_lines)]\n\n pub fn push(&mut self, op: &Op, range: SourceRange, guard: &CollectionGuard) {\n\n match op {\n\n Op::Return => self.push_loaded(LoadedOp::Return, range, guard),\n\n Op::Label(label) => {\n\n if self.labels.len() <= label.0 {\n\n self.labels.resize(label.0 + 1, usize::MAX);\n", "file_path": "src/vm.rs", "rank": 69, "score": 64565.77368181796 }, { "content": "#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)]\n\nstruct InstructionRange {\n\n range: SourceRange,\n\n instructions: Range<usize>,\n\n}\n\n\n", "file_path": "src/compiler.rs", "rank": 70, "score": 64565.24998060306 }, { "content": "#[derive(Default)]\n\nstruct PatternBindings {\n\n publish: bool,\n\n mutable: bool,\n\n bound_names: Set<Symbol>,\n\n}\n\n\n\n/// A mapping of [`SourceRange`]s to instruction addresses.\n\n#[derive(Default, Debug, Clone, Serialize, Deserialize, Eq, PartialEq)]\n\npub struct SourceMap(Arc<SourceMapData>);\n\n\n", "file_path": "src/compiler.rs", "rank": 71, "score": 64561.13833868806 }, { "content": "#[derive(Clone)]\n\nstruct RegisteredCode {\n\n code: Code,\n\n owner: Option<Rooted<Function>>,\n\n}\n\n\n", "file_path": "src/vm.rs", "rank": 72, "score": 64560.9431979779 }, { "content": "#[derive(Debug, Clone)]\n\nstruct PrecompiledRegex {\n\n literal: RegexLiteral,\n\n result: Result<Value, Fault>,\n\n}\n", "file_path": "src/vm.rs", "rank": 73, "score": 64560.87131077107 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq)]\n\nstruct LoadedBinary {\n\n op1: LoadedSource,\n\n op2: LoadedSource,\n\n dest: OpDestination,\n\n}\n\n\n\nimpl LoadedBinary {\n\n #[cfg(not(feature = \"dispatched\"))]\n\n fn as_op(&self, kind: BinaryKind, code: &Code) -> Op {\n\n Op::BinOp {\n\n op1: trusted_loaded_source_to_value(&self.op1, &code.data),\n\n op2: trusted_loaded_source_to_value(&self.op2, &code.data),\n\n dest: self.dest,\n\n kind,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/vm.rs", "rank": 74, "score": 64560.66771391003 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq)]\n\nstruct LoadedUnary {\n\n op: LoadedSource,\n\n dest: OpDestination,\n\n}\n\n\n\nimpl LoadedUnary {\n\n #[cfg(not(feature = \"dispatched\"))]\n\n fn as_op(&self, kind: UnaryKind, code: &Code) -> Op {\n\n Op::Unary {\n\n op: trusted_loaded_source_to_value(&self.op, &code.data),\n\n dest: self.dest,\n\n kind,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/vm.rs", "rank": 75, "score": 64560.66771391003 }, { "content": "struct Continue;\n\n\n\nimpl Parselet for Continue {\n\n fn token(&self) -> Option<Token> {\n\n Some(Token::Identifier(Symbol::continue_symbol().clone()))\n\n }\n\n}\n\n\n\nimpl PrefixParselet for Continue {\n\n fn parse_prefix(\n\n &self,\n\n r#continue: Ranged<Token>,\n\n tokens: &mut TokenReader<'_>,\n\n _config: &ParserConfig<'_>,\n\n ) -> Result<Ranged<Expression>, Ranged<ParseError>> {\n\n let name = if tokens\n\n .peek_token()\n\n .map_or(false, |token| matches!(token, Token::Label(_)))\n\n {\n\n let label_token = tokens.next_or_eof()?;\n", "file_path": "src/compiler/syntax.rs", "rank": 76, "score": 64556.018827882435 }, { "content": "struct Labeled;\n\n\n\nimpl Parselet for Labeled {\n\n fn token(&self) -> Option<Token> {\n\n None\n\n }\n\n\n\n fn matches(&self, token: &Token, tokens: &mut TokenReader<'_>) -> bool {\n\n matches!(token, Token::Label(_))\n\n && tokens\n\n .peek_token()\n\n .map_or(false, |token| matches!(token, Token::Char(':')))\n\n }\n\n}\n\n\n\nimpl PrefixParselet for Labeled {\n\n fn parse_prefix(\n\n &self,\n\n label_token: Ranged<Token>,\n\n tokens: &mut TokenReader<'_>,\n", "file_path": "src/compiler/syntax.rs", "rank": 77, "score": 64556.018827882435 }, { "content": "struct Throw;\n\n\n\nimpl Parselet for Throw {\n\n fn token(&self) -> Option<Token> {\n\n Some(Token::Identifier(Symbol::throw_symbol().clone()))\n\n }\n\n}\n\n\n\nimpl PrefixParselet for Throw {\n\n fn parse_prefix(\n\n &self,\n\n throw: Ranged<Token>,\n\n tokens: &mut TokenReader<'_>,\n\n config: &ParserConfig<'_>,\n\n ) -> Result<Ranged<Expression>, Ranged<ParseError>> {\n\n let value = if tokens\n\n .peek_token()\n\n .map_or(false, |token| !token.is_likely_end())\n\n {\n\n config.parse_expression(tokens)?\n", "file_path": "src/compiler/syntax.rs", "rank": 78, "score": 64556.018827882435 }, { "content": "struct Assign;\n\n\n\nimpl Parselet for Assign {\n\n fn token(&self) -> Option<Token> {\n\n Some(Token::Char('='))\n\n }\n\n}\n\n\n\nimpl InfixParselet for Assign {\n\n fn parse(\n\n &self,\n\n lhs: Ranged<Expression>,\n\n token: Ranged<Token>,\n\n tokens: &mut TokenReader<'_>,\n\n config: &ParserConfig<'_>,\n\n ) -> Result<Ranged<Expression>, Ranged<ParseError>> {\n\n match lhs.0 {\n\n Expression::Lookup(lookup) => {\n\n let value = config.parse(tokens)?;\n\n Ok(tokens.ranged(\n", "file_path": "src/compiler/syntax.rs", "rank": 79, "score": 64556.018827882435 }, { "content": "#[derive(Debug)]\n\nstruct ScopeInfo {\n\n kind: ScopeKind,\n\n name: Option<Symbol>,\n\n break_info: Option<(Label, OpDestination)>,\n\n continue_label: Option<Label>,\n\n}\n\n\n", "file_path": "src/compiler.rs", "rank": 80, "score": 64556.018827882435 }, { "content": "#[derive(Debug)]\n\nstruct ModuleDeclaration {\n\n access: Access,\n\n mutable: bool,\n\n value: Value,\n\n}\n\n\n", "file_path": "src/vm.rs", "rank": 81, "score": 64556.018827882435 }, { "content": "struct Term;\n\n\n\nimpl Parselet for Term {\n\n fn token(&self) -> Option<Token> {\n\n None\n\n }\n\n\n\n fn matches(&self, token: &Token, _tokens: &mut TokenReader<'_>) -> bool {\n\n matches!(\n\n token,\n\n Token::Int(_)\n\n | Token::UInt(_)\n\n | Token::Float(_)\n\n | Token::Identifier(_)\n\n | Token::Regex(_)\n\n | Token::String(_)\n\n | Token::Symbol(_)\n\n | Token::Sigil(_)\n\n | Token::FormatString(_)\n\n )\n", "file_path": "src/compiler/syntax.rs", "rank": 82, "score": 64556.018827882435 }, { "content": "struct Dot;\n\n\n\nimpl Parselet for Dot {\n\n fn token(&self) -> Option<Token> {\n\n Some(Token::Char('.'))\n\n }\n\n}\n\n\n\nimpl InfixParselet for Dot {\n\n fn parse(\n\n &self,\n\n lhs: Ranged<Expression>,\n\n dot: Ranged<Token>,\n\n tokens: &mut TokenReader<'_>,\n\n _config: &ParserConfig<'_>,\n\n ) -> Result<Ranged<Expression>, Ranged<ParseError>> {\n\n let name_token = tokens.next(ParseError::ExpectedName)?;\n\n let Token::Identifier(name) = name_token.0 else {\n\n return Err(Ranged::new(name_token.1, ParseError::ExpectedName));\n\n };\n", "file_path": "src/compiler/syntax.rs", "rank": 83, "score": 64556.018827882435 }, { "content": "struct Parentheses;\n\n\n\nimpl Parselet for Parentheses {\n\n fn token(&self) -> Option<Token> {\n\n Some(Token::Open(Paired::Paren))\n\n }\n\n}\n\n\n\nimpl PrefixParselet for Parentheses {\n\n fn parse_prefix(\n\n &self,\n\n open: Ranged<Token>,\n\n tokens: &mut TokenReader<'_>,\n\n config: &ParserConfig<'_>,\n\n ) -> Result<Ranged<Expression>, Ranged<ParseError>> {\n\n let expression = config.parse_expression(tokens)?;\n\n\n\n let end_paren = tokens.next(ParseError::MissingEnd(Paired::Paren))?;\n\n if end_paren.0 == Token::Close(Paired::Paren) {\n\n Ok(tokens.ranged(\n", "file_path": "src/compiler/syntax.rs", "rank": 84, "score": 64556.018827882435 }, { "content": "struct MapVisitor;\n\n\n\nimpl<'de> Visitor<'de> for MapVisitor {\n\n type Value = TestMap;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(formatter, \"a map\")\n\n }\n\n\n\n fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>\n\n where\n\n A: serde::de::MapAccess<'de>,\n\n {\n\n let mut out = TestMap(Vec::with_capacity(map.size_hint().unwrap_or_default()));\n\n while let Some((key, value)) = map.next_entry()? {\n\n out.0.push((key, value));\n\n }\n\n\n\n Ok(out)\n\n }\n\n}\n", "file_path": "tests/harness.rs", "rank": 85, "score": 64556.018827882435 }, { "content": "struct Loop;\n\n\n\nimpl Parselet for Loop {\n\n fn token(&self) -> Option<Token> {\n\n None\n\n }\n\n\n\n fn matches(&self, token: &Token, tokens: &mut TokenReader<'_>) -> bool {\n\n matches!(token, Token::Identifier(ident) if ident == Symbol::loop_symbol())\n\n && tokens\n\n .peek_token()\n\n .map_or(false, |t| matches!(t, Token::Open(Paired::Brace)))\n\n }\n\n}\n\n\n\nimpl PrefixParselet for Loop {\n\n fn parse_prefix(\n\n &self,\n\n token: Ranged<Token>,\n\n tokens: &mut TokenReader<'_>,\n", "file_path": "src/compiler/syntax.rs", "rank": 86, "score": 64556.018827882435 }, { "content": "struct LocalDeclaration {\n\n name: Symbol,\n\n previous_declaration: Option<BlockDeclaration>,\n\n}\n\n\n", "file_path": "src/compiler.rs", "rank": 87, "score": 64556.018827882435 }, { "content": "struct Try;\n\n\n\nimpl Parselet for Try {\n\n fn token(&self) -> Option<Token> {\n\n Some(Token::Identifier(Symbol::try_symbol().clone()))\n\n }\n\n}\n\n\n\nimpl PrefixParselet for Try {\n\n fn parse_prefix(\n\n &self,\n\n r#try: Ranged<Token>,\n\n tokens: &mut TokenReader<'_>,\n\n config: &ParserConfig<'_>,\n\n ) -> Result<Ranged<Expression>, Ranged<ParseError>> {\n\n let body = config.parse_expression(tokens)?;\n\n\n\n let catch =\n\n if tokens.peek_token() == Some(Token::Identifier(Symbol::catch_symbol().clone())) {\n\n let catch_token = tokens.next_or_eof()?;\n", "file_path": "src/compiler/syntax.rs", "rank": 88, "score": 64556.018827882435 }, { "content": "struct Break;\n\n\n\nimpl Parselet for Break {\n\n fn token(&self) -> Option<Token> {\n\n Some(Token::Identifier(Symbol::break_symbol().clone()))\n\n }\n\n}\n\n\n\nimpl PrefixParselet for Break {\n\n fn parse_prefix(\n\n &self,\n\n r#break: Ranged<Token>,\n\n tokens: &mut TokenReader<'_>,\n\n config: &ParserConfig<'_>,\n\n ) -> Result<Ranged<Expression>, Ranged<ParseError>> {\n\n let name = if tokens\n\n .peek_token()\n\n .map_or(false, |token| matches!(token, Token::Label(_)))\n\n {\n\n let label_token = tokens.next_or_eof()?;\n", "file_path": "src/compiler/syntax.rs", "rank": 89, "score": 64556.018827882435 }, { "content": "struct Braces;\n\n\n", "file_path": "src/compiler/syntax.rs", "rank": 90, "score": 64556.018827882435 }, { "content": "struct Match;\n\n\n\nimpl Parselet for Match {\n\n fn token(&self) -> Option<Token> {\n\n Some(Token::Identifier(Symbol::match_symbol().clone()))\n\n }\n\n\n\n fn matches(&self, token: &Token, tokens: &mut TokenReader<'_>) -> bool {\n\n matches!(token, Token::Identifier(ident) if ident == Symbol::for_symbol())\n\n && tokens\n\n .peek_token()\n\n .map_or(false, |t| Term.matches(&t, tokens))\n\n }\n\n}\n\n\n\nimpl PrefixParselet for Match {\n\n fn parse_prefix(\n\n &self,\n\n r#match: Ranged<Token>,\n\n tokens: &mut TokenReader<'_>,\n", "file_path": "src/compiler/syntax.rs", "rank": 91, "score": 64556.018827882435 }, { "content": "struct Brackets;\n\n\n\nimpl Parselet for Brackets {\n\n fn token(&self) -> Option<Token> {\n\n Some(Token::Open(Paired::Bracket))\n\n }\n\n}\n\n\n\nimpl PrefixParselet for Brackets {\n\n fn parse_prefix(\n\n &self,\n\n open: Ranged<Token>,\n\n tokens: &mut TokenReader<'_>,\n\n config: &ParserConfig<'_>,\n\n ) -> Result<Ranged<Expression>, Ranged<ParseError>> {\n\n let mut expressions = Delimited::<_, Ranged<Token>>::build_empty();\n\n\n\n let (_, close) = parse_paired(\n\n Paired::Bracket,\n\n &Token::Char(','),\n", "file_path": "src/compiler/syntax.rs", "rank": 92, "score": 64556.018827882435 }, { "content": "struct Runtime {\n\n vm: Vm,\n\n compiler: Compiler,\n\n sources: Sources,\n\n}\n\n\n", "file_path": "amuse/src/main.rs", "rank": 93, "score": 64556.018827882435 }, { "content": "#[derive(Debug, Trace)]\n\nstruct TestError {\n\n err: ExecutionError,\n\n name: Value,\n\n offset: Value,\n\n source: Value,\n\n}\n\n\n\nimpl CustomType for TestError {\n\n fn muse_type(&self) -> &muse::runtime::value::TypeRef {\n\n static TYPE: RustType<TestError> = RustType::new(\"TestError\", |t| t);\n\n &TYPE\n\n }\n\n}\n\n\n\n// fn execute_test_cases(path: &str, filter: &str, contents: &str) -> Result<(), TestError> {}\n\n\n\n// struct TestError {\n\n// path: String,\n\n// range: Option<\n\n// }\n", "file_path": "tests/hosted.rs", "rank": 94, "score": 64556.018827882435 }, { "content": "struct Var;\n\n\n\nimpl Parselet for Var {\n\n fn matches(&self, token: &Token, _tokens: &mut TokenReader<'_>) -> bool {\n\n let Token::Identifier(ident) = token else {\n\n return false;\n\n };\n\n ident == Symbol::let_symbol() || ident == Symbol::var_symbol()\n\n }\n\n\n\n fn token(&self) -> Option<Token> {\n\n None\n\n }\n\n}\n\n\n\nimpl PrefixParselet for Var {\n\n fn parse_prefix(\n\n &self,\n\n token: Ranged<Token>,\n\n tokens: &mut TokenReader<'_>,\n", "file_path": "src/compiler/syntax.rs", "rank": 95, "score": 64556.018827882435 }, { "content": "struct History {\n\n source: SourceId,\n\n result: Result<MuseValue, ExecutionError>,\n\n}\n\n\n\nimpl History {\n\n fn make_widget(&self, runtime: &Dynamic<Runtime>) -> WidgetInstance {\n\n let runtime = runtime.lock();\n\n let mut guard = CollectionGuard::acquire();\n\n let mut context = VmContext::new(&runtime.vm, &mut guard);\n\n Label::new(\n\n runtime\n\n .sources\n\n .get(self.source)\n\n .expect(\"missing source\")\n\n .source\n\n .clone(),\n\n )\n\n .with(&FontFamily, FamilyOwned::Monospace)\n\n .align_left()\n", "file_path": "amuse/src/main.rs", "rank": 96, "score": 64556.018827882435 }, { "content": "struct Parselets {\n\n precedence: usize,\n\n infix: Precedented<Box<dyn InfixParselet>>,\n\n prefix: Precedented<Box<dyn PrefixParselet>>,\n\n markers: ParseletMarkers,\n\n}\n\n\n", "file_path": "src/compiler/syntax.rs", "rank": 97, "score": 64556.018827882435 }, { "content": "fn execute_input(\n\n source: String,\n\n runtime: &Dynamic<Runtime>,\n\n history: &Dynamic<Vec<History>>,\n\n guard: &mut CollectionGuard,\n\n) {\n\n let mut runtime = runtime.lock();\n\n let runtime = &mut *runtime;\n\n let line_num = runtime.sources.len();\n\n let source = runtime.sources.push(line_num.to_string(), source);\n\n let source_id = source.id;\n\n runtime.compiler.push(source);\n\n match runtime.compiler.build(guard) {\n\n Ok(code) => {\n\n let entry = History {\n\n source: source_id,\n\n result: runtime.vm.execute(&code, guard),\n\n };\n\n history.lock().push(entry);\n\n }\n\n Err(errors) => {\n\n eprintln!(\"Errors: {errors:?}\");\n\n }\n\n }\n\n}\n", "file_path": "amuse/src/main.rs", "rank": 98, "score": 63007.81193252448 }, { "content": "#[test]\n\nfn basics() {\n\n use std::num::NonZeroUsize;\n\n let tokens = Tokens::new(\"a_09_ + 1 - .2\")\n\n .collect::<Result<Vec<_>, _>>()\n\n .unwrap();\n\n assert_eq!(\n\n tokens,\n\n &[\n\n Ranged::new(\n\n (SourceId::anonymous(), 0..5),\n\n Token::Identifier(Symbol::from(\"a_09_\"))\n\n ),\n\n Ranged::new((SourceId::anonymous(), 5..6), Token::Whitespace),\n\n Ranged::new((SourceId::anonymous(), 6..7), Token::Char('+')),\n\n Ranged::new((SourceId::anonymous(), 7..8), Token::Whitespace),\n\n Ranged::new((SourceId::anonymous(), 8..9), Token::Int(1)),\n\n Ranged::new((SourceId::anonymous(), 9..10), Token::Whitespace),\n\n Ranged::new((SourceId::anonymous(), 10..11), Token::Char('-')),\n\n Ranged::new((SourceId::anonymous(), 11..12), Token::Whitespace),\n\n Ranged::new((SourceId::anonymous(), 12..14), Token::Float(0.2)),\n", "file_path": "src/compiler/syntax/token.rs", "rank": 99, "score": 63007.81193252448 } ]
Rust
gf256-macros/src/rs.rs
geky/gf256
57675335061b18e3614376981482fd7584454fd5
extern crate proc_macro; use darling; use darling::FromMeta; use syn; use syn::parse_macro_input; use proc_macro2::*; use std::collections::HashMap; use quote::quote; use std::iter::FromIterator; use crate::common::*; const RS_TEMPLATE: &'static str = include_str!("../templates/rs.rs"); #[derive(Debug, FromMeta)] struct RsArgs { block: usize, data: usize, #[darling(default)] gf: Option<syn::Path>, #[darling(default)] u: Option<syn::Path>, } pub fn rs( args: proc_macro::TokenStream, input: proc_macro::TokenStream ) -> proc_macro::TokenStream { let __crate = crate_path(); let raw_args = parse_macro_input!(args as AttributeArgsWrapper).0; let args = match RsArgs::from_list(&raw_args) { Ok(args) => args, Err(err) => { return err.write_errors().into(); } }; assert!(args.block <= 255); assert!(args.data <= args.block); let ty = parse_macro_input!(input as syn::ItemMod); let attrs = ty.attrs; let vis = ty.vis; let rs = ty.ident; let __gf = Ident::new(&format!("__{}_gf", rs.to_string()), Span::call_site()); let __u = Ident::new(&format!("__{}_u", rs.to_string()), Span::call_site()); let mut overrides = vec![]; match args.gf.as_ref() { Some(gf) => { overrides.push(quote! { use #gf as #__gf; }) } None => { overrides.push(quote! { use #__crate::gf::gf256 as #__gf; }) } } match args.u.as_ref() { Some(u) => { overrides.push(quote! { use #u as #__u; }) } None => { overrides.push(quote! { use u8 as #__u; }); } } let replacements = HashMap::from_iter([ ("__rs".to_owned(), TokenTree::Ident(rs.clone())), ("__block_size".to_owned(), TokenTree::Literal( Literal::usize_unsuffixed(args.block) )), ("__data_size".to_owned(), TokenTree::Literal( Literal::usize_unsuffixed(args.data) )), ("__ecc_size".to_owned(), TokenTree::Literal( Literal::usize_unsuffixed(args.block-args.data) )), ("__gf".to_owned(), TokenTree::Group(Group::new(Delimiter::None, { quote! { super::#__gf } }))), ("__u".to_owned(), TokenTree::Group(Group::new(Delimiter::None, { quote! { super::#__u } }))), ("__crate".to_owned(), __crate.clone()), ]); let template = match compile_template(RS_TEMPLATE, &replacements) { Ok(template) => template, Err(err) => { return err.to_compile_error().into(); } }; let output = quote! { #(#attrs)* #vis mod #rs { #template } #(#overrides)* }; output.into() }
extern crate proc_macro; use darling; use darling::FromMeta; use syn; use syn::parse_macro_input; use proc_macro2::*; use std::collections::HashMap; use quote::quote; use std::iter::FromIterator; use crate::common::*; const RS_TEMPLATE: &'static str = include_str!("../templates/rs.rs"); #[derive(Debug, FromMeta)] struct RsArgs { block: usize, data: usize, #[darling(default)] gf: Option<syn::Path>, #[darling(default)] u: Option<syn::Path>, } pub fn rs( args: proc_macro::TokenStream, input: proc_macro::TokenStream ) -> proc_macro::TokenStream { let __crate = crate_path(); let raw_args = parse_macro_input!(args as AttributeArgsWrapper).0; let args = match RsArgs::from_list(&raw_args) { Ok(args) => args, Err(err) => { return err.write_errors().into(); } }; assert!(args.block <= 255); assert!(args.data <= args.block); let ty = parse_macro_input!(input as syn::ItemMod); let attrs = ty.attrs; let vis = ty.vis; let rs = ty.ident; let __gf = Ident::new(&format!("__{}_gf", rs.to_string()), Span::call_site()); let __u = Ident::new(&format!("__{}_u", rs.to_string()), Span::call_site()); let mut overrides = vec![]; match args.gf.as_ref() { Some(gf) => { overrides.push(quote! { use #gf as #__gf; }) } None => { overrides.push(quote! { use #__crate::gf::gf256 as #__gf; }) } } match args.u.as_ref() { Some(u) => { overrides.push(quote! {
r().into(); } }; let output = quote! { #(#attrs)* #vis mod #rs { #template } #(#overrides)* }; output.into() }
use #u as #__u; }) } None => { overrides.push(quote! { use u8 as #__u; }); } } let replacements = HashMap::from_iter([ ("__rs".to_owned(), TokenTree::Ident(rs.clone())), ("__block_size".to_owned(), TokenTree::Literal( Literal::usize_unsuffixed(args.block) )), ("__data_size".to_owned(), TokenTree::Literal( Literal::usize_unsuffixed(args.data) )), ("__ecc_size".to_owned(), TokenTree::Literal( Literal::usize_unsuffixed(args.block-args.data) )), ("__gf".to_owned(), TokenTree::Group(Group::new(Delimiter::None, { quote! { super::#__gf } }))), ("__u".to_owned(), TokenTree::Group(Group::new(Delimiter::None, { quote! { super::#__u } }))), ("__crate".to_owned(), __crate.clone()), ]); let template = match compile_template(RS_TEMPLATE, &replacements) { Ok(template) => template, Err(err) => { return err.to_compile_erro
function_block-random_span
[ { "content": "/// Generate `n` shares requiring `k` shares to reconstruct.\n\n///\n\n/// This scheme is limited to to the number of shares <= the number of\n\n/// non-zero elements in the field.\n\n///\n\npub fn generate(secret: &[__u], n: usize, k: usize) -> Vec<Vec<__u>> {\n\n // we only support up to 255 shares\n\n assert!(\n\n n <= usize::try_from(__gf::NONZEROS).unwrap_or(usize::MAX),\n\n \"exceeded {} shares\",\n\n __gf::NONZEROS\n\n );\n\n let mut shares = vec![vec![]; n];\n\n let mut rng = __rng();\n\n\n\n // we need to store the x coord somewhere, so just prepend the share with it\n\n for i in 0..n {\n\n shares[i].push(__u::try_from(i+1).unwrap());\n\n }\n\n\n\n for x in secret {\n\n // generate a random polynomial for each byte\n\n let f = poly_random(&mut rng, __gf::from_lossy(*x), k-1);\n\n\n\n // assign each share with a point at f(i)\n\n for i in 0..n {\n\n shares[i].push(__u::from(\n\n poly_eval(&f, __gf::from_lossy(i+1))\n\n ));\n\n }\n\n }\n\n\n\n shares\n\n}\n\n\n", "file_path": "templates/shamir.rs", "rank": 0, "score": 236209.754856076 }, { "content": "/// Encode a message using Reed-Solomon error-correction.\n\n///\n\n/// This writes [`ECC_SIZE`] bytes of error-correction information to the end\n\n/// of the provided slice, based on the data provided in the first\n\n/// `message.len()-ECC_SIZE` bytes. The entire codeword is limited to at most\n\n/// [`BLOCK_SIZE`] bytes, but can be smaller.\n\n///\n\n/// ``` rust\n\n/// # use gf256::rs::rs255w223;\n\n/// let mut codeword = b\"Hello World!\".to_vec();\n\n/// codeword.resize(codeword.len()+32, 0u8);\n\n/// rs255w223::encode(&mut codeword);\n\n/// assert_eq!(&codeword, b\"Hello World!\\\n\n/// \\x85\\xa6\\xad\\xf8\\xbd\\x15\\x94\\x6e\\x5f\\xb6\\x07\\x12\\x4b\\xbd\\x11\\xd3\\\n\n/// \\x34\\x14\\xa7\\x06\\xd6\\x25\\xfd\\x84\\xc2\\x61\\x81\\xa7\\x8a\\x15\\xc9\\x35\");\n\n/// ```\n\n///\n\npub fn encode(message: &mut [__u]) {\n\n assert!(message.len() <= BLOCK_SIZE);\n\n assert!(message.len() >= ECC_SIZE);\n\n let data_len = message.len() - ECC_SIZE;\n\n\n\n // create copy for polynomial division\n\n //\n\n // note if message is < DATA_SIZE we just treat it as a smaller polynomial,\n\n // this is equivalent to prepending zeros\n\n //\n\n let mut divrem = message.to_vec();\n\n divrem[data_len..].fill(0);\n\n\n\n // divide by our generator polynomial\n\n poly_divrem(\n\n unsafe { __gf::slice_from_slice_mut_unchecked(&mut divrem) },\n\n &GENERATOR_POLY\n\n );\n\n\n\n // return message + remainder, this new message is a polynomial\n\n // perfectly divisable by our generator polynomial\n\n message[data_len..].copy_from_slice(&divrem[data_len..]);\n\n}\n\n\n", "file_path": "templates/rs.rs", "rank": 1, "score": 211260.8611689086 }, { "content": "/// Find roots of the error locator polynomial by brute force\n\n///\n\n/// This just means we evaluate Λ(x) for all x locations in our\n\n/// message, if they equal 0, aka are a root, then we found the\n\n/// error location in our message.\n\n///\n\nfn find_error_locations(codeword: &[__gf], Λ: &[__gf]) -> Vec<usize> {\n\n let mut error_locations = vec![];\n\n for j in 0..codeword.len() {\n\n let Xj = __gf::GENERATOR.pow(__u::try_from(codeword.len()-1-j).unwrap());\n\n let zero = poly_eval(&Λ, Xj.recip());\n\n if zero == __gf::new(0) {\n\n // found an error location!\n\n error_locations.push(j);\n\n }\n\n }\n\n\n\n error_locations\n\n}\n\n\n", "file_path": "templates/rs.rs", "rank": 2, "score": 209416.72517381635 }, { "content": "pub fn gf(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n let __crate = crate_path();\n\n\n\n // parse args\n\n let raw_args = parse_macro_input!(args as AttributeArgsWrapper).0;\n\n let args = match GfArgs::from_list(&raw_args) {\n\n Ok(args) => args,\n\n Err(err) => {\n\n return err.write_errors().into();\n\n }\n\n };\n\n\n\n let width = {\n\n // default to 1 less than the width of the irreducible polynomial\n\n // that defines the field, since, well, this is actually the only\n\n // width that would work with that polynomial\n\n let polynomial = args.polynomial.0;\n", "file_path": "gf256-macros/src/gf.rs", "rank": 3, "score": 208463.15596196946 }, { "content": "/// Generate a random polynomial of a given degree, fixing f(0) = secret\n\nfn poly_random<R: Rng>(rng: &mut R, secret: __gf, degree: usize) -> Vec<__gf> {\n\n let mut f = vec![secret];\n\n for _ in 0..degree {\n\n f.push(__gf::from_lossy(rng.gen_range(1..=__gf::NONZEROS)));\n\n }\n\n f\n\n}\n\n\n", "file_path": "templates/shamir.rs", "rank": 4, "score": 207551.1156773597 }, { "content": "/// Correct up to [`ECC_SIZE/2`](ECC_SIZE) errors at unknown locations.\n\n///\n\n/// Returns the number of errors, or [`Error::TooManyErrors`] if the codeword\n\n/// can not be corrected.\n\n///\n\n/// ``` rust\n\n/// # use gf256::rs::rs255w223;\n\n/// let mut codeword = b\"xexlx xoxlx!\\\n\n/// x\\xa6x\\xf8x\\x15x\\x6ex\\xb6x\\x12x\\xbdx\\xd3\\\n\n/// x\\x14x\\x06\\xd6\\x25\\xfd\\x84\\xc2\\x61\\x81\\xa7\\x8a\\x15\\xc9\\x35\".to_vec();\n\n///\n\n/// assert_eq!(rs255w223::correct_errors(&mut codeword), Ok(16));\n\n/// assert_eq!(&codeword, b\"Hello World!\\\n\n/// \\x85\\xa6\\xad\\xf8\\xbd\\x15\\x94\\x6e\\x5f\\xb6\\x07\\x12\\x4b\\xbd\\x11\\xd3\\\n\n/// \\x34\\x14\\xa7\\x06\\xd6\\x25\\xfd\\x84\\xc2\\x61\\x81\\xa7\\x8a\\x15\\xc9\\x35\");\n\n/// ```\n\n///\n\npub fn correct_errors(codeword: &mut [__u]) -> Result<usize, Error> {\n\n let codeword = unsafe { __gf::slice_from_slice_mut_unchecked(codeword) };\n\n\n\n // find syndromes, syndromes of all zero means there are no errors\n\n let S = find_syndromes(codeword);\n\n if S.iter().all(|s| *s == __gf::new(0)) {\n\n return Ok(0);\n\n }\n\n\n\n // find error locator polynomial\n\n let Λ = find_error_locator(&S);\n\n\n\n // too many errors?\n\n let error_count = Λ.len() - 1;\n\n if error_count*2 > ECC_SIZE {\n\n return Err(Error::TooManyErrors);\n\n }\n\n\n\n // find error locations\n\n let error_locations = find_error_locations(codeword, &Λ);\n", "file_path": "templates/rs.rs", "rank": 5, "score": 204767.49005148475 }, { "content": "/// Find the error locator polynomial when we know the location of errors\n\n///\n\n/// ``` text\n\n///\n\n/// Λ(x) = ∏ (1 - Xk*x)\n\n/// k\n\n/// ```\n\n///\n\nfn find_erasure_locator(codeword: &[__gf], erasures: &[usize]) -> Vec<__gf> {\n\n let mut Λ = vec![__gf::new(0); erasures.len()+1];\n\n let Λ_len = Λ.len();\n\n Λ[Λ_len-1] = __gf::new(1);\n\n\n\n for j in erasures {\n\n poly_mul(&mut Λ, &[\n\n -__gf::GENERATOR.pow(__u::try_from(codeword.len()-1-j).unwrap()),\n\n __gf::new(1)\n\n ]);\n\n }\n\n\n\n Λ\n\n}\n\n\n", "file_path": "templates/rs.rs", "rank": 6, "score": 204453.8286609354 }, { "content": "/// Find syndromes, which should be zero if there are no errors\n\n///\n\n/// ``` text\n\n/// Si = c'(g^i)\n\n/// ```\n\n///\n\nfn find_syndromes(f: &[__gf]) -> Vec<__gf> {\n\n let mut S = vec![];\n\n for i in 0..ECC_SIZE {\n\n S.push(\n\n poly_eval(f, __gf::GENERATOR.pow(__u::try_from(i).unwrap()))\n\n );\n\n }\n\n S\n\n}\n\n\n", "file_path": "templates/rs.rs", "rank": 7, "score": 203396.11445276762 }, { "content": "/// Calculate the CRC for a piece of data.\n\n///\n\n/// ``` rust\n\n/// # use ::gf256::crc::*;\n\n/// assert_eq!(crc32c(b\"Hello World!\", 0), 0xfe6cf1dc);\n\n/// ```\n\n///\n\n/// Note that this takes the previous state of the CRC as an argument,\n\n/// allowing the CRC to be computed incrementally:\n\n///\n\n/// ``` rust\n\n/// # use ::gf256::crc::*;\n\n/// assert_eq!(crc32c(b\"Hell\", 0x00000000), 0x77bce1bf);\n\n/// assert_eq!(crc32c(b\"o Wo\", 0x77bce1bf), 0xf92d22b8);\n\n/// assert_eq!(crc32c(b\"rld!\", 0xf92d22b8), 0xfe6cf1dc);\n\n/// assert_eq!(crc32c(b\"Hello World!\", 0), 0xfe6cf1dc);\n\n/// ```\n\n///\n\n/// See the [module-level documentation](../crc) for more info.\n\n///\n\npub fn __crc(data: &[u8], crc: __u) -> __u {\n\n cfg_if! {\n\n if #[cfg(__if(__naive))] {\n\n let mut crc = __p(crc ^ __xor);\n\n\n\n cfg_if! {\n\n if #[cfg(__if(__reflected))] {\n\n crc = crc.reverse_bits() >> (8*size_of::<__u>()-__width);\n\n }\n\n }\n\n\n\n crc = crc << 8*size_of::<__u>()-__width;\n\n\n\n // iterate over words\n\n let mut words = data.chunks_exact(size_of::<__u>());\n\n for word in &mut words {\n\n let word = <[u8; size_of::<__u>()]>::try_from(word).unwrap();\n\n cfg_if! {\n\n if #[cfg(__if(__reflected))] {\n\n crc = crc + __p::from_le_bytes(word).reverse_bits();\n", "file_path": "templates/crc.rs", "rank": 8, "score": 199351.30867630884 }, { "content": "/// Multiply a polynomial by a scalar\n\nfn poly_scale(f: &mut [__gf], c: __gf) {\n\n for i in 0..f.len() {\n\n f[i] *= c;\n\n }\n\n}\n\n\n", "file_path": "templates/rs.rs", "rank": 9, "score": 198684.59899146605 }, { "content": "/// Divide polynomials via synthetic division\n\n///\n\n/// Note both the quotient and remainder are left in the dividend\n\n///\n\nfn poly_divrem(f: &mut [__gf], g: &[__gf]) {\n\n debug_assert!(f.len() >= g.len());\n\n\n\n // find leading coeff to normalize g, note you could avoid\n\n // this if g is already normalized\n\n let leading_coeff = g[0];\n\n\n\n for i in 0 .. (f.len() - g.len() + 1) {\n\n if f[i] != __gf::new(0) {\n\n f[i] /= leading_coeff;\n\n\n\n for j in 1..g.len() {\n\n f[i+j] -= f[i] * g[j];\n\n }\n\n }\n\n }\n\n}\n\n\n\n// Encode using Reed-Solomon error correction\n\n//\n", "file_path": "templates/rs.rs", "rank": 10, "score": 198684.59899146605 }, { "content": "/// Multiply two polynomials together\n\nfn poly_mul(f: &mut [__gf], g: &[__gf]) {\n\n debug_assert!(f[..g.len()-1].iter().all(|x| *x == __gf::new(0)));\n\n\n\n // This is in-place, at the cost of being a bit confusing,\n\n // note that we only write to i+j, and i+j is always >= i\n\n //\n\n // What makes this confusing is that f and g are both big-endian\n\n // polynomials, reverse order from what you would expect. And in\n\n // order to leverage the i+j non-overlap, we need to write to \n\n // f in reverse-reverse order.\n\n //\n\n for i in (0..f.len()-g.len()+1).rev() {\n\n let fi = f[f.len()-1-i];\n\n f[f.len()-1-i] = __gf::new(0);\n\n\n\n for j in 0..g.len() {\n\n f[f.len()-1-(i+j)] += fi * g[g.len()-1-j];\n\n }\n\n }\n\n}\n\n\n", "file_path": "templates/rs.rs", "rank": 11, "score": 198684.598991466 }, { "content": "/// Add two polynomials together\n\nfn poly_add(f: &mut [__gf], g: &[__gf]) {\n\n debug_assert!(f.len() >= g.len());\n\n\n\n // note g.len() may be <= f.len()!\n\n for i in 0..f.len() {\n\n f[f.len()-1-i] += g[g.len()-1-i];\n\n }\n\n}\n\n\n", "file_path": "templates/rs.rs", "rank": 12, "score": 198684.59899146605 }, { "content": "#[proc_macro_attribute]\n\npub fn gf(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n gf::gf(args, input)\n\n}\n\n\n", "file_path": "gf256-macros/src/lib.rs", "rank": 13, "score": 196691.3237560485 }, { "content": "/// Generate n shares requiring k shares to reconstruct\n\npub fn shamir_generate(secret: &[u8], n: usize, k: usize) -> Vec<Vec<u8>> {\n\n // only support up to 255 shares\n\n assert!(n <= 255, \"exceeded 255 shares\");\n\n let mut shares = vec![vec![]; n];\n\n\n\n // we need to store the x coord somewhere, so just prepend the share with it\n\n for i in 0..n {\n\n shares[i].push(u8::try_from(i+1).unwrap());\n\n }\n\n\n\n for x in secret {\n\n // generate a random polynomial for each byte\n\n let f = shamir_poly_random(gf256(*x), k-1);\n\n\n\n // assign each share with a point at f(i)\n\n for i in 0..n {\n\n shares[i].push(u8::from(\n\n shamir_poly_eval(&f, gf256::try_from(i+1).unwrap())\n\n ));\n\n }\n\n }\n\n\n\n shares\n\n}\n\n\n", "file_path": "examples/shamir.rs", "rank": 14, "score": 194319.1241329754 }, { "content": "/// Iteratively find the error locator polynomial using the\n\n/// Berlekamp-Massey algorithm when we don't know the location of errors\n\n///\n\nfn find_error_locator(S: &[__gf]) -> Vec<__gf> {\n\n // the current estimate for the error locator polynomial\n\n let mut Λ = vec![__gf::new(0); S.len()+1];\n\n let Λ_len = Λ.len();\n\n Λ[Λ_len-1] = __gf::new(1);\n\n\n\n let mut prev_Λ = Λ.clone();\n\n let mut delta_Λ = Λ.clone();\n\n\n\n // the current estimate for the number of errors\n\n let mut v = 0;\n\n\n\n for i in 0..S.len() {\n\n let mut delta = S[i];\n\n for j in 1..v+1 {\n\n delta += Λ[Λ.len()-1-j] * S[i-j];\n\n }\n\n\n\n prev_Λ.rotate_left(1);\n\n\n", "file_path": "templates/rs.rs", "rank": 15, "score": 194040.75612534906 }, { "content": "/// Repair up to `n` bad blocks.\n\n///\n\n/// Where `n` <= the number of parity blocks. This can include the parity\n\n/// blocks themselves. `bad_blocks` must be an array of indices indicating\n\n/// which blocks are bad.\n\n///\n\n/// ``` rust\n\n/// # use ::gf256::raid::*;\n\n/// let mut data = b\"Hellxxxxxxxx\".to_vec();\n\n/// let mut datas = data.chunks_mut(4).collect::<Vec<_>>();\n\n/// let mut parity1 = b\"xxxx\".to_vec();\n\n/// let mut parity2 = b\"\\x43\\x88\\x4f\\x36\".to_vec();\n\n/// let mut parity3 = b\"\\x9a\\x6b\\x23\\xe7\".to_vec();\n\n///\n\n/// // repair\n\n/// raid7::repair(&mut datas, &mut parity1, &mut parity2, &mut parity3, &[1, 2, 3]);\n\n/// assert_eq!(&data, b\"Hello World!\");\n\n/// ```\n\n///\n\npub fn repair<B: AsMut<[__u]>>(\n\n blocks: &mut [B],\n\n #[cfg(__if(__parity >= 1))] p: &mut [__u],\n\n #[cfg(__if(__parity >= 2))] q: &mut [__u],\n\n #[cfg(__if(__parity >= 3))] r: &mut [__u],\n\n bad_blocks: &[usize]\n\n) -> Result<(), Error> {\n\n let len = blocks[0].as_mut().len();\n\n #[cfg(__if(__parity >= 1))] let p = unsafe { __gf::slice_from_slice_mut_unchecked(p) };\n\n #[cfg(__if(__parity >= 2))] let q = unsafe { __gf::slice_from_slice_mut_unchecked(q) };\n\n #[cfg(__if(__parity >= 3))] let r = unsafe { __gf::slice_from_slice_mut_unchecked(r) };\n\n\n\n if bad_blocks.len() > __parity {\n\n // can't repair\n\n return Err(Error::TooManyBadBlocks);\n\n }\n\n\n\n // sort the data blocks without alloc, this is only so we can split\n\n // the mut blocks array safely\n\n let mut bad_blocks_array = [\n", "file_path": "templates/raid.rs", "rank": 16, "score": 181463.15595708808 }, { "content": "/// Attempt to reconstruct a secret from at least `k` shares.\n\n///\n\n/// All shares must be the same length. If insufficient or invalid shares are\n\n/// provided, the result will be garbage.\n\n///\n\npub fn reconstruct<S: AsRef<[__u]>>(shares: &[S]) -> Vec<__u> {\n\n // matching lengths?\n\n assert!(\n\n shares.windows(2).all(|ss| ss[0].as_ref().len() == ss[1].as_ref().len()),\n\n \"mismatched share length?\"\n\n );\n\n\n\n let mut secret = vec![];\n\n let len = shares.get(0).map(|s| s.as_ref().len()).unwrap_or(0);\n\n if len == 0 {\n\n return secret;\n\n }\n\n\n\n // x is prepended to each share\n\n let xs = shares.iter().map(|s| __gf::from_lossy(s.as_ref()[0])).collect::<Vec<_>>();\n\n for i in 1..len {\n\n let ys = shares.iter().map(|s| __gf::from_lossy(s.as_ref()[i])).collect::<Vec<_>>();\n\n secret.push(__u::from(poly_interpolate(&xs, &ys)));\n\n }\n\n\n\n secret\n\n}\n\n\n", "file_path": "templates/shamir.rs", "rank": 17, "score": 181365.68575326452 }, { "content": "/// Add a block to a RAID6 array\n\n///\n\n/// Note the block index must be unique in the array! This does not\n\n/// update other block indices.\n\n///\n\npub fn raid6_add(j: usize, new: &[u8], p: &mut [u8], q: &mut [u8]) {\n\n let len = p.len();\n\n let p = gf256::slice_from_slice_mut(p);\n\n let q = gf256::slice_from_slice_mut(q);\n\n\n\n let g = gf256::GENERATOR.pow(u8::try_from(j).unwrap());\n\n for i in 0..len {\n\n // calculate new parity\n\n p[i] += gf256(new[i]);\n\n q[i] += gf256(new[i]) * g;\n\n }\n\n}\n\n\n", "file_path": "examples/raid.rs", "rank": 18, "score": 179298.70673293807 }, { "content": "/// Add a block from a RAID6 array\n\n///\n\n/// Note the block index must already exit in the array, otherwise the\n\n/// array will become corrupted. This does not update other block indices.\n\n///\n\npub fn raid6_remove(j: usize, old: &[u8], p: &mut [u8], q: &mut [u8]) {\n\n let len = p.len();\n\n let p = gf256::slice_from_slice_mut(p);\n\n let q = gf256::slice_from_slice_mut(q);\n\n\n\n let g = gf256::GENERATOR.pow(u8::try_from(j).unwrap());\n\n for i in 0..len {\n\n // calculate new parity\n\n p[i] -= gf256(old[i]);\n\n q[i] -= gf256(old[i]) * g;\n\n }\n\n}\n\n\n", "file_path": "examples/raid.rs", "rank": 19, "score": 179298.4731682556 }, { "content": "/// Encode using Reed-Solomon error correction\n\n///\n\n/// Much like in CRC, we want to make the message a multiple of G(x),\n\n/// our generator polynomial. We can do this by appending the remainder\n\n/// of our message after division by G(x).\n\n///\n\n/// ``` text\n\n/// c(x) = m(x) - (m(x) % G(x))\n\n/// ```\n\n///\n\n/// Note we expect the message to only take up the first message.len()-ECC_SIZE\n\n/// bytes, but this can be smaller than BLOCK_SIZE\n\n///\n\npub fn rs_encode(message: &mut [u8]) {\n\n assert!(message.len() <= BLOCK_SIZE);\n\n assert!(message.len() >= ECC_SIZE);\n\n let data_len = message.len() - ECC_SIZE;\n\n\n\n // create copy for polynomial division\n\n //\n\n // note if message is < DATA_SIZE we just treat it as a smaller polynomial,\n\n // this is equivalent to prepending zeros\n\n //\n\n let mut divrem = message.to_vec();\n\n divrem[data_len..].fill(0);\n\n\n\n // divide by our generator polynomial\n\n rs_poly_divrem(\n\n gf256::slice_from_slice_mut(&mut divrem),\n\n &GENERATOR_POLY\n\n );\n\n\n\n // return message + remainder, this new message is a polynomial\n\n // perfectly divisable by our generator polynomial\n\n message[data_len..].copy_from_slice(&divrem[data_len..]);\n\n}\n\n\n", "file_path": "examples/rs.rs", "rank": 20, "score": 179185.96648720495 }, { "content": "/// Add a block to a RAID7 array\n\n///\n\n/// Note the block index must be unique in the array! This does not\n\n/// update other block indices.\n\n///\n\npub fn raid7_add(j: usize, new: &[u8], p: &mut [u8], q: &mut [u8], r: &mut [u8]) {\n\n let len = p.len();\n\n let p = gf256::slice_from_slice_mut(p);\n\n let q = gf256::slice_from_slice_mut(q);\n\n let r = gf256::slice_from_slice_mut(r);\n\n\n\n let g = gf256::GENERATOR.pow(u8::try_from(j).unwrap());\n\n let h = g*g;\n\n for i in 0..len {\n\n // calculate new parity\n\n p[i] += gf256(new[i]);\n\n q[i] += gf256(new[i]) * g;\n\n r[i] += gf256(new[i]) * h;\n\n }\n\n}\n\n\n", "file_path": "examples/raid.rs", "rank": 21, "score": 177553.24618596968 }, { "content": "/// Add a block from a RAID7 array\n\n///\n\n/// Note the block index must already exit in the array, otherwise the\n\n/// array will become corrupted. This does not update other block indices.\n\n///\n\npub fn raid7_remove(j: usize, old: &[u8], p: &mut [u8], q: &mut [u8], r: &mut [u8]) {\n\n let len = p.len();\n\n let p = gf256::slice_from_slice_mut(p);\n\n let q = gf256::slice_from_slice_mut(q);\n\n let r = gf256::slice_from_slice_mut(r);\n\n\n\n let g = gf256::GENERATOR.pow(u8::try_from(j).unwrap());\n\n let h = g*g;\n\n for i in 0..len {\n\n // calculate new parity\n\n p[i] -= gf256(old[i]);\n\n q[i] -= gf256(old[i]) * g;\n\n r[i] -= gf256(old[i]) * h;\n\n }\n\n}\n\n\n", "file_path": "examples/raid.rs", "rank": 22, "score": 177553.01262128723 }, { "content": "/// Correct up to ECC_SIZE/2 errors at unknown locations\n\n///\n\npub fn rs_correct_errors(codeword: &mut [u8]) -> Result<usize, RsError> {\n\n let codeword = gf256::slice_from_slice_mut(codeword);\n\n\n\n // find syndromes, syndromes of all zero means there are no errors\n\n let S = rs_find_syndromes(codeword);\n\n if S.iter().all(|s| *s == gf256(0)) {\n\n return Ok(0);\n\n }\n\n\n\n // find error locator polynomial\n\n let Λ = rs_find_error_locator(&S);\n\n\n\n // too many errors?\n\n let error_count = Λ.len() - 1;\n\n if error_count*2 > ECC_SIZE {\n\n return Err(RsError::TooManyErrors);\n\n }\n\n\n\n // find error locations\n\n let error_locations = rs_find_error_locations(codeword, &Λ);\n", "file_path": "examples/rs.rs", "rank": 23, "score": 175712.6786362428 }, { "content": "/// Add a block to a RAID5 array\n\n///\n\n/// Note the block index must be unique in the array! This does not\n\n/// update other block indices.\n\n///\n\npub fn raid5_add(_j: usize, new: &[u8], p: &mut [u8]) {\n\n let len = p.len();\n\n\n\n for i in 0..len {\n\n // calculate new parity\n\n p[i] ^= new[i];\n\n }\n\n}\n\n\n", "file_path": "examples/raid.rs", "rank": 24, "score": 175330.34812486373 }, { "content": "/// Add a block from a RAID5 array\n\n///\n\n/// Note the block index must already exit in the array, otherwise the\n\n/// array will become corrupted. This does not update other block indices.\n\n///\n\npub fn raid5_remove(_j: usize, old: &[u8], p: &mut [u8]) {\n\n let len = p.len();\n\n\n\n for i in 0..len {\n\n // calculate new parity\n\n p[i] ^= old[i];\n\n }\n\n}\n\n\n", "file_path": "examples/raid.rs", "rank": 25, "score": 175330.11456018122 }, { "content": "/// A naive CRC implementation using the textbook definition of polynomial\n\n/// remainder, the input is padded with 32-bits of zeros to represent the\n\n/// correct polynomial.\n\n///\n\n/// The bit-invert of the CRC is a bit strange when mapped to the\n\n/// textbook definition as this appears as xoring the input with\n\n/// 32-bits of ones followed by zeros.\n\n///\n\n/// We also have to bit-reverse the input/output in order to match\n\n/// the common CRC32 behavior.\n\n///\n\npub fn naive_crc(data: &[u8]) -> u32 {\n\n let mut crc = p64(0);\n\n\n\n for b in\n\n data.iter().copied()\n\n // pad with 32-bits\n\n .chain(iter::repeat(0x00).take(4))\n\n // invert the first 32-bits\n\n .zip(iter::repeat(0xff).take(4).chain(iter::repeat(0x00)))\n\n .map(|(m, b)| m ^ b)\n\n {\n\n crc = (crc << 8) | p64::from(b.reverse_bits());\n\n crc = crc % POLYNOMIAL;\n\n }\n\n\n\n u32::try_from(crc).unwrap().reverse_bits() ^ 0xffffffff\n\n}\n\n\n", "file_path": "examples/crc.rs", "rank": 27, "score": 171106.84422852204 }, { "content": "/// A table-based CRC implementation using precomputed remainders\n\n/// post-addition\n\n///\n\n/// This requires a 4*256 = 1024 byte table (computed at compile-time thanks\n\n/// to Rust's const evaluation), and is the most common CRC implementation\n\n/// thanks to its portability and speed.\n\n///\n\npub fn table_crc(data: &[u8]) -> u32 {\n\n const CRC_TABLE: [u32; 256] = {\n\n let mut table = [0; 256];\n\n let mut i = 0;\n\n while i < table.len() {\n\n let x = (i as u32).reverse_bits();\n\n let x = p64((x as u64) << 8).naive_rem(POLYNOMIAL).0 as u32;\n\n table[i] = x.reverse_bits();\n\n i += 1;\n\n }\n\n table\n\n };\n\n\n\n let mut crc = 0xffffffff;\n\n\n\n for b in data {\n\n crc = (crc >> 8) ^ CRC_TABLE[usize::from((crc as u8) ^ b)];\n\n }\n\n\n\n crc ^ 0xffffffff\n\n}\n\n\n", "file_path": "examples/crc.rs", "rank": 28, "score": 171103.1103262212 }, { "content": "/// A hardware-accelerated CRC implementation using Barret reduction\n\n///\n\n/// This leverages polynomial multiplication instructions (pclmulqdq,\n\n/// pmull, etc) to provide an efficient CRC implementation without the need\n\n/// of a lookup table.\n\n///\n\n/// You may notice that polynomial multiplication is not the polynomial\n\n/// remainder operation needed for CRC, and that is where Barret reduction\n\n/// comes in. Barret reduction allows you to turn division/remainder\n\n/// by a constant into a cheaper multiply by a different constant.\n\n///\n\n/// Fortunately Rust makes it easy to precompute this constant at\n\n/// compile-time.\n\n///\n\npub fn barret_crc(data: &[u8]) -> u32 {\n\n // Normally this would be 0x10000000000000000 / __polynomial, but\n\n // we eagerly do one step of division so we avoid needing a 4x wide\n\n // type. We can also drop the highest bit if we add the high bits\n\n // manually we use use this constant.\n\n //\n\n // = x % p\n\n // = 0xffffffff & (x + p*(((x >> 32) * [0x10000000000000000/p]) >> 32))\n\n // = 0xffffffff & (x + p*(((x >> 32) * [(p << 32)/p + 0x100000000]) >> 32))\n\n // = 0xffffffff & (x + p*((((x >> 32) * [(p << 32)/p]) >> 32) + (x >> 32)))\n\n // \\-----+-----/\n\n // '-- Barret constant\n\n //\n\n // Note that the shifts and masks can go away if we operate on u32s,\n\n // leaving 2 xmuls and 2 xors.\n\n //\n\n const BARRET_CONSTANT: p32 = {\n\n p32(p64(POLYNOMIAL.0 << 32).naive_div(POLYNOMIAL).0 as u32)\n\n };\n\n\n", "file_path": "examples/crc.rs", "rank": 29, "score": 171098.462593648 }, { "content": "#[cfg(feature=\"rs\")]\n\n#[proc_macro_attribute]\n\npub fn rs(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n rs::rs(args, input)\n\n}\n", "file_path": "gf256-macros/src/lib.rs", "rank": 30, "score": 170648.31842925714 }, { "content": "/// Format blocks with RAID6, aka two blocks of parity\n\npub fn raid6_format<B: AsRef<[u8]>>(blocks: &[B], p: &mut [u8], q: &mut [u8]) {\n\n let len = p.len();\n\n assert!(q.len() == len);\n\n assert!(blocks.iter().all(|b| b.as_ref().len() == len));\n\n assert!(blocks.len() <= 255);\n\n let p = gf256::slice_from_slice_mut(p);\n\n let q = gf256::slice_from_slice_mut(q);\n\n\n\n for i in 0..len {\n\n p[i] = gf256(0);\n\n q[i] = gf256(0);\n\n }\n\n\n\n for (j, b) in blocks.iter().enumerate() {\n\n let g = gf256::GENERATOR.pow(u8::try_from(j).unwrap());\n\n for i in 0..len {\n\n p[i] += gf256(b.as_ref()[i]);\n\n q[i] += gf256(b.as_ref()[i]) * g;\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/raid.rs", "rank": 31, "score": 168999.1012010146 }, { "content": "/// Format blocks with RAID7, aka three blocks of parity\n\npub fn raid7_format<B: AsRef<[u8]>>(blocks: &[B], p: &mut [u8], q: &mut [u8], r: &mut [u8]) {\n\n let len = p.len();\n\n assert!(q.len() == len);\n\n assert!(r.len() == len);\n\n assert!(blocks.iter().all(|b| b.as_ref().len() == len));\n\n assert!(blocks.len() <= 255);\n\n let p = gf256::slice_from_slice_mut(p);\n\n let q = gf256::slice_from_slice_mut(q);\n\n let r = gf256::slice_from_slice_mut(r);\n\n\n\n for i in 0..len {\n\n p[i] = gf256(0);\n\n q[i] = gf256(0);\n\n r[i] = gf256(0);\n\n }\n\n\n\n for (j, b) in blocks.iter().enumerate() {\n\n let g = gf256::GENERATOR.pow(u8::try_from(j).unwrap());\n\n let h = g*g;\n\n for i in 0..len {\n\n p[i] += gf256(b.as_ref()[i]);\n\n q[i] += gf256(b.as_ref()[i]) * g;\n\n r[i] += gf256(b.as_ref()[i]) * h;\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/raid.rs", "rank": 32, "score": 168748.6583157455 }, { "content": "/// Update a block in a RAID7 array\n\n///\n\n/// This is functionally equivalent to remove(i)+add(i), but more efficient.\n\n///\n\npub fn raid7_update(j: usize, old: &[u8], new: &[u8], p: &mut [u8], q: &mut [u8], r: &mut [u8]) {\n\n let len = p.len();\n\n let p = gf256::slice_from_slice_mut(p);\n\n let q = gf256::slice_from_slice_mut(q);\n\n let r = gf256::slice_from_slice_mut(r);\n\n\n\n let g = gf256::GENERATOR.pow(u8::try_from(j).unwrap());\n\n let h = g*g;\n\n for i in 0..len {\n\n // calculate new parity\n\n p[i] += gf256(new[i]) - gf256(old[i]);\n\n q[i] += (gf256(new[i]) - gf256(old[i])) * g;\n\n r[i] += (gf256(new[i]) - gf256(old[i])) * h;\n\n }\n\n}\n\n\n\n\n", "file_path": "examples/raid.rs", "rank": 33, "score": 167145.21600186703 }, { "content": "/// Update a block in a RAID6 array\n\n///\n\n/// This is functionally equivalent to remove(i)+add(i), but more efficient.\n\n///\n\npub fn raid6_update(j: usize, old: &[u8], new: &[u8], p: &mut [u8], q: &mut [u8]) {\n\n let len = p.len();\n\n let p = gf256::slice_from_slice_mut(p);\n\n let q = gf256::slice_from_slice_mut(q);\n\n\n\n let g = gf256::GENERATOR.pow(u8::try_from(j).unwrap());\n\n for i in 0..len {\n\n // calculate new parity\n\n p[i] += gf256(new[i]) - gf256(old[i]);\n\n q[i] += (gf256(new[i]) - gf256(old[i])) * g;\n\n }\n\n}\n\n\n\n\n\n//// RAID7 ////\n\n\n", "file_path": "examples/raid.rs", "rank": 34, "score": 167034.62141815425 }, { "content": "/// A smaller table-based CRC implementation using 4-bit precomputed\n\n/// remainders post-addition\n\n///\n\n/// This requires a 4*16 = 64 byte table (computed at compile-time thanks\n\n/// to Rust's const evaluation), significantly reducing the code-size\n\n/// at the cost of 2x the number of operations. This CRC implementation\n\n/// is common on embedded systems.\n\n///\n\npub fn small_table_crc(data: &[u8]) -> u32 {\n\n const CRC_SMALL_TABLE: [u32; 16] = {\n\n let mut table = [0; 16];\n\n let mut i = 0;\n\n while i < table.len() {\n\n let x = (i as u32).reverse_bits();\n\n let x = p64((x as u64) << 4).naive_rem(POLYNOMIAL).0 as u32;\n\n table[i] = x.reverse_bits();\n\n i += 1;\n\n }\n\n table\n\n };\n\n\n\n let mut crc = 0xffffffff;\n\n\n\n for b in data {\n\n crc = (crc >> 4) ^ CRC_SMALL_TABLE[usize::from(((crc as u8) ^ (b >> 0)) & 0xf)];\n\n crc = (crc >> 4) ^ CRC_SMALL_TABLE[usize::from(((crc as u8) ^ (b >> 4)) & 0xf)];\n\n }\n\n\n\n crc ^ 0xffffffff\n\n}\n\n\n", "file_path": "examples/crc.rs", "rank": 35, "score": 166591.21669342666 }, { "content": "/// A hardware-accelerated CRC implementation using Barret reduction without\n\n/// needing to bit-reverse the internal representation\n\n///\n\n/// CRC32 and polynomial multiplication instructions unfortunately are defined\n\n/// with different bit-endianness. This would normally mean we need to\n\n/// bit-reverse the incoming data before we can use polynomial multiplication.\n\n///\n\n/// However, polynomial multiplication has the odd property that it is\n\n/// symmetric, brev(a) * brev(b) = brev((a * b) << 1)\n\n///\n\n/// This means we can rewrite our Barret reduction CRC to operate entirely\n\n/// on a bit-reversed representation, shaving off several instructions.\n\n///\n\n/// In theory this should be faster, but measurements show this as actually\n\n/// being slightly slower, perhaps the extra 1-bit shift costs more on\n\n/// machines with bit-reverse instructions?\n\n///\n\npub fn reversed_barret_crc(data: &[u8]) -> u32 {\n\n // Normally this would be 0x10000000000000000 / __polynomial, but\n\n // we eagerly do one step of division so we avoid needing a 4x wide\n\n // type. We can also drop the highest bit if we add the high bits\n\n // manually we use use this constant.\n\n //\n\n // = x % p\n\n // = 0xffffffff & (x + p*(((x >> 32) * [0x10000000000000000/p]) >> 32))\n\n // = 0xffffffff & (x + p*(((x >> 32) * [(p << 32)/p + 0x100000000]) >> 32))\n\n // = 0xffffffff & (x + p*((((x >> 32) * [(p << 32)/p]) >> 32) + (x >> 32)))\n\n // \\-----+-----/\n\n // '-- Barret constant\n\n //\n\n // Note that the shifts and masks can go away if we operate on u32s,\n\n // leaving 2 xmuls and 2 xors.\n\n //\n\n const BARRET_CONSTANT: p32 = {\n\n p32(p64(POLYNOMIAL.0 << 32).naive_div(POLYNOMIAL).0 as u32)\n\n };\n\n const POLYNOMIAL_REV: p32 = p32(POLYNOMIAL.0 as u32).reverse_bits();\n", "file_path": "examples/crc.rs", "rank": 36, "score": 166590.25142690065 }, { "content": "/// A hardware-accelerated CRC implementation using the same technique as\n\n/// barret_crc, but operating on a 32-bit word at a time\n\n///\n\npub fn word_barret_crc(data: &[u8]) -> u32 {\n\n // Normally this would be 0x10000000000000000 / __polynomial, but\n\n // we eagerly do one step of division so we avoid needing a 4x wide\n\n // type. We can also drop the highest bit if we add the high bits\n\n // manually we use use this constant.\n\n //\n\n // = x % p\n\n // = 0xffffffff & (x + p*(((x >> 32) * [0x10000000000000000/p]) >> 32))\n\n // = 0xffffffff & (x + p*(((x >> 32) * [(p << 32)/p + 0x100000000]) >> 32))\n\n // = 0xffffffff & (x + p*((((x >> 32) * [(p << 32)/p]) >> 32) + (x >> 32)))\n\n // \\-----+-----/\n\n // '-- Barret constant\n\n //\n\n // Note that the shifts and masks can go away if we operate on u32s,\n\n // leaving 2 xmuls and 2 xors.\n\n //\n\n const BARRET_CONSTANT: p32 = {\n\n p32(p64(POLYNOMIAL.0 << 32).naive_div(POLYNOMIAL).0 as u32)\n\n };\n\n\n", "file_path": "examples/crc.rs", "rank": 37, "score": 166588.92968376295 }, { "content": "/// A CRC implementation that uses the first common optimization:\n\n/// delaying the addition of the next byte to when overflow can occur\n\n///\n\npub fn less_naive_crc(data: &[u8]) -> u32 {\n\n let mut crc = p32(0xffffffff);\n\n\n\n for b in data {\n\n crc = crc + (p32::from(b.reverse_bits()) << 24);\n\n crc = p32::try_from((p64::from(crc) << 8) % POLYNOMIAL).unwrap();\n\n }\n\n\n\n u32::from(crc).reverse_bits() ^ 0xffffffff\n\n}\n\n\n", "file_path": "examples/crc.rs", "rank": 38, "score": 166588.86342037522 }, { "content": "/// Format blocks with RAID5, aka single block of parity\n\npub fn raid5_format<B: AsRef<[u8]>>(blocks: &[B], p: &mut [u8]) {\n\n let len = p.len();\n\n assert!(blocks.iter().all(|b| b.as_ref().len() == len));\n\n\n\n for i in 0..len {\n\n p[i] = 0;\n\n }\n\n\n\n for b in blocks {\n\n for i in 0..len {\n\n // this could be gf256(a) + gf256(b), but that's just xor anyways\n\n p[i] ^= b.as_ref()[i];\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/raid.rs", "rank": 39, "score": 163360.69660956695 }, { "content": "/// A CRC implementation using the same technique as less_naive_crc but\n\n/// operating on a 32-bit word at a time\n\n///\n\npub fn word_less_naive_crc(data: &[u8]) -> u32 {\n\n let mut crc = p32(0xffffffff);\n\n\n\n // iterate over 4-byte words\n\n let mut words = data.chunks_exact(4);\n\n for word in &mut words {\n\n let word = <[u8; 4]>::try_from(word).unwrap();\n\n crc = crc + p32::from_le_bytes(word).reverse_bits();\n\n crc = p32::try_from((p64::from(crc) << 32) % POLYNOMIAL).unwrap();\n\n }\n\n\n\n for b in words.remainder() {\n\n crc = crc + (p32::from(b.reverse_bits()) << 24);\n\n crc = p32::try_from((p64::from(crc) << 8) % POLYNOMIAL).unwrap();\n\n }\n\n\n\n u32::from(crc).reverse_bits() ^ 0xffffffff\n\n}\n\n\n", "file_path": "examples/crc.rs", "rank": 40, "score": 162456.64743929435 }, { "content": "/// A hardware-accelerated CRC implementation using the same technique as\n\n/// reversed_barret_crc, but operating on a 32-bit word at a time\n\n///\n\npub fn word_reversed_barret_crc(data: &[u8]) -> u32 {\n\n // Normally this would be 0x10000000000000000 / __polynomial, but\n\n // we eagerly do one step of division so we avoid needing a 4x wide\n\n // type. We can also drop the highest bit if we add the high bits\n\n // manually we use use this constant.\n\n //\n\n // = x % p\n\n // = 0xffffffff & (x + p*(((x >> 32) * [0x10000000000000000/p]) >> 32))\n\n // = 0xffffffff & (x + p*(((x >> 32) * [(p << 32)/p + 0x100000000]) >> 32))\n\n // = 0xffffffff & (x + p*((((x >> 32) * [(p << 32)/p]) >> 32) + (x >> 32)))\n\n // \\-----+-----/\n\n // '-- Barret constant\n\n //\n\n // Note that the shifts and masks can go away if we operate on u32s,\n\n // leaving 2 xmuls and 2 xors.\n\n //\n\n const BARRET_CONSTANT: p32 = {\n\n p32(p64(POLYNOMIAL.0 << 32).naive_div(POLYNOMIAL).0 as u32)\n\n };\n\n const POLYNOMIAL_REV: p32 = p32(POLYNOMIAL.0 as u32).reverse_bits();\n", "file_path": "examples/crc.rs", "rank": 41, "score": 162456.51278233435 }, { "content": "/// Update a block in a RAID5 array\n\n///\n\n/// This is functionally equivalent to remove(i)+add(i), but more efficient.\n\n///\n\npub fn raid5_update(_j: usize, old: &[u8], new: &[u8], p: &mut [u8]) {\n\n let len = p.len();\n\n\n\n for i in 0..len {\n\n // calculate new parity\n\n p[i] ^= old[i] ^ new[i];\n\n }\n\n}\n\n\n\n\n\n//// RAID6 ////\n\n\n", "file_path": "examples/raid.rs", "rank": 42, "score": 160952.5680037576 }, { "content": "fn bench_gfmul(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"gfmul\");\n\n\n\n // gf256 mul/div\n\n bench_mul!(group, \"gf256_naive_mul\", gf256_naive);\n\n bench_mul!(group, \"gf256_table_mul\", gf256_table);\n\n bench_mul!(group, \"gf256_rem_table_mul\", gf256_rem_table);\n\n bench_mul!(group, \"gf256_small_rem_table_mul\", gf256_small_rem_table);\n\n bench_mul!(group, \"gf256_barret_mul\", gf256_barret);\n\n\n\n bench_div!(group, \"gf256_naive_div\", gf256_naive);\n\n bench_div!(group, \"gf256_table_div\", gf256_table);\n\n bench_div!(group, \"gf256_rem_table_div\", gf256_rem_table);\n\n bench_div!(group, \"gf256_small_rem_table_div\", gf256_small_rem_table);\n\n bench_div!(group, \"gf256_barret_div\", gf256_barret);\n\n\n\n // gf16 mul/div\n\n bench_mul!(group, \"gf16_naive_mul\", |x: u8| gf16_naive::try_from(x&0xf).unwrap());\n\n bench_mul!(group, \"gf16_table_mul\", |x: u8| gf16_table::try_from(x&0xf).unwrap());\n\n bench_mul!(group, \"gf16_rem_table_mul\", |x: u8| gf16_rem_table::try_from(x&0xf).unwrap());\n", "file_path": "benches/gf.rs", "rank": 43, "score": 160099.08519182962 }, { "content": "#[derive(Debug, FromMeta)]\n\nstruct GfArgs {\n\n polynomial: U128Wrapper,\n\n generator: u64,\n\n\n\n #[darling(default, rename=\"usize\")]\n\n is_usize: Option<bool>,\n\n #[darling(default)]\n\n u: Option<syn::Path>,\n\n #[darling(default)]\n\n u2: Option<syn::Path>,\n\n #[darling(default)]\n\n p: Option<syn::Path>,\n\n #[darling(default)]\n\n p2: Option<syn::Path>,\n\n\n\n #[darling(default)]\n\n naive: bool,\n\n #[darling(default)]\n\n table: bool,\n\n #[darling(default)]\n\n rem_table: bool,\n\n #[darling(default)]\n\n small_rem_table: bool,\n\n #[darling(default)]\n\n barret: bool,\n\n}\n\n\n", "file_path": "gf256-macros/src/gf.rs", "rank": 44, "score": 157807.69019281602 }, { "content": "/// Evaluate a polynomial at x using Horner's method\n\n///\n\n/// Note polynomials here are ordered biggest-coefficient first\n\n///\n\nfn poly_eval(f: &[__gf], x: __gf) -> __gf {\n\n let mut y = __gf::new(0);\n\n for c in f {\n\n y = y*x + c;\n\n }\n\n y\n\n}\n\n\n", "file_path": "templates/rs.rs", "rank": 45, "score": 155809.1658642825 }, { "content": "/// Determine if codeword is correct and has no errors/erasures.\n\n///\n\n/// This is quite a bit faster than actually finding the errors/erasures.\n\n///\n\n/// ``` rust\n\n/// # use gf256::rs::rs255w223;\n\n/// let codeword = b\"Hello World!\\\n\n/// \\x85\\xa6\\xad\\xf8\\xbd\\x15\\x94\\x6e\\x5f\\xb6\\x07\\x12\\x4b\\xbd\\x11\\xd3\\\n\n/// \\x34\\x14\\xa7\\x06\\xd6\\x25\\xfd\\x84\\xc2\\x61\\x81\\xa7\\x8a\\x15\\xc9\\x35\".to_vec();\n\n/// assert!(rs255w223::is_correct(&codeword));\n\n/// ```\n\n///\n\npub fn is_correct(codeword: &[__u]) -> bool {\n\n let codeword = unsafe { __gf::slice_from_slice_unchecked(codeword) };\n\n\n\n // find syndromes, syndromes of all zero means there are no errors\n\n let syndromes = find_syndromes(codeword);\n\n syndromes.iter().all(|s| *s == __gf::new(0))\n\n}\n\n\n", "file_path": "templates/rs.rs", "rank": 46, "score": 154330.90513697 }, { "content": "/// Evaluate a polynomial at x using Horner's method\n\nfn poly_eval(f: &[__gf], x: __gf) -> __gf {\n\n let mut y = __gf::new(0);\n\n for c in f.iter().rev() {\n\n y = y*x + c;\n\n }\n\n y\n\n}\n\n\n", "file_path": "templates/shamir.rs", "rank": 47, "score": 150104.0978836581 }, { "content": "/// Format blocks as a RAID array.\n\n///\n\n/// This writes the parity data to the provided parity blocks based on the\n\n/// provided data blocks.\n\n///\n\n/// ``` rust\n\n/// # use ::gf256::raid::*;\n\n/// let mut data = b\"Hello World!\".to_vec();\n\n/// let datas = data.chunks(4).collect::<Vec<_>>();\n\n/// let mut parity1 = vec![0u8; 4];\n\n/// let mut parity2 = vec![0u8; 4];\n\n/// let mut parity3 = vec![0u8; 4];\n\n/// raid7::format(&datas, &mut parity1, &mut parity2, &mut parity3);\n\n///\n\n/// assert_eq!(&datas[0], b\"Hell\");\n\n/// assert_eq!(&datas[1], b\"o Wo\");\n\n/// assert_eq!(&datas[2], b\"rld!\");\n\n/// assert_eq!(&parity1, b\"\\x55\\x29\\x5f\\x22\");\n\n/// assert_eq!(&parity2, b\"\\x43\\x88\\x4f\\x36\");\n\n/// assert_eq!(&parity3, b\"\\x9a\\x6b\\x23\\xe7\");\n\n/// ```\n\n///\n\npub fn format<B: AsRef<[__u]>>(\n\n blocks: &[B],\n\n #[cfg(__if(__parity >= 1))] p: &mut [__u],\n\n #[cfg(__if(__parity >= 2))] q: &mut [__u],\n\n #[cfg(__if(__parity >= 3))] r: &mut [__u],\n\n) {\n\n assert!(blocks.len() >= 1);\n\n #[cfg(__if(__parity >= 2))] { assert!(blocks.len() <= usize::try_from(__gf::NONZEROS).unwrap_or(usize::MAX)); }\n\n\n\n let len = blocks[0].as_ref().len();\n\n assert!(blocks.iter().all(|b| b.as_ref().len() == len));\n\n #[cfg(__if(__parity >= 1))] { assert!(p.len() == len); }\n\n #[cfg(__if(__parity >= 1))] let p = unsafe { __gf::slice_from_slice_mut_unchecked(p) };\n\n #[cfg(__if(__parity >= 2))] { assert!(q.len() == len); }\n\n #[cfg(__if(__parity >= 2))] let q = unsafe { __gf::slice_from_slice_mut_unchecked(q) };\n\n #[cfg(__if(__parity >= 3))] { assert!(r.len() == len); }\n\n #[cfg(__if(__parity >= 3))] let r = unsafe { __gf::slice_from_slice_mut_unchecked(r) };\n\n\n\n for i in 0..len {\n\n #[cfg(__if(__parity >= 1))] { p[i] = __gf::new(0); }\n", "file_path": "templates/raid.rs", "rank": 48, "score": 148674.38237873578 }, { "content": "/// Correct a mixture of errors and erasures, up to `2*errors+erasures <= ECC_SIZE`.\n\n///\n\n/// Where erasures are at known locations and errors are at unknown locations.\n\n/// Errors must be <= [`ECC_SIZE`], erasures must be <= [`ECC_SIZE/2`](ECC_SIZE),\n\n/// and `2*errors+erasures` must be <= [`ECC_SIZE`].\n\n///\n\n/// Returns the number of errors and erasures, or [`Error::TooManyErrors`] if the\n\n/// codeword can not be corrected.\n\n///\n\n/// ``` rust\n\n/// # use gf256::rs::rs255w223;\n\n/// let mut codeword = b\"xxxxxxxxxxxx\\\n\n/// xxxx\\xbd\\x15\\x94\\x6e\\x5f\\xb6\\x07\\x12\\x4b\\xbd\\x11\\xd3\\\n\n/// \\x34x\\xa7x\\xd6x\\xfdx\\xc2x\\x81x\\x8ax\\xc9x\".to_vec();\n\n///\n\n/// let erasures = (0..16).collect::<Vec<_>>();\n\n/// assert_eq!(rs255w223::correct(&mut codeword, &erasures), Ok(24));\n\n/// assert_eq!(&codeword, b\"Hello World!\\\n\n/// \\x85\\xa6\\xad\\xf8\\xbd\\x15\\x94\\x6e\\x5f\\xb6\\x07\\x12\\x4b\\xbd\\x11\\xd3\\\n\n/// \\x34\\x14\\xa7\\x06\\xd6\\x25\\xfd\\x84\\xc2\\x61\\x81\\xa7\\x8a\\x15\\xc9\\x35\");\n\n/// ```\n\n///\n\npub fn correct(\n\n codeword: &mut [__u],\n\n erasures: &[usize]\n\n) -> Result<usize, Error> {\n\n let codeword = unsafe { __gf::slice_from_slice_mut_unchecked(codeword) };\n\n\n\n // too many erasures?\n\n if erasures.len() > ECC_SIZE {\n\n return Err(Error::TooManyErrors);\n\n }\n\n\n\n // find syndromes, syndromes of all zero means there are no errors\n\n let S = find_syndromes(codeword);\n\n if S.iter().all(|s| *s == __gf::new(0)) {\n\n return Ok(0);\n\n }\n\n\n\n // find Forney syndromes, hiding known erasures from the syndromes\n\n let forney_S = find_forney_syndromes(codeword, &S, &erasures);\n\n\n", "file_path": "templates/rs.rs", "rank": 49, "score": 148611.4727914552 }, { "content": "/// Correct a mixture of erasures at unknown locations and erasures\n\n/// as known locations, can correct up to 2*errors+erasures <= ECC_SIZE\n\n///\n\npub fn rs_correct(\n\n codeword: &mut [u8],\n\n erasures: &[usize]\n\n) -> Result<usize, RsError> {\n\n let codeword = gf256::slice_from_slice_mut(codeword);\n\n\n\n // too many erasures?\n\n if erasures.len() > ECC_SIZE {\n\n return Err(RsError::TooManyErrors);\n\n }\n\n\n\n // find syndromes, syndromes of all zero means there are no errors\n\n let S = rs_find_syndromes(codeword);\n\n if S.iter().all(|s| *s == gf256(0)) {\n\n return Ok(0);\n\n }\n\n\n\n // find Forney syndromes, hiding known erasures from the syndromes\n\n let forney_S = rs_find_forney_syndromes(codeword, &S, &erasures);\n\n\n", "file_path": "examples/rs.rs", "rank": 50, "score": 146794.84029011652 }, { "content": "/// Repair up to two blocks of failure\n\npub fn raid6_repair<B: AsMut<[u8]>>(\n\n blocks: &mut [B],\n\n p: &mut [u8],\n\n q: &mut [u8],\n\n bad_blocks: &[usize]\n\n) -> Result<(), RaidError> {\n\n let len = p.len();\n\n let p = gf256::slice_from_slice_mut(p);\n\n let q = gf256::slice_from_slice_mut(q);\n\n\n\n if bad_blocks.len() > 2 {\n\n // can't repair\n\n return Err(RaidError::TooManyBadBlocks);\n\n }\n\n\n\n // sort the data blocks without alloc, this is only so we can split\n\n // the mut blocks array safely\n\n let mut bad_blocks_array = [\n\n bad_blocks.get(0).copied().unwrap_or(0),\n\n bad_blocks.get(1).copied().unwrap_or(0),\n", "file_path": "examples/raid.rs", "rank": 51, "score": 146211.72512916522 }, { "content": "/// Repair up to three blocks of failure\n\npub fn raid7_repair<B: AsMut<[u8]>>(\n\n blocks: &mut [B],\n\n p: &mut [u8],\n\n q: &mut [u8],\n\n r: &mut [u8],\n\n bad_blocks: &[usize]\n\n) -> Result<(), RaidError> {\n\n let len = p.len();\n\n let p = gf256::slice_from_slice_mut(p);\n\n let q = gf256::slice_from_slice_mut(q);\n\n let r = gf256::slice_from_slice_mut(r);\n\n\n\n if bad_blocks.len() > 3 {\n\n // can't repair\n\n return Err(RaidError::TooManyBadBlocks);\n\n }\n\n\n\n // sort the data blocks without alloc, this is only so we can split\n\n // the mut blocks array safely\n\n let mut bad_blocks_array = [\n", "file_path": "examples/raid.rs", "rank": 52, "score": 146211.72512916525 }, { "content": "/// Repair up to one block of failure\n\npub fn raid5_repair<B: AsMut<[u8]>>(\n\n blocks: &mut [B],\n\n p: &mut [u8],\n\n bad_blocks: &[usize]\n\n) -> Result<(), RaidError> {\n\n let len = p.len();\n\n\n\n if bad_blocks.len() > 1 {\n\n // can't repair\n\n return Err(RaidError::TooManyBadBlocks);\n\n }\n\n\n\n if bad_blocks[0] < blocks.len() {\n\n // repair using p\n\n let (before, after) = blocks.split_at_mut(bad_blocks[0]);\n\n let (d, after) = after.split_first_mut().unwrap();\n\n let d = d.as_mut();\n\n\n\n for i in 0..len {\n\n d[i] = p[i];\n", "file_path": "examples/raid.rs", "rank": 53, "score": 146211.72512916522 }, { "content": "/// Correct up to [`ECC_SIZE`] erasures at known locations.\n\n///\n\n/// Returns the number of erasures, or [`Error::TooManyErrors`] if the codeword\n\n/// can not be corrected.\n\n///\n\n/// ``` rust\n\n/// # use gf256::rs::rs255w223;\n\n/// let mut codeword = b\"xxxxxxxxxxxx\\\n\n/// xxxxxxxxxxxxxxxx\\\n\n/// xxxx\\xd6\\x25\\xfd\\x84\\xc2\\x61\\x81\\xa7\\x8a\\x15\\xc9\\x35\".to_vec();\n\n///\n\n/// let erasures = (0..32).collect::<Vec<_>>();\n\n/// assert_eq!(rs255w223::correct_erasures(&mut codeword, &erasures), Ok(32));\n\n/// assert_eq!(&codeword, b\"Hello World!\\\n\n/// \\x85\\xa6\\xad\\xf8\\xbd\\x15\\x94\\x6e\\x5f\\xb6\\x07\\x12\\x4b\\xbd\\x11\\xd3\\\n\n/// \\x34\\x14\\xa7\\x06\\xd6\\x25\\xfd\\x84\\xc2\\x61\\x81\\xa7\\x8a\\x15\\xc9\\x35\");\n\n/// ```\n\n///\n\npub fn correct_erasures(\n\n codeword: &mut [__u],\n\n erasures: &[usize]\n\n) -> Result<usize, Error> {\n\n let codeword = unsafe { __gf::slice_from_slice_mut_unchecked(codeword) };\n\n\n\n // too many erasures?\n\n if erasures.len() > ECC_SIZE {\n\n return Err(Error::TooManyErrors);\n\n }\n\n\n\n // find syndromes, syndromes of all zero means there are no errors\n\n let S = find_syndromes(codeword);\n\n if S.iter().all(|s| *s == __gf::new(0)) {\n\n return Ok(0);\n\n }\n\n\n\n // find erasure locator polynomial\n\n let Λ = find_erasure_locator(codeword, &erasures);\n\n\n", "file_path": "templates/rs.rs", "rank": 54, "score": 143956.11377482256 }, { "content": "/// Find f(0) using Lagrange interpolation\n\nfn poly_interpolate(xs: &[__gf], ys: &[__gf]) -> __gf {\n\n assert!(xs.len() == ys.len());\n\n\n\n let mut y = __gf::new(0);\n\n for (i, (x0, y0)) in xs.iter().zip(ys).enumerate() {\n\n let mut li = __gf::new(1);\n\n for (j, (x1, _y1)) in xs.iter().zip(ys).enumerate() {\n\n if i != j {\n\n li *= x1 / (x1-x0);\n\n }\n\n }\n\n\n\n y += li*y0;\n\n }\n\n\n\n y\n\n}\n\n\n", "file_path": "templates/shamir.rs", "rank": 55, "score": 143758.0910976886 }, { "content": "/// Add a block to a RAID array.\n\n///\n\n/// Note the block index must be unique in the array, otherwise the array will\n\n/// become corrupted. This does not update other block indices.\n\n///\n\n/// ``` rust\n\n/// # use ::gf256::raid::*;\n\n/// let mut data = b\"xxxxo World!\".to_vec();\n\n/// let mut datas = data.chunks_mut(4).collect::<Vec<_>>();\n\n/// let mut parity1 = b\"\\x1d\\x4c\\x33\\x4e\".to_vec();\n\n/// let mut parity2 = b\"\\x0b\\xed\\x23\\x5a\".to_vec();\n\n/// let mut parity3 = b\"\\xd2\\x0e\\x4f\\x8b\".to_vec();\n\n///\n\n/// // add\n\n/// let new_data = b\"Jell\";\n\n/// raid7::add(0, new_data, &mut parity1, &mut parity2, &mut parity3);\n\n/// datas[0].copy_from_slice(new_data);\n\n///\n\n/// assert_eq!(&datas[0], b\"Jell\");\n\n/// assert_eq!(&datas[1], b\"o Wo\");\n\n/// assert_eq!(&datas[2], b\"rld!\");\n\n/// assert_eq!(&parity1, b\"\\x57\\x29\\x5f\\x22\");\n\n/// assert_eq!(&parity2, b\"\\x41\\x88\\x4f\\x36\");\n\n/// assert_eq!(&parity3, b\"\\x98\\x6b\\x23\\xe7\");\n\n/// ```\n\n///\n\npub fn add(\n\n j: usize,\n\n new: &[__u],\n\n #[cfg(__if(__parity >= 1))] p: &mut [__u],\n\n #[cfg(__if(__parity >= 2))] q: &mut [__u],\n\n #[cfg(__if(__parity >= 3))] r: &mut [__u],\n\n) {\n\n let len = new.len();\n\n #[cfg(__if(__parity >= 1))] let p = unsafe { __gf::slice_from_slice_mut_unchecked(p) };\n\n #[cfg(__if(__parity >= 2))] let q = unsafe { __gf::slice_from_slice_mut_unchecked(q) };\n\n #[cfg(__if(__parity >= 3))] let r = unsafe { __gf::slice_from_slice_mut_unchecked(r) };\n\n\n\n #[cfg(__if(__parity >= 2))] let g = __gf::GENERATOR.pow(__u::try_from(j).unwrap());\n\n #[cfg(__if(__parity >= 3))] let h = g*g;\n\n for i in 0..len {\n\n // calculate new parity\n\n #[cfg(__if(__parity >= 1))] { p[i] += __gf::from_lossy(new[i]); }\n\n #[cfg(__if(__parity >= 2))] { q[i] += __gf::from_lossy(new[i]) * g; }\n\n #[cfg(__if(__parity >= 3))] { r[i] += __gf::from_lossy(new[i]) * h; }\n\n }\n\n}\n\n\n", "file_path": "templates/raid.rs", "rank": 56, "score": 143109.03259088888 }, { "content": "/// Remove a block from a RAID array.\n\n///\n\n/// Note the block index must already exist in the array, otherwise the\n\n/// array will become corrupted. This does not update other block indices.\n\n///\n\n/// ``` rust\n\n/// # use ::gf256::raid::*;\n\n/// let mut data = b\"Hello World!\".to_vec();\n\n/// let mut datas = data.chunks_mut(4).collect::<Vec<_>>();\n\n/// let mut parity1 = b\"\\x55\\x29\\x5f\\x22\".to_vec();\n\n/// let mut parity2 = b\"\\x43\\x88\\x4f\\x36\".to_vec();\n\n/// let mut parity3 = b\"\\x9a\\x6b\\x23\\xe7\".to_vec();\n\n///\n\n/// // remove \n\n/// raid7::remove(0, datas[0], &mut parity1, &mut parity2, &mut parity3);\n\n///\n\n/// assert_eq!(&datas[1], b\"o Wo\");\n\n/// assert_eq!(&datas[2], b\"rld!\");\n\n/// assert_eq!(&parity1, b\"\\x1d\\x4c\\x33\\x4e\");\n\n/// assert_eq!(&parity2, b\"\\x0b\\xed\\x23\\x5a\");\n\n/// assert_eq!(&parity3, b\"\\xd2\\x0e\\x4f\\x8b\");\n\n/// ```\n\n///\n\npub fn remove(\n\n j: usize,\n\n old: &[__u],\n\n #[cfg(__if(__parity >= 1))] p: &mut [__u],\n\n #[cfg(__if(__parity >= 2))] q: &mut [__u],\n\n #[cfg(__if(__parity >= 3))] r: &mut [__u],\n\n) {\n\n let len = old.len();\n\n #[cfg(__if(__parity >= 1))] let p = unsafe { __gf::slice_from_slice_mut_unchecked(p) };\n\n #[cfg(__if(__parity >= 2))] let q = unsafe { __gf::slice_from_slice_mut_unchecked(q) };\n\n #[cfg(__if(__parity >= 3))] let r = unsafe { __gf::slice_from_slice_mut_unchecked(r) };\n\n\n\n #[cfg(__if(__parity >= 2))] let g = __gf::GENERATOR.pow(__u::try_from(j).unwrap());\n\n #[cfg(__if(__parity >= 3))] let h = g*g;\n\n for i in 0..len {\n\n // calculate new parity\n\n #[cfg(__if(__parity >= 1))] { p[i] -= __gf::from_lossy(old[i]); }\n\n #[cfg(__if(__parity >= 2))] { q[i] -= __gf::from_lossy(old[i]) * g; }\n\n #[cfg(__if(__parity >= 3))] { r[i] -= __gf::from_lossy(old[i]) * h; }\n\n }\n\n}\n\n\n", "file_path": "templates/raid.rs", "rank": 57, "score": 143108.99469169753 }, { "content": "/// Update a block in a RAID array.\n\n///\n\n/// ``` rust\n\n/// # use ::gf256::raid::*;\n\n/// let mut data = b\"Hello World!\".to_vec();\n\n/// let mut datas = data.chunks_mut(4).collect::<Vec<_>>();\n\n/// let mut parity1 = b\"\\x55\\x29\\x5f\\x22\".to_vec();\n\n/// let mut parity2 = b\"\\x43\\x88\\x4f\\x36\".to_vec();\n\n/// let mut parity3 = b\"\\x9a\\x6b\\x23\\xe7\".to_vec();\n\n///\n\n/// // update\n\n/// let new_data = b\"Jell\";\n\n/// raid7::update(0, datas[0], new_data, &mut parity1, &mut parity2, &mut parity3);\n\n/// datas[0].copy_from_slice(new_data);\n\n///\n\n/// assert_eq!(&datas[0], b\"Jell\");\n\n/// assert_eq!(&datas[1], b\"o Wo\");\n\n/// assert_eq!(&datas[2], b\"rld!\");\n\n/// assert_eq!(&parity1, b\"\\x57\\x29\\x5f\\x22\");\n\n/// assert_eq!(&parity2, b\"\\x41\\x88\\x4f\\x36\");\n\n/// assert_eq!(&parity3, b\"\\x98\\x6b\\x23\\xe7\");\n\n/// ```\n\n///\n\npub fn update(\n\n j: usize,\n\n old: &[__u],\n\n new: &[__u],\n\n #[cfg(__if(__parity >= 1))] p: &mut [__u],\n\n #[cfg(__if(__parity >= 2))] q: &mut [__u],\n\n #[cfg(__if(__parity >= 3))] r: &mut [__u],\n\n) {\n\n let len = old.len();\n\n assert!(new.len() == old.len());\n\n #[cfg(__if(__parity >= 1))] let p = unsafe { __gf::slice_from_slice_mut_unchecked(p) };\n\n #[cfg(__if(__parity >= 2))] let q = unsafe { __gf::slice_from_slice_mut_unchecked(q) };\n\n #[cfg(__if(__parity >= 3))] let r = unsafe { __gf::slice_from_slice_mut_unchecked(r) };\n\n\n\n #[cfg(__if(__parity >= 2))] let g = __gf::GENERATOR.pow(__u::try_from(j).unwrap());\n\n #[cfg(__if(__parity >= 3))] let h = g*g;\n\n for i in 0..len {\n\n // calculate new parity\n\n #[cfg(__if(__parity >= 1))] { p[i] += (__gf::from_lossy(new[i])-__gf::from_lossy(old[i])); }\n\n #[cfg(__if(__parity >= 2))] { q[i] += (__gf::from_lossy(new[i])-__gf::from_lossy(old[i])) * g; }\n\n #[cfg(__if(__parity >= 3))] { r[i] += (__gf::from_lossy(new[i])-__gf::from_lossy(old[i])) * h; }\n\n }\n\n}\n\n\n", "file_path": "templates/raid.rs", "rank": 58, "score": 143108.33247791315 }, { "content": "#[crc(polynomial=0x107)]\n\npub fn crc8() {}\n\n\n\n// HD=3,4, up to 32751+16 bits\n", "file_path": "src/crc.rs", "rank": 59, "score": 143084.70878199913 }, { "content": "#[crc(polynomial=0x142f0e1eba9ea3693)]\n\npub fn crc64() {}\n\n\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::p::*;\n\n\n\n #[test]\n\n fn crc() {\n\n assert_eq!(crc8(b\"Hello World!\", 0), 0xb3);\n\n assert_eq!(crc16(b\"Hello World!\", 0), 0x0bbb);\n\n assert_eq!(crc32(b\"Hello World!\", 0), 0x1c291ca3);\n\n assert_eq!(crc32c(b\"Hello World!\", 0), 0xfe6cf1dc);\n\n assert_eq!(crc64(b\"Hello World!\", 0), 0x75045245c9ea6fe2);\n\n }\n\n\n\n // explicit modes\n\n #[crc(polynomial=0x107, naive)] fn crc8_naive() {}\n\n #[crc(polynomial=0x11021, naive)] fn crc16_naive() {}\n", "file_path": "src/crc.rs", "rank": 60, "score": 143084.70878199913 }, { "content": "pub fn main() {\n\n fn hex(xs: &[u8]) -> String {\n\n xs.iter()\n\n .map(|x| format!(\"{:02x}\", x))\n\n .collect()\n\n }\n\n\n\n fn ascii(xs: &[u8]) -> String {\n\n xs.iter()\n\n .map(|x| {\n\n if *x < b' ' || *x > b'~' {\n\n '.'\n\n } else {\n\n char::from(*x)\n\n }\n\n })\n\n .collect::<String>()\n\n }\n\n\n\n\n", "file_path": "examples/raid.rs", "rank": 61, "score": 143084.70878199913 }, { "content": "#[crc(polynomial=0x11021)]\n\npub fn crc16() {}\n\n\n\n// HD=3, up to 4294967263+32 bits\n\n// HD=4, up to 91607+32 bits\n\n// HD=5, up to 2974+32 bits\n\n// HD=6, up to 268+32 bits\n\n// HD=7, up to 171+32 bits\n\n// HD=8, up to 91+32 bits\n", "file_path": "src/crc.rs", "rank": 62, "score": 143084.70878199913 }, { "content": "#[crc(polynomial=0x11edc6f41)]\n\npub fn crc32c() {}\n\n\n\n// HD=3,4, up to 8589606850+64 bits\n\n// HD=5,6, up to 126701+64 bits\n\n// HD=7,7, up to ~33710+64 bits\n", "file_path": "src/crc.rs", "rank": 63, "score": 143084.70878199913 }, { "content": "pub fn p(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n let __crate = crate_path();\n\n\n\n // parse args\n\n let raw_args = parse_macro_input!(args as AttributeArgsWrapper).0;\n\n let args = match PArgs::from_list(&raw_args) {\n\n Ok(args) => args,\n\n Err(err) => {\n\n return err.write_errors().into();\n\n }\n\n };\n\n\n\n let is_usize = match args.is_usize {\n\n Some(is_usize) => is_usize,\n\n None => {\n\n match args.u.as_ref().and_then(guess_is_usize) {\n\n Some(is_usize) => is_usize,\n", "file_path": "gf256-macros/src/p.rs", "rank": 64, "score": 143084.70878199913 }, { "content": "#[crc(polynomial=0x104c11db7)]\n\npub fn crc32() {}\n\n\n\n// HD=3,4, up to 2147483615+32 bits\n\n// HD=5,6, up to 5243+32 bits\n\n// HD=7,8, up to 177+32 bits\n", "file_path": "src/crc.rs", "rank": 65, "score": 143084.70878199913 }, { "content": "/// Correct up to ECC_SIZE erasures at known locations\n\n///\n\npub fn rs_correct_erasures(\n\n codeword: &mut [u8],\n\n erasures: &[usize]\n\n) -> Result<usize, RsError> {\n\n let codeword = gf256::slice_from_slice_mut(codeword);\n\n\n\n // too many erasures?\n\n if erasures.len() > ECC_SIZE {\n\n return Err(RsError::TooManyErrors);\n\n }\n\n\n\n // find syndromes, syndromes of all zero means there are no errors\n\n let S = rs_find_syndromes(codeword);\n\n if S.iter().all(|s| *s == gf256(0)) {\n\n return Ok(0);\n\n }\n\n\n\n // find erasure locator polynomial\n\n let Λ = rs_find_erasure_locator(codeword, &erasures);\n\n\n", "file_path": "examples/rs.rs", "rank": 66, "score": 142686.1897043675 }, { "content": "/// Reconstruct a secret\n\npub fn shamir_reconstruct<S: AsRef<[u8]>>(shares: &[S]) -> Vec<u8> {\n\n // matching lengths?\n\n assert!(\n\n shares.windows(2).all(|ss| ss[0].as_ref().len() == ss[1].as_ref().len()),\n\n \"mismatched share length?\"\n\n );\n\n\n\n let mut secret = vec![];\n\n let len = shares.get(0).map(|s| s.as_ref().len()).unwrap_or(0);\n\n if len == 0 {\n\n return secret;\n\n }\n\n\n\n // x is prepended to each share\n\n let xs = shares.iter().map(|s| gf256(s.as_ref()[0])).collect::<Vec<_>>();\n\n for i in 1..len {\n\n let ys = shares.iter().map(|s| gf256(s.as_ref()[i])).collect::<Vec<_>>();\n\n secret.push(u8::from(shamir_poly_interpolate(&xs, &ys)));\n\n }\n\n\n\n secret\n\n}\n\n\n\n\n", "file_path": "examples/shamir.rs", "rank": 67, "score": 141202.57653613237 }, { "content": "/// Find roots of the error locator polynomial by brute force\n\n///\n\n/// This just means we evaluate Λ(x) for all x locations in our\n\n/// message, if they equal 0, aka are a root, then we found the\n\n/// error location in our message.\n\n///\n\nfn rs_find_error_locations(codeword: &[gf256], Λ: &[gf256]) -> Vec<usize> {\n\n let mut error_locations = vec![];\n\n for j in 0..codeword.len() {\n\n let Xj = gf256::GENERATOR.pow(u8::try_from(codeword.len()-1-j).unwrap());\n\n let zero = rs_poly_eval(&Λ, Xj.recip());\n\n if zero == gf256(0) {\n\n // found an error location!\n\n error_locations.push(j);\n\n }\n\n }\n\n\n\n error_locations\n\n}\n\n\n", "file_path": "examples/rs.rs", "rank": 68, "score": 139758.9500993364 }, { "content": "#[proc_macro_attribute]\n\npub fn p(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n p::p(args, input)\n\n}\n\n\n", "file_path": "gf256-macros/src/lib.rs", "rank": 69, "score": 138330.33156170163 }, { "content": "fn bench_rs(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"rs\");\n\n\n\n // xorshift64 for deterministic random numbers\n\n fn xorshift64(seed: u64) -> impl Iterator<Item=u64> {\n\n let mut x = seed;\n\n iter::repeat_with(move || {\n\n x ^= x << 13;\n\n x ^= x >> 7;\n\n x ^= x << 17;\n\n x\n\n })\n\n }\n\n\n\n // size to bench\n\n const SIZE: usize = 1024*1024;\n\n group.throughput(Throughput::Bytes(SIZE as u64));\n\n\n\n // note we are using Reed-Solomon (20, 12) only because it's what is in our\n\n // example, this isn't necessarily the most efficient geometry, but we\n", "file_path": "benches/rs.rs", "rank": 70, "score": 137009.5189440117 }, { "content": "/// Find the error locator polynomial when we know the location of errors\n\n///\n\n/// ``` text\n\n///\n\n/// Λ(x) = ∏ (1 - Xk*x)\n\n/// k\n\n/// ```\n\n///\n\nfn rs_find_erasure_locator(codeword: &[gf256], erasures: &[usize]) -> Vec<gf256> {\n\n let mut Λ = vec![gf256(0); erasures.len()+1];\n\n let Λ_len = Λ.len();\n\n Λ[Λ_len-1] = gf256(1);\n\n\n\n for j in erasures {\n\n rs_poly_mul(&mut Λ, &[\n\n -gf256::GENERATOR.pow(u8::try_from(codeword.len()-1-j).unwrap()),\n\n gf256(1)\n\n ]);\n\n }\n\n\n\n Λ\n\n}\n\n\n", "file_path": "examples/rs.rs", "rank": 71, "score": 136240.6320184845 }, { "content": "pub fn raid(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n let __crate = crate_path();\n\n\n\n // parse args\n\n let raw_args = parse_macro_input!(args as AttributeArgsWrapper).0;\n\n let args = match RaidArgs::from_list(&raw_args) {\n\n Ok(args) => args,\n\n Err(err) => {\n\n return err.write_errors().into();\n\n }\n\n };\n\n\n\n // only up to 2 parity blocks are currently supported\n\n assert!(args.parity <= 3);\n\n\n\n // parse type\n\n let ty = parse_macro_input!(input as syn::ItemMod);\n", "file_path": "gf256-macros/src/raid.rs", "rank": 72, "score": 134055.83403616227 }, { "content": "#[cfg(feature=\"raid\")]\n\n#[proc_macro_attribute]\n\npub fn raid(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n raid::raid(args, input)\n\n}\n\n\n", "file_path": "gf256-macros/src/lib.rs", "rank": 73, "score": 134055.83403616227 }, { "content": "pub fn crc(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n let __crate = crate_path();\n\n\n\n // parse args\n\n let raw_args = parse_macro_input!(args as AttributeArgsWrapper).0;\n\n let args = match CrcArgs::from_list(&raw_args) {\n\n Ok(args) => args,\n\n Err(err) => {\n\n return err.write_errors().into();\n\n }\n\n };\n\n\n\n let width = {\n\n // default to 1 less than the width of the given polynomial, this\n\n // is the only width that would really work\n\n let polynomial = args.polynomial.0;\n\n (128-usize::try_from(polynomial.leading_zeros()).unwrap()) - 1\n", "file_path": "gf256-macros/src/crc.rs", "rank": 74, "score": 134055.83403616227 }, { "content": "pub fn shamir(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n let __crate = crate_path();\n\n\n\n // parse args\n\n let raw_args = parse_macro_input!(args as AttributeArgsWrapper).0;\n\n let args = match ShamirArgs::from_list(&raw_args) {\n\n Ok(args) => args,\n\n Err(err) => {\n\n return err.write_errors().into();\n\n }\n\n };\n\n\n\n // parse type\n\n let ty = parse_macro_input!(input as syn::ItemMod);\n\n let attrs = ty.attrs;\n\n let vis = ty.vis;\n\n let shamir = ty.ident;\n", "file_path": "gf256-macros/src/shamir.rs", "rank": 75, "score": 134055.83403616227 }, { "content": "#[cfg(feature=\"shamir\")]\n\n#[proc_macro_attribute]\n\npub fn shamir(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n shamir::shamir(args, input)\n\n}\n\n\n", "file_path": "gf256-macros/src/lib.rs", "rank": 76, "score": 134055.83403616227 }, { "content": "#[cfg(feature=\"crc\")]\n\n#[proc_macro_attribute]\n\npub fn crc(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n crc::crc(args, input)\n\n}\n\n\n", "file_path": "gf256-macros/src/lib.rs", "rank": 77, "score": 134055.83403616227 }, { "content": "pub fn lfsr(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n let __crate = crate_path();\n\n\n\n // parse args\n\n let raw_args = parse_macro_input!(args as AttributeArgsWrapper).0;\n\n let args = match LfsrArgs::from_list(&raw_args) {\n\n Ok(args) => args,\n\n Err(err) => {\n\n return err.write_errors().into();\n\n }\n\n };\n\n\n\n let width = {\n\n // default to 1 less than the width of the given polynomial, this\n\n // is the only width that would really work\n\n let polynomial = args.polynomial.0;\n\n (128-usize::try_from(polynomial.leading_zeros()).unwrap()) - 1\n", "file_path": "gf256-macros/src/lfsr.rs", "rank": 78, "score": 134055.83403616227 }, { "content": "#[cfg(feature=\"lfsr\")]\n\n#[proc_macro_attribute]\n\npub fn lfsr(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream\n\n) -> proc_macro::TokenStream {\n\n lfsr::lfsr(args, input)\n\n}\n\n\n", "file_path": "gf256-macros/src/lib.rs", "rank": 79, "score": 134055.83403616227 }, { "content": "/// Generate a random polynomial of a given degree, fixing f(0) = secret\n\nfn shamir_poly_random(secret: gf256, degree: usize) -> Vec<gf256> {\n\n let mut rng = rand::thread_rng();\n\n let mut f = vec![secret];\n\n for _ in 0..degree {\n\n f.push(gf256(rng.gen_range(1..=255)));\n\n }\n\n f\n\n}\n\n\n", "file_path": "examples/shamir.rs", "rank": 80, "score": 130830.29959834254 }, { "content": "/// Find all irreducible polynomials of a given bit-width\n\npub fn irreducibles(width: usize) -> impl Iterator<Item=p128> {\n\n // find irreducible polynomials via brute force\n\n ((1u128 << (width-1)) .. (1u128 << width))\n\n .map(p128)\n\n .filter(|p| is_irreducible(*p))\n\n}\n\n\n", "file_path": "examples/find-p.rs", "rank": 81, "score": 130660.95198641904 }, { "content": "fn bench_crc(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"crc\");\n\n\n\n // xorshift64 for deterministic random numbers\n\n fn xorshift64(seed: u64) -> impl Iterator<Item=u64> {\n\n let mut x = seed;\n\n iter::repeat_with(move || {\n\n x ^= x << 13;\n\n x ^= x >> 7;\n\n x ^= x << 17;\n\n x\n\n })\n\n }\n\n\n\n // size to bench\n\n const SIZE: usize = 1024*1024;\n\n group.throughput(Throughput::Bytes(SIZE as u64));\n\n\n\n // naive crc\n\n let mut xs = xorshift64(42).map(|x| x as u8);\n", "file_path": "benches/crc.rs", "rank": 82, "score": 128379.1632758065 }, { "content": "fn bench_xmul(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"xmul\");\n\n\n\n // xorshift64 for deterministic random numbers\n\n fn xorshift64(seed: u64) -> impl Iterator<Item=u64> {\n\n let mut x = seed;\n\n iter::repeat_with(move || {\n\n x ^= x << 13;\n\n x ^= x >> 7;\n\n x ^= x << 17;\n\n x\n\n })\n\n }\n\n\n\n // naive xmul\n\n let mut xs = xorshift64(42).map(p64);\n\n let mut ys = xorshift64(42*42).map(p64);\n\n group.bench_function(\"naive_xmul\", |b| b.iter_batched(\n\n || (xs.next().unwrap(), ys.next().unwrap()),\n\n |(x, y)| x.naive_wrapping_mul(y),\n", "file_path": "benches/xmul.rs", "rank": 83, "score": 128379.1632758065 }, { "content": "fn bench_find_p(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"find_p\");\n\n\n\n // find 9-bit irreducible polynomials\n\n let mut irreducibles = iter::repeat_with(|| find_p::irreducibles(9)).flatten();\n\n group.bench_function(\"find_irreducibles_9\", |b| b.iter(\n\n || irreducibles.next().unwrap(),\n\n ));\n\n\n\n // find 8-bit generators\n\n let polynomial = irreducibles.next().unwrap();\n\n let mut generators = iter::repeat_with(|| find_p::generators(polynomial)).flatten();\n\n group.bench_function(\"find_generators_8\", |b| b.iter(\n\n || generators.next().unwrap(),\n\n ));\n\n\n\n\n\n // find 17-bit irreducible polynomials\n\n let mut irreducibles = iter::repeat_with(|| find_p::irreducibles(17)).flatten();\n\n group.bench_function(\"find_irreducibles_17\", |b| b.iter(\n", "file_path": "benches/find-p.rs", "rank": 84, "score": 128379.1632758065 }, { "content": "fn bench_shamir(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"shamir\");\n\n\n\n // xorshift64 for deterministic random numbers\n\n fn xorshift64(seed: u64) -> impl Iterator<Item=u64> {\n\n let mut x = seed;\n\n iter::repeat_with(move || {\n\n x ^= x << 13;\n\n x ^= x >> 7;\n\n x ^= x << 17;\n\n x\n\n })\n\n }\n\n\n\n // size to bench\n\n const SIZE: usize = 1024;\n\n const N: usize = 5;\n\n group.throughput(Throughput::Bytes((N*SIZE) as u64));\n\n\n\n // benchmark the time it takes to generate shares\n", "file_path": "benches/shamir.rs", "rank": 85, "score": 128379.1632758065 }, { "content": "fn bench_lfsr(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"lfsr\");\n\n\n\n // size to bench\n\n const SIZE: usize = 1024*1024;\n\n group.throughput(Throughput::Bytes(SIZE as u64));\n\n let mut buffer = vec![0u64; SIZE/size_of::<u64>()];\n\n\n\n // xorshift timing\n\n let mut xorshift64 = Xorshift64::new(0x123456789abcdef0);\n\n group.bench_function(\"xorshift64\", |b| b.iter(\n\n || buffer.fill_with(|| xorshift64.next())\n\n ));\n\n\n\n // lfsr64 timings\n\n let mut lfs64_naive = lfsr::Lfsr64Naive::new(0x123456789abcdef0);\n\n group.bench_function(\"lfsr64_naive\", |b| b.iter(\n\n || buffer.fill_with(|| lfs64_naive.next(64))\n\n ));\n\n\n", "file_path": "benches/lfsr.rs", "rank": 86, "score": 128379.1632758065 }, { "content": "fn bench_raid(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"raid\");\n\n\n\n // xorshift64 for deterministic random numbers\n\n fn xorshift64(seed: u64) -> impl Iterator<Item=u64> {\n\n let mut x = seed;\n\n iter::repeat_with(move || {\n\n x ^= x << 13;\n\n x ^= x >> 7;\n\n x ^= x << 17;\n\n x\n\n })\n\n }\n\n\n\n // size to bench\n\n const SIZE: usize = 1024*1024;\n\n const COUNT: usize = 5;\n\n\n\n // format\n\n let mut xs = xorshift64(42).map(|x| x as u8);\n", "file_path": "benches/raid.rs", "rank": 87, "score": 128379.1632758065 }, { "content": "/// Find syndromes, which should be zero if there are no errors\n\n///\n\n/// ``` text\n\n/// Si = c'(g^i)\n\n/// ```\n\n///\n\nfn rs_find_syndromes(f: &[gf256]) -> Vec<gf256> {\n\n let mut S = vec![];\n\n for i in 0..ECC_SIZE {\n\n S.push(\n\n rs_poly_eval(f, gf256::GENERATOR.pow(u8::try_from(i).unwrap()))\n\n );\n\n }\n\n S\n\n}\n\n\n", "file_path": "examples/rs.rs", "rank": 88, "score": 127415.2407123723 }, { "content": "/// Determine if message is correct\n\n///\n\n/// Note this is quite a bit faster than correcting the errors\n\n///\n\npub fn rs_is_correct(codeword: &[u8]) -> bool {\n\n let codeword = gf256::slice_from_slice(codeword);\n\n\n\n // find syndromes, syndromes of all zero means there are no errors\n\n let syndromes = rs_find_syndromes(codeword);\n\n syndromes.iter().all(|s| *s == gf256(0))\n\n}\n\n\n", "file_path": "examples/rs.rs", "rank": 89, "score": 125261.34460745865 }, { "content": "/// Divide polynomials via synthetic division\n\n///\n\n/// Note both the quotient and remainder are left in the dividend\n\n///\n\nfn rs_poly_divrem(f: &mut [gf256], g: &[gf256]) {\n\n debug_assert!(f.len() >= g.len());\n\n\n\n // find leading coeff to normalize g, note you could avoid\n\n // this if g is already normalized\n\n let leading_coeff = g[0];\n\n\n\n for i in 0 .. (f.len() - g.len() + 1) {\n\n if f[i] != gf256(0) {\n\n f[i] /= leading_coeff;\n\n\n\n for j in 1..g.len() {\n\n f[i+j] -= f[i] * g[j];\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/rs.rs", "rank": 90, "score": 124097.57594906978 }, { "content": "/// Multiply a polynomial by a scalar\n\nfn rs_poly_scale(f: &mut [gf256], c: gf256) {\n\n for i in 0..f.len() {\n\n f[i] *= c;\n\n }\n\n}\n\n\n", "file_path": "examples/rs.rs", "rank": 91, "score": 124097.57594906978 }, { "content": "/// Add two polynomials together\n\nfn rs_poly_add(f: &mut [gf256], g: &[gf256]) {\n\n debug_assert!(f.len() >= g.len());\n\n\n\n // note g.len() may be <= f.len()!\n\n for i in 0..f.len() {\n\n f[f.len()-1-i] += g[g.len()-1-i];\n\n }\n\n}\n\n\n", "file_path": "examples/rs.rs", "rank": 92, "score": 124097.57594906978 }, { "content": "/// Multiply two polynomials together\n\nfn rs_poly_mul(f: &mut [gf256], g: &[gf256]) {\n\n debug_assert!(f[..g.len()-1].iter().all(|x| *x == gf256(0)));\n\n\n\n // This is in-place, at the cost of being a bit confusing,\n\n // note that we only write to i+j, and i+j is always >= i\n\n //\n\n // What makes this confusing is that f and g are both big-endian\n\n // polynomials, reverse order from what you would expect. And in\n\n // order to leverage the i+j non-overlap, we need to write to \n\n // f in reverse-reverse order.\n\n //\n\n for i in (0..f.len()-g.len()+1).rev() {\n\n let fi = f[f.len()-1-i];\n\n f[f.len()-1-i] = gf256(0);\n\n\n\n for j in 0..g.len() {\n\n f[f.len()-1-(i+j)] += fi * g[g.len()-1-j];\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/rs.rs", "rank": 93, "score": 124097.57594906978 }, { "content": "/// Is a given polynomial irreducible?\n\n///\n\n/// This is roughly equivalent to asking if a number is prime\n\n///\n\npub fn is_irreducible(p: p128) -> bool {\n\n // some corner cases\n\n if p == p128(0) || p == p128(1) {\n\n return false;\n\n }\n\n\n\n // check for 2 so we can skip all multiples of 2, seems like\n\n // a minor optimization but speeds things up by ~2x\n\n if p % p128(2) == p128(0) {\n\n return p == p128(2);\n\n }\n\n\n\n // test division of all polynomials < sqrt(p), or a simpler\n\n // heuristic of < 2^(log2(p)/2)\n\n let npw2 = 128 - (u128::from(p)-1).leading_zeros();\n\n let roughsqrt = 1u128 << ((npw2+1)/2);\n\n\n\n for x in (3..roughsqrt).step_by(2).map(p128) {\n\n if p % x == p128(0) {\n\n return false;\n\n }\n\n }\n\n\n\n true\n\n}\n\n\n", "file_path": "examples/find-p.rs", "rank": 94, "score": 123799.05534607114 }, { "content": "/// Iteratively find the error locator polynomial using the\n\n/// Berlekamp-Massey algorithm when we don't know the location of errors\n\n///\n\nfn rs_find_error_locator(S: &[gf256]) -> Vec<gf256> {\n\n // the current estimate for the error locator polynomial\n\n let mut Λ = vec![gf256(0); S.len()+1];\n\n let Λ_len = Λ.len();\n\n Λ[Λ_len-1] = gf256(1);\n\n\n\n let mut prev_Λ = Λ.clone();\n\n let mut delta_Λ = Λ.clone();\n\n\n\n // the current estimate for the number of errors\n\n let mut v = 0;\n\n\n\n for i in 0..S.len() {\n\n let mut delta = S[i];\n\n for j in 1..v+1 {\n\n delta += Λ[Λ.len()-1-j] * S[i-j];\n\n }\n\n\n\n prev_Λ.rotate_left(1);\n\n\n", "file_path": "examples/rs.rs", "rank": 96, "score": 121351.5003630307 }, { "content": "#[derive(Debug, FromMeta)]\n\nstruct PArgs {\n\n #[darling(default)]\n\n width: Option<usize>,\n\n #[darling(default, rename=\"usize\")]\n\n is_usize: Option<bool>,\n\n #[darling(default)]\n\n u: Option<syn::Path>,\n\n #[darling(default)]\n\n i: Option<syn::Path>,\n\n\n\n #[darling(default)]\n\n naive: bool,\n\n #[darling(default)]\n\n xmul: Option<darling::util::Override<syn::Path>>,\n\n}\n\n\n", "file_path": "gf256-macros/src/p.rs", "rank": 97, "score": 118624.4737289094 }, { "content": "fn bench_lfsr_compressability(c: &mut Criterion<Compressability>) {\n\n let mut group = c.benchmark_group(\"lfsr\");\n\n\n\n // size to bench\n\n const SIZE: usize = 1024*1024;\n\n let mut buffer = vec![0; SIZE];\n\n\n\n // xorshift compressability\n\n let mut xorshift64 = Xorshift64::new(0x123456789abcdef0);\n\n group.bench_function(\"xorshift64_compressability\", |b| b.iter_custom(\n\n |iters| {\n\n let mut sum = 0.0;\n\n for _ in 0..iters { \n\n buffer.fill_with(|| xorshift64.next() as u8);\n\n let mut comp = DeflateEncoder::new(Vec::new(), Compression::best());\n\n comp.write_all(&buffer).unwrap();\n\n let comp = comp.finish().unwrap();\n\n sum += ((SIZE as f64) - (comp.len() as f64)) / (SIZE as f64);\n\n }\n\n sum\n", "file_path": "benches/lfsr.rs", "rank": 98, "score": 117945.00232332372 }, { "content": "/// Is a given polynomial a primitive element, aka generator, of the\n\n/// finite-field defined by modulo the given irreducible polynomial?\n\n///\n\n/// That's a mouthful, the question being asked here is do successive\n\n/// powers of the generator iterate over every non-zero element in the\n\n/// finite-field defined by the given irreducible polynomial\n\n///\n\npub fn is_generator(g: p128, p: p128) -> bool {\n\n if g == p128(0) {\n\n return false;\n\n }\n\n\n\n // Define a few operations over the finite field defined by the irreducible\n\n // polynomial p. Normally we could use our gf-types, except this tool\n\n // is being used to generate the polynomials for the gf-types, so...\n\n //\n\n let width = (128-p.leading_zeros()) - 1;\n\n\n\n // Multiplication uses carry-less multiplicatio modulo our irreducible\n\n // polynomial\n\n let gfmul = |a: p128, b: p128| -> p128 {\n\n (a * b) % p\n\n };\n\n\n\n // Exponentiation via squaring\n\n let gfpow = |mut a: p128, mut exp: u128| -> p128 {\n\n let mut x = p128(1);\n", "file_path": "examples/find-p.rs", "rank": 99, "score": 115030.46898072811 } ]
Rust
src/cli/collections.rs
RekhaDS/couchbase-shell
81bdfd4a8e62bdb477857d31f7704d07d1f35da6
use crate::cli::util::{cluster_identifiers_from, validate_is_not_cloud}; use crate::client::ManagementRequest; use crate::state::State; use async_trait::async_trait; use log::debug; use nu_engine::CommandArgs; use nu_errors::ShellError; use nu_protocol::{Signature, SyntaxShape, TaggedDictBuilder, UntaggedValue, Value}; use nu_source::Tag; use nu_stream::OutputStream; use serde_derive::Deserialize; use std::ops::Add; use std::sync::{Arc, Mutex}; use std::time::Duration; use tokio::time::Instant; pub struct Collections { state: Arc<Mutex<State>>, } impl Collections { pub fn new(state: Arc<Mutex<State>>) -> Self { Self { state } } } #[async_trait] impl nu_engine::WholeStreamCommand for Collections { fn name(&self) -> &str { "collections" } fn signature(&self) -> Signature { Signature::build("collections") .named( "bucket", SyntaxShape::String, "the name of the bucket", None, ) .named("scope", SyntaxShape::String, "the name of the scope", None) .named( "clusters", SyntaxShape::String, "the clusters to query against", None, ) } fn usage(&self) -> &str { "Fetches collections through the HTTP API" } fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> { collections_get(self.state.clone(), args) } } fn collections_get( state: Arc<Mutex<State>>, args: CommandArgs, ) -> Result<OutputStream, ShellError> { let ctrl_c = args.ctrl_c(); let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?; let guard = state.lock().unwrap(); let scope: Option<String> = args.get_flag("scope")?; let mut results: Vec<Value> = vec![]; for identifier in cluster_identifiers { let active_cluster = match guard.clusters().get(&identifier) { Some(c) => c, None => { return Err(ShellError::unexpected("Cluster not found")); } }; validate_is_not_cloud( active_cluster, "collections get cannot be run against cloud clusters", )?; let bucket = match args.get_flag("bucket")? { Some(v) => v, None => match active_cluster.active_bucket() { Some(s) => s, None => { return Err(ShellError::unexpected( "Could not auto-select a bucket - please use --bucket instead".to_string(), )); } }, }; debug!( "Running collections get for bucket {:?}, scope {:?}", &bucket, &scope ); let response = active_cluster.cluster().http_client().management_request( ManagementRequest::GetCollections { bucket }, Instant::now().add(active_cluster.timeouts().management_timeout()), ctrl_c.clone(), )?; let manifest: Manifest = match response.status() { 200 => match serde_json::from_str(response.content()) { Ok(m) => m, Err(e) => { return Err(ShellError::unexpected(format!( "Failed to decode response body {}", e, ))); } }, _ => { return Err(ShellError::unexpected(response.content())); } }; for scope_res in manifest.scopes { if let Some(scope_name) = &scope { if scope_name != &scope_res.name { continue; } } let collections = scope_res.collections; if collections.is_empty() { let mut collected = TaggedDictBuilder::new(Tag::default()); collected.insert_value("scope", scope_res.name.clone()); collected.insert_value("collection", ""); collected.insert_value("max_expiry", UntaggedValue::duration(0)); collected.insert_value("cluster", identifier.clone()); results.push(collected.into_value()); continue; } for collection in collections { let mut collected = TaggedDictBuilder::new(Tag::default()); collected.insert_value("scope", scope_res.name.clone()); collected.insert_value("collection", collection.name); collected.insert_value( "max_expiry", UntaggedValue::duration(Duration::from_secs(collection.max_expiry).as_nanos()), ); collected.insert_value("cluster", identifier.clone()); results.push(collected.into_value()); } } } Ok(OutputStream::from(results)) } #[derive(Debug, Deserialize)] pub struct ManifestCollection { pub uid: String, pub name: String, #[serde(rename = "maxTTL")] pub max_expiry: u64, } #[derive(Debug, Deserialize)] pub struct ManifestScope { pub uid: String, pub name: String, pub collections: Vec<ManifestCollection>, } #[derive(Debug, Deserialize)] pub struct Manifest { pub uid: String, pub scopes: Vec<ManifestScope>, }
use crate::cli::util::{cluster_identifiers_from, validate_is_not_cloud}; use crate::client::ManagementRequest; use crate::state::State; use async_trait::async_trait; use log::debug; use nu_engine::CommandArgs; use nu_errors::ShellError; use nu_protocol::{Signature, SyntaxShape, TaggedDictBuilder, UntaggedValue, Value}; use nu_source::Tag; use nu_stream::OutputStream; use serde_derive::Deserialize; use std::ops::Add; use std::sync::{Arc, Mutex}; use std::time::Duration; use tokio::time::Instant; pub struct Collections { state: Arc<Mutex<State>>, } impl Collections { pub fn new(state: Arc<Mutex<State>>) -> Self { Self { state } } } #[async_trait] impl nu_engine::WholeStreamCommand for Collections { fn name(&self) -> &str { "collections" } fn signature(&self) -> Signature { Signature::build("collections") .named( "bucket", SyntaxShape::String, "the name of the bucket", None, ) .named("scope", Synta
fn usage(&self) -> &str { "Fetches collections through the HTTP API" } fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> { collections_get(self.state.clone(), args) } } fn collections_get( state: Arc<Mutex<State>>, args: CommandArgs, ) -> Result<OutputStream, ShellError> { let ctrl_c = args.ctrl_c(); let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?; let guard = state.lock().unwrap(); let scope: Option<String> = args.get_flag("scope")?; let mut results: Vec<Value> = vec![]; for identifier in cluster_identifiers { let active_cluster = match guard.clusters().get(&identifier) { Some(c) => c, None => { return Err(ShellError::unexpected("Cluster not found")); } }; validate_is_not_cloud( active_cluster, "collections get cannot be run against cloud clusters", )?; let bucket = match args.get_flag("bucket")? { Some(v) => v, None => match active_cluster.active_bucket() { Some(s) => s, None => { return Err(ShellError::unexpected( "Could not auto-select a bucket - please use --bucket instead".to_string(), )); } }, }; debug!( "Running collections get for bucket {:?}, scope {:?}", &bucket, &scope ); let response = active_cluster.cluster().http_client().management_request( ManagementRequest::GetCollections { bucket }, Instant::now().add(active_cluster.timeouts().management_timeout()), ctrl_c.clone(), )?; let manifest: Manifest = match response.status() { 200 => match serde_json::from_str(response.content()) { Ok(m) => m, Err(e) => { return Err(ShellError::unexpected(format!( "Failed to decode response body {}", e, ))); } }, _ => { return Err(ShellError::unexpected(response.content())); } }; for scope_res in manifest.scopes { if let Some(scope_name) = &scope { if scope_name != &scope_res.name { continue; } } let collections = scope_res.collections; if collections.is_empty() { let mut collected = TaggedDictBuilder::new(Tag::default()); collected.insert_value("scope", scope_res.name.clone()); collected.insert_value("collection", ""); collected.insert_value("max_expiry", UntaggedValue::duration(0)); collected.insert_value("cluster", identifier.clone()); results.push(collected.into_value()); continue; } for collection in collections { let mut collected = TaggedDictBuilder::new(Tag::default()); collected.insert_value("scope", scope_res.name.clone()); collected.insert_value("collection", collection.name); collected.insert_value( "max_expiry", UntaggedValue::duration(Duration::from_secs(collection.max_expiry).as_nanos()), ); collected.insert_value("cluster", identifier.clone()); results.push(collected.into_value()); } } } Ok(OutputStream::from(results)) } #[derive(Debug, Deserialize)] pub struct ManifestCollection { pub uid: String, pub name: String, #[serde(rename = "maxTTL")] pub max_expiry: u64, } #[derive(Debug, Deserialize)] pub struct ManifestScope { pub uid: String, pub name: String, pub collections: Vec<ManifestCollection>, } #[derive(Debug, Deserialize)] pub struct Manifest { pub uid: String, pub scopes: Vec<ManifestScope>, }
xShape::String, "the name of the scope", None) .named( "clusters", SyntaxShape::String, "the clusters to query against", None, ) }
function_block-function_prefixed
[ { "content": "pub fn signature_dict(signature: Signature, tag: impl Into<Tag>) -> Value {\n\n let tag = tag.into();\n\n let mut sig = TaggedListBuilder::new(&tag);\n\n\n\n for arg in signature.positional.iter() {\n\n let is_required = matches!(arg.0, PositionalType::Mandatory(_, _));\n\n\n\n sig.push_value(for_spec(arg.0.name(), \"argument\", is_required, &tag));\n\n }\n\n\n\n if signature.rest_positional.is_some() {\n\n let is_required = false;\n\n sig.push_value(for_spec(\"rest\", \"argument\", is_required, &tag));\n\n }\n\n\n\n for (name, ty) in signature.named.iter() {\n\n match ty.0 {\n\n NamedType::Mandatory(_, _) => sig.push_value(for_spec(name, \"flag\", true, &tag)),\n\n NamedType::Optional(_, _) => sig.push_value(for_spec(name, \"flag\", false, &tag)),\n\n NamedType::Switch(_) => sig.push_value(for_spec(name, \"switch\", false, &tag)),\n\n }\n\n }\n\n\n\n sig.into_value()\n\n}\n\n\n", "file_path": "src/cli/help.rs", "rank": 0, "score": 227482.08953787154 }, { "content": "fn for_spec(name: &str, ty: &str, required: bool, tag: impl Into<Tag>) -> Value {\n\n let tag = tag.into();\n\n\n\n let mut spec = TaggedDictBuilder::new(tag);\n\n\n\n spec.insert_untagged(\"name\", UntaggedValue::string(name));\n\n spec.insert_untagged(\"type\", UntaggedValue::string(ty));\n\n spec.insert_untagged(\n\n \"required\",\n\n UntaggedValue::string(if required { \"yes\" } else { \"no\" }),\n\n );\n\n\n\n spec.into_value()\n\n}\n\n\n", "file_path": "src/cli/help.rs", "rank": 1, "score": 220809.01501452085 }, { "content": "pub fn in_directory(str: impl AsRef<Path>) -> String {\n\n let path = str.as_ref();\n\n let path = if path.is_relative() {\n\n root().join(path)\n\n } else {\n\n path.to_path_buf()\n\n };\n\n\n\n path.display().to_string()\n\n}\n", "file_path": "tests/common/fs.rs", "rank": 2, "score": 199879.60853912742 }, { "content": "fn buckets(args: CommandArgs, state: Arc<Mutex<State>>) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let bucket_name = args.req(0)?;\n\n\n\n let guard = state.lock().unwrap();\n\n let active_cluster = guard.active_cluster();\n\n let cluster = active_cluster.cluster();\n\n\n\n validate_is_not_cloud(\n\n active_cluster,\n\n \"buckets config cannot be run against cloud clusters\",\n\n )?;\n\n\n\n let response = cluster.http_client().management_request(\n\n ManagementRequest::GetBucket { name: bucket_name },\n\n Instant::now().add(active_cluster.timeouts().management_timeout()),\n\n ctrl_c,\n\n )?;\n\n\n\n let content = serde_json::from_str(response.content())?;\n\n let converted = convert_json_value_to_nu_value(&content, Tag::default())?;\n\n\n\n Ok(vec![converted].into())\n\n}\n", "file_path": "src/cli/buckets_config.rs", "rank": 3, "score": 185726.9300143987 }, { "content": "fn use_cmd(args: CommandArgs, state: Arc<Mutex<State>>) -> Result<OutputStream, ShellError> {\n\n let show_cloud = args.has_flag(\"cloud\");\n\n\n\n let guard = state.lock().unwrap();\n\n let active = guard.active_cluster();\n\n let project = match guard.active_cloud() {\n\n Ok(c) => c.active_project().unwrap_or_else(|| \"\".to_string()),\n\n Err(_e) => \"\".to_string(),\n\n };\n\n let mut using_now = TaggedDictBuilder::new(Tag::default());\n\n if show_cloud {\n\n using_now.insert_value(\n\n \"cloud_organization\",\n\n guard\n\n .active_cloud_org_name()\n\n .unwrap_or_else(|| String::from(\"\")),\n\n );\n\n using_now.insert_value(\n\n \"cloud\",\n\n guard\n", "file_path": "src/cli/use_cmd.rs", "rank": 4, "score": 182990.71389938876 }, { "content": "fn buckets_flush(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let name: String = args.req(0)?;\n\n let bucket: String = args.get_flag(\"bucket\")?.unwrap_or_else(|| \"\".into());\n\n\n\n debug!(\"Running buckets flush for bucket {:?}\", &bucket);\n\n\n\n for identifier in cluster_identifiers {\n\n let guard = state.lock().unwrap();\n\n let cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n validate_is_not_cloud(\n\n cluster,\n\n \"buckets flush cannot be run against cloud clusters\",\n", "file_path": "src/cli/buckets_flush.rs", "rank": 5, "score": 182750.4935539816 }, { "content": "fn buckets_drop(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let name: String = args.req(0)?;\n\n let guard = state.lock().unwrap();\n\n\n\n debug!(\"Running buckets drop for bucket {:?}\", &name);\n\n\n\n for identifier in cluster_identifiers {\n\n let cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n\n\n let result: HttpResponse;\n\n if let Some(plane) = cluster.cloud_org() {\n\n let cloud = guard.cloud_org_for_cluster(plane)?.client();\n", "file_path": "src/cli/buckets_drop.rs", "rank": 6, "score": 182750.4935539816 }, { "content": "fn buckets_get(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let bucket: String = args.req(0)?;\n\n\n\n debug!(\"Running buckets get for bucket {:?}\", &bucket);\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let guard = state.lock().unwrap();\n\n let cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n\n\n if let Some(plane) = cluster.cloud_org() {\n\n let cloud = guard.cloud_org_for_cluster(plane)?.client();\n", "file_path": "src/cli/buckets_get.rs", "rank": 7, "score": 182750.4935539816 }, { "content": "fn buckets_update(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let name: String = args.req(0)?;\n\n let ram = args.get_flag(\"ram\")?;\n\n let replicas = args.get_flag(\"replicas\")?;\n\n let flush = args.get_flag(\"flush\")?.unwrap_or(false);\n\n let durability = args.get_flag(\"durability\")?;\n\n let expiry = args.get_flag(\"expiry\")?;\n\n\n\n debug!(\"Running buckets update for bucket {}\", &name);\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n", "file_path": "src/cli/buckets_update.rs", "rank": 8, "score": 182750.4935539816 }, { "content": "fn buckets_create(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let name: String = args.req(0)?;\n\n let ram = args.req(1)?;\n\n\n\n let bucket_type: Option<String> = args.get_flag(\"type\")?;\n\n let replicas: Option<i32> = args.get_flag(\"replicas\")?;\n\n let flush = args.get_flag(\"flush\")?.unwrap_or(false);\n\n let durability: Option<String> = args.get_flag(\"durability\")?;\n\n let expiry = args.get_flag(\"expiry\")?;\n\n\n\n debug!(\"Running buckets create for bucket {}\", &name);\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n let mut builder = BucketSettingsBuilder::new(name).ram_quota_mb(ram);\n\n if let Some(ref t) = bucket_type {\n\n builder = builder.bucket_type(match BucketType::try_from(t.as_str()) {\n\n Ok(bt) => bt,\n", "file_path": "src/cli/buckets_create.rs", "rank": 9, "score": 182750.4935539816 }, { "content": "pub fn convert_json_value_to_nu_value(\n\n v: &serde_json::Value,\n\n tag: impl Into<Tag>,\n\n) -> Result<Value, ShellError> {\n\n let tag = tag.into();\n\n let span = tag.span;\n\n\n\n let result = match v {\n\n serde_json::Value::Null => UntaggedValue::Primitive(Primitive::Nothing).into_value(&tag),\n\n serde_json::Value::Bool(b) => UntaggedValue::boolean(*b).into_value(&tag),\n\n serde_json::Value::Number(n) => {\n\n if n.is_i64() {\n\n if let Some(nas) = n.as_i64() {\n\n UntaggedValue::int(nas).into_value(&tag)\n\n } else {\n\n return Err(ShellError::unexpected(format!(\n\n \"Could not get value as number {}\",\n\n v\n\n )));\n\n }\n", "file_path": "src/cli/util.rs", "rank": 10, "score": 176149.14656498074 }, { "content": "fn dataverses(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let statement = \"SELECT `Bucket`.* FROM `Metadata`.`Bucket`\";\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n\n\n let guard = state.lock().unwrap();\n\n debug!(\"Running analytics query {}\", &statement);\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n\n\n let response = active_cluster\n\n .cluster()\n", "file_path": "src/cli/analytics_buckets.rs", "rank": 11, "score": 172851.51301340124 }, { "content": "pub fn convert_row_to_nu_value(\n\n v: &serde_json::Value,\n\n tag: impl Into<Tag>,\n\n cluster_identifier: String,\n\n) -> Result<Value, ShellError> {\n\n let tag = tag.into();\n\n\n\n match v {\n\n serde_json::Value::Object(o) => {\n\n let mut collected = TaggedDictBuilder::new(&tag);\n\n for (k, v) in o.iter() {\n\n collected.insert_value(k.clone(), convert_json_value_to_nu_value(v, &tag)?);\n\n }\n\n collected.insert_value(\"cluster\", cluster_identifier);\n\n\n\n Ok(collected.into_value())\n\n }\n\n _ => Err(ShellError::unexpected(\n\n \"row not an object - malformed response\",\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/cli/util.rs", "rank": 12, "score": 167289.11602454243 }, { "content": "#[allow(dead_code)]\n\npub fn default_collection() -> String {\n\n STATE.collection.clone()\n\n}\n\n\n", "file_path": "tests/common/playground.rs", "rank": 13, "score": 163591.0248002035 }, { "content": "#[allow(dead_code)]\n\npub fn default_bucket() -> String {\n\n STATE.bucket.clone()\n\n}\n\n\n", "file_path": "tests/common/playground.rs", "rank": 14, "score": 163388.44154501794 }, { "content": "pub fn update_config_file(guard: &mut MutexGuard<State>) -> Result<(), ShellError> {\n\n let path = match guard.config_path() {\n\n Some(p) => p,\n\n None => {\n\n return Err(ShellError::unexpected(\n\n \"A config path must be discoverable to save config\",\n\n ));\n\n }\n\n };\n\n let mut cluster_configs = Vec::new();\n\n for (identifier, cluster) in guard.clusters() {\n\n cluster_configs.push(ClusterConfig::from((identifier.clone(), cluster)))\n\n }\n\n let mut cloud_configs = Vec::new();\n\n for (identifier, cloud) in guard.clouds() {\n\n cloud_configs.push(CloudConfig::new(identifier.clone(), cloud.active_project()))\n\n }\n\n let mut control_plane_configs = Vec::new();\n\n for (identifier, c) in guard.cloud_orgs() {\n\n control_plane_configs.push(CloudOrganizationConfig::new(\n", "file_path": "src/cli/clusters_register.rs", "rank": 15, "score": 160971.57713569858 }, { "content": "fn grab_bucket_names(\n\n cluster: &RemoteCluster,\n\n ctrl_c: Arc<AtomicBool>,\n\n) -> Result<Vec<String>, ShellError> {\n\n let response = cluster.cluster().http_client().management_request(\n\n ManagementRequest::GetBuckets,\n\n Instant::now().add(cluster.timeouts().management_timeout()),\n\n ctrl_c,\n\n )?;\n\n let resp: Vec<BucketInfo> = serde_json::from_str(response.content())?;\n\n Ok(resp.into_iter().map(|b| b.name).collect::<Vec<_>>())\n\n}\n\n\n", "file_path": "src/cli/clusters_health.rs", "rank": 16, "score": 152094.37644235484 }, { "content": "fn run_tutorial_prev(state: Arc<Mutex<State>>) -> Result<OutputStream, ShellError> {\n\n let guard = state.lock().unwrap();\n\n let tutorial = guard.tutorial();\n\n Ok(OutputStream::one(\n\n UntaggedValue::string(tutorial.prev_tutorial_step()).into_value(Tag::unknown()),\n\n ))\n\n}\n", "file_path": "src/cli/tutorial_prev.rs", "rank": 17, "score": 150428.1059249172 }, { "content": "fn run_tutorial_next(state: Arc<Mutex<State>>) -> Result<OutputStream, ShellError> {\n\n let guard = state.lock().unwrap();\n\n let tutorial = guard.tutorial();\n\n Ok(OutputStream::one(\n\n UntaggedValue::string(tutorial.next_tutorial_step()).into_value(Tag::unknown()),\n\n ))\n\n}\n", "file_path": "src/cli/tutorial_next.rs", "rank": 18, "score": 150428.1059249172 }, { "content": "struct State {\n\n hostnames: Vec<String>,\n\n bucket: String,\n\n scope: String,\n\n collection: String,\n\n username: String,\n\n password: String,\n\n}\n\n\n\npub struct CBPlayground {}\n\n\n\nimpl CBPlayground {\n\n pub fn setup(topic: &str, block: impl FnOnce(Dirs, &mut CBPlayground)) {\n\n Playground::setup(topic, |dirs, _sandbox| {\n\n let mut playground = CBPlayground {};\n\n let mut config_dir = dirs.test.join(\".cbsh\".to_string());\n\n\n\n if PathBuf::from(&config_dir).exists() {\n\n std::fs::remove_dir_all(PathBuf::from(&config_dir))\n\n .expect(\"can not remove cbsh directory\");\n", "file_path": "tests/common/playground.rs", "rank": 19, "score": 150204.10900381953 }, { "content": "fn run(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n let statement: String = args.req(0)?;\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n let bucket = args\n\n .get_flag(\"bucket\")?\n\n .or_else(|| active_cluster.active_bucket());\n\n\n\n let scope = args.get_flag(\"scope\")?;\n", "file_path": "src/cli/query.rs", "rank": 20, "score": 144425.1264343241 }, { "content": "fn whoami(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n\n\n let mut entries = vec![];\n\n for identifier in cluster_identifiers {\n\n let guard = state.lock().unwrap();\n\n let cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n validate_is_not_cloud(cluster, \"whoami cannot be run against cloud clusters\")?;\n\n\n\n let response = cluster.cluster().http_client().management_request(\n\n ManagementRequest::Whoami,\n\n Instant::now().add(cluster.timeouts().management_timeout()),\n\n ctrl_c.clone(),\n\n )?;\n\n let mut content: Map<String, Value> = serde_json::from_str(response.content())?;\n\n content.insert(\"cluster\".into(), json!(identifier.clone()));\n\n let converted = convert_json_value_to_nu_value(&Value::Object(content), Tag::default())?;\n\n entries.push(converted);\n\n }\n\n\n\n Ok(entries.into())\n\n}\n", "file_path": "src/cli/whoami.rs", "rank": 21, "score": 144425.12643432408 }, { "content": "fn run(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let statement: String = args.req(0)?;\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n\n\n let guard = state.lock().unwrap();\n\n\n\n let scope = args.get_flag(\"scope\")?;\n\n\n\n let with_meta = args.has_flag(\"with-meta\");\n\n\n\n debug!(\"Running analytics query {}\", &statement);\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n", "file_path": "src/cli/analytics.rs", "rank": 22, "score": 144425.1264343241 }, { "content": "fn addresses(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n debug!(\"Running addresses\");\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n let mut results = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::untagged_runtime_error(\"Cluster not found\"));\n\n }\n\n };\n\n validate_is_cloud(\n\n active_cluster,\n\n \"addresses can only be used with clusters registered to a cloud control pane\",\n\n )?;\n\n\n", "file_path": "src/cli/addresses.rs", "rank": 23, "score": 144425.1264343241 }, { "content": "fn clouds(state: Arc<Mutex<State>>, _args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let guard = state.lock().unwrap();\n\n let active_cloud = guard.active_cloud_name().unwrap_or_else(|| \"\".to_string());\n\n let mut results = vec![];\n\n for cloud in guard.clouds() {\n\n let mut collected = TaggedDictBuilder::new(Tag::default());\n\n collected.insert_untagged(\n\n \"active\",\n\n UntaggedValue::boolean(cloud.0.clone() == active_cloud.clone()),\n\n );\n\n collected.insert_value(\"identifier\", cloud.0.clone());\n\n results.push(collected.into_value())\n\n }\n\n\n\n Ok(OutputStream::from(results))\n\n}\n", "file_path": "src/cli/clouds.rs", "rank": 24, "score": 144425.1264343241 }, { "content": "fn nodes(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n\n\n let guard = state.lock().unwrap();\n\n let mut nodes = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n if let Some(plane) = active_cluster.cloud_org() {\n\n let cloud = guard.cloud_org_for_cluster(plane)?.client();\n\n let deadline = Instant::now().add(active_cluster.timeouts().management_timeout());\n\n let cluster_id =\n\n cloud.find_cluster_id(identifier.clone(), deadline.clone(), ctrl_c.clone())?;\n\n let response = cloud.cloud_request(\n", "file_path": "src/cli/nodes.rs", "rank": 25, "score": 144425.12643432408 }, { "content": "fn projects(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n debug!(\"Running projects\");\n\n\n\n let guard = state.lock().unwrap();\n\n let control = guard.active_cloud_org()?;\n\n let client = control.client();\n\n let response = client.cloud_request(\n\n CloudRequest::GetProjects {},\n\n Instant::now().add(control.timeout()),\n\n ctrl_c,\n\n )?;\n\n if response.status() != 200 {\n\n return Err(ShellError::unexpected(response.content().to_string()));\n\n };\n\n\n\n let content: JSONCloudsProjectsResponse = serde_json::from_str(response.content())?;\n\n\n\n let mut results = vec![];\n\n for project in content.items() {\n\n let mut collected = TaggedDictBuilder::new(Tag::default());\n\n collected.insert_value(\"name\", project.name());\n\n collected.insert_value(\"id\", project.id());\n\n results.push(collected.into_value())\n\n }\n\n\n\n Ok(OutputStream::from(results))\n\n}\n", "file_path": "src/cli/projects.rs", "rank": 26, "score": 144425.1264343241 }, { "content": "fn clusters(args: CommandArgs, state: Arc<Mutex<State>>) -> Result<OutputStream, ShellError> {\n\n let identifiers = cluster_identifiers_from(&state, &args, false)?;\n\n\n\n let active = state.lock().unwrap().active();\n\n let clusters = state\n\n .lock()\n\n .unwrap()\n\n .clusters()\n\n .iter()\n\n .filter(|(k, _)| identifiers.contains(k))\n\n .map(|(k, v)| {\n\n let mut collected = TaggedDictBuilder::new(Tag::default());\n\n collected.insert_untagged(\"active\", UntaggedValue::boolean(k == &active));\n\n collected.insert_value(\"tls\", UntaggedValue::boolean(v.tls_config().enabled()));\n\n collected.insert_value(\"identifier\", k.clone());\n\n collected.insert_value(\"username\", String::from(v.username()));\n\n collected.insert_value(\n\n \"cloud_organization\",\n\n v.cloud_org().unwrap_or_else(|| \"\".to_string()),\n\n );\n\n collected.into_value()\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n Ok(clusters.into())\n\n}\n", "file_path": "src/cli/clusters.rs", "rank": 27, "score": 144425.1264343241 }, { "content": "fn run(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let index: String = args.req(0)?;\n\n let query: String = args.req(1)?;\n\n\n\n debug!(\"Running search query {} against {}\", &query, &index);\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n let mut results = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n let response = active_cluster\n\n .cluster()\n", "file_path": "src/cli/search.rs", "rank": 28, "score": 144425.1264343241 }, { "content": "pub fn validate_is_not_cloud(cluster: &RemoteCluster, err_msg: &str) -> Result<(), ShellError> {\n\n if cluster.cloud_org().is_some() {\n\n return Err(ShellError::unexpected(err_msg));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\npub(crate) fn find_project_id(\n\n ctrl_c: Arc<AtomicBool>,\n\n name: String,\n\n client: &Arc<CloudClient>,\n\n deadline: Instant,\n\n) -> Result<String, ShellError> {\n\n let response = client.cloud_request(CloudRequest::GetProjects {}, deadline, ctrl_c)?;\n\n if response.status() != 200 {\n\n return Err(ShellError::unexpected(response.content().to_string()));\n\n };\n\n let content: JSONCloudsProjectsResponse = serde_json::from_str(response.content())?;\n\n\n", "file_path": "src/cli/util.rs", "rank": 29, "score": 143584.09802194347 }, { "content": "pub fn execute_command(cwd: &PathBuf, command: &str) -> Outcome {\n\n let commands = &*format!(\n\n \"\n\n cd \\\"{}\\\"\n\n {}\n\n exit\",\n\n fs::in_directory(&cwd),\n\n fs::DisplayPath::display_path(&command)\n\n );\n\n\n\n let test_bins = fs::binaries();\n\n let test_bins = dunce::canonicalize(&test_bins).unwrap_or_else(|e| {\n\n panic!(\n\n \"Couldn't canonicalize dummy binaries path {}: {:?}\",\n\n test_bins.display(),\n\n e\n\n )\n\n });\n\n\n\n let mut paths = shell_os_paths();\n", "file_path": "tests/common/mod.rs", "rank": 30, "score": 143497.18680237507 }, { "content": "// Adapted from https://github.com/nushell/nushell/blob/main/crates/nu-command/src/commands/formats/to/json.rs\n\npub fn convert_nu_value_to_json_value(v: &Value) -> Result<serde_json::Value, ShellError> {\n\n Ok(match &v.value {\n\n UntaggedValue::Primitive(Primitive::Boolean(b)) => serde_json::Value::Bool(*b),\n\n UntaggedValue::Primitive(Primitive::Filesize(b)) => serde_json::Value::Number(\n\n serde_json::Number::from(b.to_u64().expect(\"What about really big numbers\")),\n\n ),\n\n UntaggedValue::Primitive(Primitive::Duration(i)) => {\n\n serde_json::Value::String(i.to_string())\n\n }\n\n UntaggedValue::Primitive(Primitive::Date(d)) => serde_json::Value::String(d.to_string()),\n\n UntaggedValue::Primitive(Primitive::EndOfStream) => serde_json::Value::Null,\n\n UntaggedValue::Primitive(Primitive::BeginningOfStream) => serde_json::Value::Null,\n\n UntaggedValue::Primitive(Primitive::Decimal(f)) => {\n\n if let Some(f) = f.to_f64() {\n\n if let Some(num) = serde_json::Number::from_f64(\n\n f.to_f64().expect(\"TODO: What about really big decimals?\"),\n\n ) {\n\n serde_json::Value::Number(num)\n\n } else {\n\n return Err(ShellError::labeled_error(\n", "file_path": "src/cli/util.rs", "rank": 31, "score": 142367.900846649 }, { "content": "fn dataverses(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let statement = \"SELECT `Link`.* FROM `Metadata`.`Link`\";\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n debug!(\"Running analytics query {}\", &statement);\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n let response = active_cluster\n\n .cluster()\n\n .http_client()\n\n .analytics_query_request(\n", "file_path": "src/cli/analytics_links.rs", "rank": 32, "score": 142095.5417837568 }, { "content": "fn indexes(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let with_meta = args.has_flag(\"with-meta\");\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n let fetch_defs = args.has_flag(\"definitions\");\n\n\n\n let statement = \"select keyspace_id as `bucket`, name, state, `using` as `type`, ifmissing(condition, null) as condition, ifmissing(is_primary, false) as `primary`, index_key from system:indexes\";\n\n\n\n debug!(\"Running n1ql query {}\", &statement);\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n", "file_path": "src/cli/query_indexes.rs", "rank": 33, "score": 142095.5417837568 }, { "content": "fn health(args: CommandArgs, state: Arc<Mutex<State>>) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n\n\n let mut converted = vec![];\n\n for identifier in cluster_identifiers {\n\n let guard = state.lock().unwrap();\n\n let cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n\n\n if let Some(plane) = cluster.cloud_org() {\n\n let cloud = guard.cloud_org_for_cluster(plane)?;\n\n let values =\n\n check_cloud_health(&identifier, cloud, cluster.timeouts(), ctrl_c.clone())?;\n\n for value in values {\n", "file_path": "src/cli/clusters_health.rs", "rank": 34, "score": 142095.5417837568 }, { "content": "fn dataverses(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let statement = \"SELECT d.* FROM Metadata.`Dataverse` d WHERE d.DataverseName <> \\\"Metadata\\\"\";\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n debug!(\"Running analytics query {}\", &statement);\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n\n\n let response = active_cluster\n\n .cluster()\n\n .http_client()\n", "file_path": "src/cli/analytics_dataverses.rs", "rank": 35, "score": 142095.5417837568 }, { "content": "fn indexes(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let statement = \"SELECT d.* FROM Metadata.`Index` d WHERE d.DataverseName <> \\\"Metadata\\\"\";\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n debug!(\"Running analytics query {}\", &statement);\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n\n\n let response = active_cluster\n\n .cluster()\n\n .http_client()\n", "file_path": "src/cli/analytics_indexes.rs", "rank": 36, "score": 142095.5417837568 }, { "content": "fn run_ping(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n\n\n let guard = state.lock().unwrap();\n\n\n\n debug!(\"Running ping\");\n\n\n\n let rt = Runtime::new().unwrap();\n\n let clusters_len = cluster_identifiers.len();\n\n let mut results = vec![];\n\n for identifier in cluster_identifiers {\n\n let cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => continue, // This can't actually happen, we filter the clusters in cluster_identifiers_from\n\n };\n\n let deadline = Instant::now().add(cluster.timeouts().management_timeout());\n\n\n\n let client = cluster.cluster().http_client();\n", "file_path": "src/cli/ping.rs", "rank": 37, "score": 142095.5417837568 }, { "content": "fn datasets(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let statement = \"SELECT d.* FROM Metadata.`Dataset` d WHERE d.DataverseName <> \\\"Metadata\\\"\";\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n\n\n let guard = state.lock().unwrap();\n\n debug!(\"Running analytics query {}\", &statement);\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n let response = active_cluster\n\n .cluster()\n\n .http_client()\n", "file_path": "src/cli/analytics_datasets.rs", "rank": 38, "score": 142095.5417837568 }, { "content": "fn users_get_all(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n debug!(\"Running users get all\");\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n let mut results = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n let mut stream: Vec<Value> = if let Some(plane) = active_cluster.cloud_org() {\n\n let cloud = guard.cloud_org_for_cluster(plane)?.client();\n\n let deadline = Instant::now().add(active_cluster.timeouts().management_timeout());\n\n let cluster_id =\n\n cloud.find_cluster_id(identifier.clone(), deadline.clone(), ctrl_c.clone())?;\n", "file_path": "src/cli/users.rs", "rank": 39, "score": 142095.5417837568 }, { "content": "fn scopes_get(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n\n\n let guard = state.lock().unwrap();\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n validate_is_not_cloud(\n\n active_cluster,\n\n \"scopes get cannot be run against cloud clusters\",\n\n )?;\n\n\n\n let bucket = match args.get_flag(\"bucket\")? {\n", "file_path": "src/cli/scopes.rs", "rank": 40, "score": 142095.5417837568 }, { "content": "fn run(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let with_meta = args.has_flag(\"with-meta\");\n\n\n\n let statement: String = args.req(0)?;\n\n let statement = format!(\"ADVISE {}\", statement);\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n debug!(\"Running n1ql query {}\", &statement);\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n let response = active_cluster.cluster().http_client().query_request(\n", "file_path": "src/cli/query_advise.rs", "rank": 41, "score": 142095.5417837568 }, { "content": "fn users_get(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let username: String = args.req(0)?;\n\n\n\n debug!(\"Running users get {}\", username);\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n let mut results = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n\n\n let mut stream: Vec<Value> = if let Some(plane) = active_cluster.cloud_org() {\n\n let cloud = guard.cloud_org_for_cluster(plane)?.client();\n", "file_path": "src/cli/users_get.rs", "rank": 42, "score": 139867.1403586046 }, { "content": "fn users_drop(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let username: String = args.req(0)?;\n\n\n\n debug!(\"Running users drop {}\", username);\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n let response = if let Some(plane) = active_cluster.cloud_org() {\n\n let cloud = guard.cloud_org_for_cluster(plane)?.client();\n\n let deadline = Instant::now().add(active_cluster.timeouts().management_timeout());\n\n let cluster_id =\n", "file_path": "src/cli/users_drop.rs", "rank": 43, "score": 139867.1403586046 }, { "content": "fn users_upsert(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let username: String = args.req(0)?;\n\n let roles: String = args.req(1)?;\n\n let password = args.get_flag(\"password\")?;\n\n let display_name = args.get_flag(\"display_name\")?;\n\n let groups = args.get_flag(\"groups\")?;\n\n\n\n debug!(\"Running users upsert for user {}\", &username);\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n", "file_path": "src/cli/users_upsert.rs", "rank": 44, "score": 139867.1403586046 }, { "content": "fn addresses_drop(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let address: String = args.req(0)?;\n\n\n\n debug!(\"Running address drop for {}\", &address);\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::untagged_runtime_error(\"Cluster not found\"));\n\n }\n\n };\n\n validate_is_cloud(\n\n active_cluster,\n\n \"addresses can only be used with clusters registered to a cloud control pane\",\n\n )?;\n", "file_path": "src/cli/addresses_drop.rs", "rank": 45, "score": 139867.1403586046 }, { "content": "fn scopes_drop(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n let scope: String = args.req(0)?;\n\n\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n validate_is_not_cloud(\n\n active_cluster,\n\n \"scopes create cannot be run against cloud clusters\",\n\n )?;\n\n\n\n let bucket = match args.get_flag(\"bucket\")? {\n", "file_path": "src/cli/scopes_drop.rs", "rank": 46, "score": 139867.1403586046 }, { "content": "fn run_get(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let id_column = args\n\n .get_flag(\"id-column\")?\n\n .unwrap_or_else(|| String::from(\"id\"));\n\n\n\n let bucket_flag = args.get_flag(\"bucket\")?;\n\n let scope_flag = args.get_flag(\"scope\")?;\n\n let collection_flag = args.get_flag(\"collection\")?;\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n\n\n let guard = state.lock().unwrap();\n\n\n\n let input_args = if let Some(id) = args.opt::<String>(0)? {\n\n vec![id]\n\n } else {\n\n vec![]\n\n };\n", "file_path": "src/cli/doc_remove.rs", "rank": 47, "score": 139867.1403586046 }, { "content": "fn run_async(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n\n\n let permission = args.get_flag(\"permission\")?;\n\n\n\n let mut entries = vec![];\n\n for identifier in cluster_identifiers {\n\n let guard = state.lock().unwrap();\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n validate_is_not_cloud(\n\n active_cluster,\n\n \"user roles cannot be run against cloud clusters\",\n\n )?;\n", "file_path": "src/cli/users_roles.rs", "rank": 48, "score": 139867.1403586046 }, { "content": "fn run_insert(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let id_column = args\n\n .get_flag(\"id-column\")?\n\n .unwrap_or_else(|| String::from(\"id\"));\n\n\n\n let content_column = args\n\n .get_flag(\"content-column\")?\n\n .unwrap_or_else(|| String::from(\"content\"));\n\n\n\n let expiry: i32 = args.get_flag(\"expiry\")?.unwrap_or(0);\n\n\n\n let bucket_flag = args.get_flag(\"bucket\")?;\n\n let scope_flag = args.get_flag(\"scope\")?;\n\n let collection_flag = args.get_flag(\"collection\")?;\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n let input_args = if let Some(id) = args.opt::<String>(0)? {\n", "file_path": "src/cli/doc_insert.rs", "rank": 49, "score": 139867.1403586046 }, { "content": "fn clouds_status(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let all = args.get_flag(\"all\")?.unwrap_or(false);\n\n\n\n debug!(\"Running clouds status\");\n\n\n\n let guard = state.lock().unwrap();\n\n let control = guard.active_cloud_org()?;\n\n let client = control.client();\n\n let response = client.cloud_request(\n\n CloudRequest::GetClouds {},\n\n Instant::now().add(control.timeout()),\n\n ctrl_c,\n\n )?;\n\n if response.status() != 200 {\n\n return Err(ShellError::unexpected(response.content().to_string()));\n\n };\n\n\n\n let content: JSONCloudsResponse = serde_json::from_str(response.content())?;\n\n\n", "file_path": "src/cli/clouds_status.rs", "rank": 50, "score": 139867.1403586046 }, { "content": "fn run_replace(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let id_column = args\n\n .get_flag(\"id-column\")?\n\n .unwrap_or_else(|| String::from(\"id\"));\n\n\n\n let content_column = args\n\n .get_flag(\"content-column\")?\n\n .unwrap_or_else(|| String::from(\"content\"));\n\n\n\n let expiry: i32 = args.get_flag(\"expiry\")?.unwrap_or(0);\n\n\n\n let bucket_flag = args.get_flag(\"bucket\")?;\n\n let scope_flag = args.get_flag(\"scope\")?;\n\n let collection_flag = args.get_flag(\"collection\")?;\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n\n\n let guard = state.lock().unwrap();\n", "file_path": "src/cli/doc_replace.rs", "rank": 51, "score": 139867.1403586046 }, { "content": "fn addresses_add(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let address: String = args.req(0)?;\n\n let duration = args.get_flag(\"duration\")?;\n\n\n\n debug!(\"Running address add for {}\", &address);\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n validate_is_cloud(\n\n active_cluster,\n\n \"addresses can only be used with clusters registered to a cloud control pane\",\n", "file_path": "src/cli/addresses_add.rs", "rank": 52, "score": 139867.1403586046 }, { "content": "fn cloud_clusters(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n debug!(\"Running clouds clusters\");\n\n\n\n let guard = state.lock().unwrap();\n\n let control = guard.active_cloud_org()?;\n\n let client = control.client();\n\n let response = client.cloud_request(\n\n CloudRequest::GetClusters {},\n\n Instant::now().add(control.timeout()),\n\n ctrl_c,\n\n )?;\n\n if response.status() != 200 {\n\n return Err(ShellError::unexpected(response.content().to_string()));\n\n };\n\n\n\n let content: JSONCloudClustersSummaries = serde_json::from_str(response.content())?;\n\n\n\n let mut results = vec![];\n", "file_path": "src/cli/clouds_clusters.rs", "rank": 53, "score": 139867.1403586046 }, { "content": "fn run_upsert(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let id_column = args\n\n .get_flag(\"id-column\")?\n\n .unwrap_or_else(|| String::from(\"id\"));\n\n\n\n let content_column = args\n\n .get_flag(\"content-column\")?\n\n .unwrap_or_else(|| String::from(\"content\"));\n\n let bucket_flag = args.get_flag(\"bucket\")?;\n\n let scope_flag = args.get_flag(\"scope\")?;\n\n let collection_flag = args.get_flag(\"collection\")?;\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n\n\n let guard = state.lock().unwrap();\n\n\n\n let expiry: i32 = args.get_flag(\"expiry\")?.unwrap_or(0);\n\n\n", "file_path": "src/cli/doc_upsert.rs", "rank": 54, "score": 139867.1403586046 }, { "content": "fn scopes_create(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n let scope: String = args.req(0)?;\n\n\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n validate_is_not_cloud(\n\n active_cluster,\n\n \"scopes create cannot be run against cloud clusters\",\n\n )?;\n\n\n\n let bucket = match args.get_flag(\"bucket\")? {\n", "file_path": "src/cli/scopes_create.rs", "rank": 55, "score": 139867.1403586046 }, { "content": "pub fn parse_out_to_json(out: String) -> serde_json::Value {\n\n serde_json::from_str(out.as_str()).unwrap()\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 56, "score": 139734.9473449104 }, { "content": "fn clusters_get(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let name: String = args.req(0)?;\n\n\n\n debug!(\"Running clouds clusters get for {}\", &name);\n\n\n\n let guard = state.lock().unwrap();\n\n let control = guard.active_cloud_org()?;\n\n let client = control.client();\n\n\n\n let deadline = Instant::now().add(control.timeout());\n\n let cluster_id = find_cloud_cluster_id(ctrl_c.clone(), name, &client, deadline)?;\n\n let response =\n\n client.cloud_request(CloudRequest::GetCluster { cluster_id }, deadline, ctrl_c)?;\n\n if response.status() != 200 {\n\n return Err(ShellError::unexpected(response.content().to_string()));\n\n };\n\n let cluster: JSONCloudCluster = serde_json::from_str(response.content())?;\n\n\n\n let mut collected = TaggedDictBuilder::new(Tag::default());\n", "file_path": "src/cli/clouds_clusters_get.rs", "rank": 57, "score": 137733.3328690378 }, { "content": "fn clusters_drop(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let name: String = args.req(0)?;\n\n\n\n debug!(\"Running clouds clusters drop for {}\", &name);\n\n\n\n let guard = state.lock().unwrap();\n\n let control = guard.active_cloud_org()?;\n\n let client = control.client();\n\n\n\n let deadline = Instant::now().add(control.timeout());\n\n let cluster_id = find_cloud_cluster_id(ctrl_c.clone(), name, &client, deadline)?;\n\n let response =\n\n client.cloud_request(CloudRequest::DeleteCluster { cluster_id }, deadline, ctrl_c)?;\n\n if response.status() != 202 {\n\n return Err(ShellError::unexpected(response.content().to_string()));\n\n };\n\n\n\n Ok(OutputStream::empty())\n\n}\n", "file_path": "src/cli/clouds_clusters_drop.rs", "rank": 58, "score": 137733.3328690378 }, { "content": "/// Creates a regular, non-flex response with all fields necessary.\n\npub fn _response(\n\n opcode: Opcode,\n\n datatype: u8,\n\n status: u16,\n\n opaque: u32,\n\n cas: u64,\n\n key: Option<Bytes>,\n\n extras: Option<Bytes>,\n\n body: Option<Bytes>,\n\n) -> BytesMut {\n\n let key_size = key.as_ref().map(|b| b.len()).unwrap_or_default();\n\n let extras_size = extras.as_ref().map(|b| b.len()).unwrap_or_default();\n\n let total_body_size =\n\n key_size + extras_size + body.as_ref().map(|b| b.len()).unwrap_or_default();\n\n\n\n let mut builder = BytesMut::with_capacity(HEADER_SIZE + total_body_size);\n\n builder.put_u8(Magic::Response.encoded());\n\n builder.put_u8(opcode.encoded());\n\n builder.put_u16(key_size as u16);\n\n builder.put_u8(extras_size as u8);\n", "file_path": "src/client/protocol.rs", "rank": 59, "score": 135815.4574846783 }, { "content": "fn run_get(state: Arc<Mutex<State>>, mut args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let id_column: String = args.get_flag(\"id-column\")?.unwrap_or_else(|| \"id\".into());\n\n let mut ids = vec![];\n\n while let Some(item) = args.input.next() {\n\n let untagged = item.into();\n\n match untagged {\n\n UntaggedValue::Primitive(Primitive::String(s)) => ids.push(s.clone()),\n\n UntaggedValue::Row(d) => {\n\n if let MaybeOwned::Borrowed(d) = d.get_data(id_column.as_ref()) {\n\n let untagged = &d.value;\n\n if let UntaggedValue::Primitive(Primitive::String(s)) = untagged {\n\n ids.push(s.clone())\n\n }\n\n }\n\n }\n\n _ => {}\n\n }\n", "file_path": "src/cli/doc_get.rs", "rank": 60, "score": 135324.55427001588 }, { "content": "#[test]\n\npub fn get_a_document() {\n\n playground::CBPlayground::setup(\"get_a_document\", |dirs, _sandbox| {\n\n common::create_document(\n\n &dirs.test,\n\n playground::default_bucket(),\n\n playground::default_scope(),\n\n playground::default_collection(),\n\n \"get_a_document\",\n\n r#\"{\"testkey\": \"testvalue\"}\"#,\n\n );\n\n\n\n let out = common::execute_command(\n\n &dirs.test,\n\n r#\"doc get \"get_a_document\" | get content | to json\"#,\n\n );\n\n\n\n let json = common::parse_out_to_json(out.out);\n\n\n\n assert_eq!(\"\", out.err);\n\n assert_eq!(\"testvalue\", json[\"testkey\"]);\n\n });\n\n}\n\n\n", "file_path": "tests/doc_get.rs", "rank": 61, "score": 132866.82004103277 }, { "content": "#[test]\n\npub fn upserts_a_document() {\n\n playground::CBPlayground::setup(\"upsert_a_document\", |dirs, _sandbox| {\n\n let out =\n\n common::execute_command(&dirs.test, r#\"doc upsert test {\"test\": \"test\"} | to json\"#);\n\n\n\n assert_eq!(\"\", out.err);\n\n\n\n let json = common::parse_out_to_json(out.out);\n\n\n\n assert_eq!(1, json[\"success\"]);\n\n assert_eq!(1, json[\"processed\"]);\n\n assert_eq!(0, json[\"failed\"]);\n\n assert_eq!(serde_json::Value::Array(vec!()), json[\"failures\"]);\n\n });\n\n}\n", "file_path": "tests/doc_upsert.rs", "rank": 62, "score": 132866.82004103277 }, { "content": "// Creates a flexible request with optional framing extras\n\npub fn _flexible_request(\n\n opcode: Opcode,\n\n datatype: u8,\n\n partition: u16,\n\n opaque: u32,\n\n cas: u64,\n\n key: Option<Bytes>,\n\n framing_extras: Option<Bytes>,\n\n extras: Option<Bytes>,\n\n body: Option<Bytes>,\n\n) -> BytesMut {\n\n let key_size = key.as_ref().map(|b| b.len()).unwrap_or_default();\n\n let extras_size = extras.as_ref().map(|b| b.len()).unwrap_or_default();\n\n let framing_extras_size = framing_extras.as_ref().map(|b| b.len()).unwrap_or_default();\n\n let total_body_size = key_size\n\n + extras_size\n\n + framing_extras_size\n\n + body.as_ref().map(|b| b.len()).unwrap_or_default();\n\n\n\n let mut builder = BytesMut::with_capacity(HEADER_SIZE + total_body_size);\n", "file_path": "src/client/protocol.rs", "rank": 63, "score": 132866.82004103277 }, { "content": "#[allow(dead_code)]\n\npub fn create_document(\n\n cwd: &PathBuf,\n\n bucket: String,\n\n scope: String,\n\n collection: String,\n\n key: &str,\n\n content: &str,\n\n) {\n\n let mut command = format!(\"doc upsert {} {} --bucket {}\", key, content, bucket);\n\n if !scope.is_empty() {\n\n command = format!(\"{} --scope {}\", command, scope)\n\n }\n\n if !collection.is_empty() {\n\n command = format!(\"{} --collection {}\", command, collection)\n\n }\n\n command = format!(\"{} | to json\", command);\n\n\n\n let out = execute_command(cwd, command.as_str());\n\n\n\n assert_eq!(\"\", out.err);\n\n\n\n let json = parse_out_to_json(out.out);\n\n\n\n assert_eq!(1, json[\"success\"]);\n\n assert_eq!(1, json[\"processed\"]);\n\n assert_eq!(0, json[\"failed\"]);\n\n assert_eq!(serde_json::Value::Array(vec!()), json[\"failures\"]);\n\n}\n", "file_path": "tests/common/mod.rs", "rank": 64, "score": 132866.82004103277 }, { "content": "pub fn namespace_from_args(\n\n bucket_flag: Option<String>,\n\n scope_flag: Option<String>,\n\n collection_flag: Option<String>,\n\n active_cluster: &RemoteCluster,\n\n) -> Result<(String, String, String), ShellError> {\n\n let bucket = match bucket_flag.or_else(|| active_cluster.active_bucket()) {\n\n Some(v) => Ok(v),\n\n None => Err(ShellError::unexpected(\n\n \"Could not auto-select a bucket - please use --bucket instead\".to_string(),\n\n )),\n\n }?;\n\n\n\n let scope = match scope_flag {\n\n Some(s) => s,\n\n None => match active_cluster.active_scope() {\n\n Some(s) => s,\n\n None => \"\".into(),\n\n },\n\n };\n", "file_path": "src/cli/util.rs", "rank": 65, "score": 132866.82004103277 }, { "content": "pub fn cluster_identifiers_from(\n\n state: &Arc<Mutex<State>>,\n\n args: &CommandArgs,\n\n default_active: bool,\n\n) -> Result<Vec<String>, ShellError> {\n\n let state = state.lock().unwrap();\n\n let identifier_arg: String = match args.get_flag(\"clusters\")? {\n\n Some(arg) => arg,\n\n None => {\n\n if default_active {\n\n return Ok(vec![state.active()]);\n\n }\n\n \"\".into()\n\n }\n\n };\n\n\n\n let re = match Regex::new(identifier_arg.as_str()) {\n\n Ok(v) => v,\n\n Err(e) => {\n\n return Err(ShellError::unexpected(format!(\n", "file_path": "src/cli/util.rs", "rank": 66, "score": 132866.82004103277 }, { "content": "fn run_fake(_state: Arc<Mutex<State>>, args: CommandArgs) -> Result<ActionStream, ShellError> {\n\n let list_functions = args.has_flag(\"list-functions\");\n\n\n\n let ctx = Context::new();\n\n let mut tera = Tera::default();\n\n\n\n register_functions(&mut tera);\n\n\n\n if list_functions {\n\n let generated = tera\n\n .render_str(LIST_FUNCTIONS, &ctx)\n\n .map_err(|e| ShellError::unexpected(format!(\"{}\", e)))?;\n\n let content = serde_json::from_str(&generated)\n\n .map_err(|e| ShellError::unexpected(format!(\"{}\", e)))?;\n\n match content {\n\n serde_json::Value::Array(values) => {\n\n let converted = values.into_iter().map(|v| {\n\n match convert_json_value_to_nu_value(&v, Tag::default()) {\n\n Ok(c) => Ok(ReturnSuccess::Value(c)),\n\n Err(e) => Err(e),\n", "file_path": "src/cli/fake_data.rs", "rank": 67, "score": 132461.3870562153 }, { "content": "pub fn validate_is_cloud(cluster: &RemoteCluster, err_msg: &str) -> Result<(), ShellError> {\n\n if cluster.cloud_org().is_none() {\n\n return Err(ShellError::unexpected(err_msg));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cli/util.rs", "rank": 68, "score": 131990.6665228216 }, { "content": "fn command_dict(command: Command, tag: impl Into<Tag>) -> Value {\n\n let tag = tag.into();\n\n\n\n let mut cmd_dict = TaggedDictBuilder::new(&tag);\n\n\n\n cmd_dict.insert_untagged(\"name\", UntaggedValue::string(command.name()));\n\n\n\n cmd_dict.insert_untagged(\"type\", UntaggedValue::string(\"Command\"));\n\n\n\n cmd_dict.insert_value(\"signature\", signature_dict(command.signature(), tag));\n\n cmd_dict.insert_untagged(\"usage\", UntaggedValue::string(command.usage()));\n\n\n\n cmd_dict.into_value()\n\n}\n", "file_path": "src/cli/help.rs", "rank": 69, "score": 131735.58122312787 }, { "content": "#[test]\n\npub fn get_a_document_not_found() {\n\n playground::CBPlayground::setup(\"get_a_document_not_found\", |dirs, _sandbox| {\n\n let out = common::execute_command(\n\n &dirs.test,\n\n r#\"doc get \"get_a_document_not_found\" | get error\"#,\n\n );\n\n\n\n assert_eq!(\"\", out.err);\n\n assert!(out.out.contains(\"key not found\"));\n\n });\n\n}\n", "file_path": "tests/doc_get.rs", "rank": 70, "score": 130110.50394101045 }, { "content": "fn buckets_get_all(\n\n state: Arc<Mutex<State>>,\n\n args: CommandArgs,\n\n) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n\n\n debug!(\"Running buckets\");\n\n\n\n let guard = state.lock().unwrap();\n\n let mut results = vec![];\n\n for identifier in cluster_identifiers {\n\n let cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n\n", "file_path": "src/cli/buckets.rs", "rank": 72, "score": 128259.95649474883 }, { "content": "/// Creates a regular, non-flex request with all fields necessary.\n\npub fn request(req: KvRequest, collections_enabled: bool) -> BytesMut {\n\n let key = match req.key {\n\n Some(k) => {\n\n if collections_enabled {\n\n let cid = make_uleb128_32(k, req.collection_id);\n\n Some(cid)\n\n } else {\n\n Some(k)\n\n }\n\n }\n\n None => None,\n\n };\n\n\n\n let key_size = key.as_ref().map(|b| b.len()).unwrap_or_default();\n\n let extras_size = req.extras.as_ref().map(|b| b.len()).unwrap_or_default();\n\n let total_body_size =\n\n key_size + extras_size + req.body.as_ref().map(|b| b.len()).unwrap_or_default();\n\n\n\n let mut builder = BytesMut::with_capacity(HEADER_SIZE + total_body_size);\n\n builder.put_u8(Magic::Request.encoded());\n", "file_path": "src/client/protocol.rs", "rank": 73, "score": 127854.47820600319 }, { "content": "fn collections_create(\n\n state: Arc<Mutex<State>>,\n\n args: CommandArgs,\n\n) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n let collection: String = args.req(0)?;\n\n let expiry = args.get_flag(\"max-expiry\")?.unwrap_or(0);\n\n\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n validate_is_not_cloud(\n\n active_cluster,\n", "file_path": "src/cli/collections_create.rs", "rank": 74, "score": 125866.87656601025 }, { "content": "fn collections_drop(\n\n state: Arc<Mutex<State>>,\n\n args: CommandArgs,\n\n) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n let collection: String = args.req(0)?;\n\n\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n validate_is_not_cloud(\n\n active_cluster,\n\n \"collections drop cannot be run against cloud clusters\",\n", "file_path": "src/cli/collections_drop.rs", "rank": 75, "score": 125866.87656601025 }, { "content": "#[derive(Deserialize, Debug)]\n\nstruct CollectionManifestCollection {\n\n uid: String,\n\n name: String,\n\n #[serde(alias = \"maxTTL\")]\n\n max_ttl: Option<u32>,\n\n}\n\n\n", "file_path": "src/client/kv_client.rs", "rank": 76, "score": 123874.0370076703 }, { "content": "fn update_bucket_settings(\n\n settings: &mut BucketSettings,\n\n ram: Option<u64>,\n\n replicas: Option<u64>,\n\n flush: bool,\n\n durability: Option<String>,\n\n expiry: Option<u64>,\n\n) -> Result<(), ShellError> {\n\n if let Some(r) = ram {\n\n settings.set_ram_quota_mb(r);\n\n }\n\n if let Some(r) = replicas {\n\n settings.set_num_replicas(match u32::try_from(r) {\n\n Ok(bt) => bt,\n\n Err(e) => {\n\n return Err(ShellError::unexpected(format!(\n\n \"Failed to parse durability level {}\",\n\n e\n\n )));\n\n }\n", "file_path": "src/cli/buckets_update.rs", "rank": 77, "score": 123083.6743816581 }, { "content": "fn load_sample_bucket(\n\n state: Arc<Mutex<State>>,\n\n args: CommandArgs,\n\n) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let bucket_name: String = args.req(0)?;\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let guard = state.lock().unwrap();\n\n let cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n\n\n validate_is_not_cloud(\n", "file_path": "src/cli/buckets_sample.rs", "rank": 78, "score": 123083.6743816581 }, { "content": "#[allow(dead_code)]\n\npub fn default_scope() -> String {\n\n STATE.scope.clone()\n\n}\n\n\n", "file_path": "tests/common/playground.rs", "rank": 79, "score": 122602.36348743882 }, { "content": "pub fn root() -> PathBuf {\n\n let manifest_dir = if let Ok(manifest_dir) = std::env::var(\"CARGO_MANIFEST_DIR\") {\n\n PathBuf::from(manifest_dir)\n\n } else {\n\n PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"))\n\n };\n\n\n\n let test_path = manifest_dir.join(\"Cargo.lock\");\n\n if test_path.exists() {\n\n manifest_dir\n\n } else {\n\n manifest_dir\n\n .parent()\n\n .expect(\"Couldn't find the debug binaries directory\")\n\n .parent()\n\n .expect(\"Couldn't find the debug binaries directory\")\n\n .to_path_buf()\n\n }\n\n}\n\n\n", "file_path": "tests/common/fs.rs", "rank": 80, "score": 122602.36348743882 }, { "content": "pub fn binaries() -> PathBuf {\n\n std::env::var(\"CARGO_TARGET_DIR\")\n\n .ok()\n\n .map(|target_dir| PathBuf::from(target_dir).join(\"debug\"))\n\n .unwrap_or_else(|| root().join(\"target/debug\"))\n\n}\n\n\n", "file_path": "tests/common/fs.rs", "rank": 81, "score": 122602.36348743882 }, { "content": "pub fn executable_path() -> PathBuf {\n\n let mut path = binaries();\n\n path.push(\"cbsh\");\n\n path\n\n}\n\n\n", "file_path": "tests/common/fs.rs", "rank": 82, "score": 120020.14707997598 }, { "content": "#[derive(Deserialize, Debug)]\n\nstruct CollectionManifest {\n\n uid: String,\n\n scopes: Vec<CollectionManifestScope>,\n\n}\n\n\n", "file_path": "src/client/kv_client.rs", "rank": 83, "score": 114513.62214871253 }, { "content": "struct CtrlcState {\n\n interrupt: Arc<AtomicBool>,\n\n waker: Option<Waker>,\n\n halt: Arc<AtomicBool>,\n\n}\n\n\n\nimpl CtrlcFuture {\n\n pub fn new(ctrl_c: Arc<AtomicBool>) -> CtrlcFuture {\n\n let state = Arc::new(Mutex::new(CtrlcState {\n\n interrupt: ctrl_c,\n\n waker: None,\n\n halt: Arc::new(AtomicBool::new(false)),\n\n }));\n\n\n\n let state_clone = state.clone();\n\n thread::spawn(move || loop {\n\n let mut state = state_clone.lock().unwrap();\n\n if state.halt.load(Ordering::SeqCst) {\n\n return;\n\n }\n", "file_path": "src/cli/ctrlc_future.rs", "rank": 84, "score": 114390.12466481882 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct BucketStats {\n\n op: BucketStatsOp,\n\n}\n\n\n", "file_path": "src/cli/clusters_health.rs", "rank": 85, "score": 114304.20955065245 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct BucketInfo {\n\n name: String,\n\n}\n\n\n", "file_path": "src/cli/clusters_health.rs", "rank": 86, "score": 114304.20955065245 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct JSONControllers {\n\n #[serde(default)]\n\n flush: String,\n\n}\n\n\n", "file_path": "src/cli/buckets_builder.rs", "rank": 87, "score": 114304.20955065245 }, { "content": "#[derive(Deserialize, Debug)]\n\nstruct BucketConfig {\n\n rev: u64,\n\n #[serde(alias = \"nodesExt\")]\n\n nodes_ext: Vec<NodeConfig>,\n\n loaded_from: Option<String>,\n\n #[serde(alias = \"vBucketServerMap\")]\n\n vbucket_server_map: VBucketServerMap,\n\n}\n\n\n\nimpl BucketConfig {\n\n pub fn key_value_seeds(&self, tls: bool) -> Vec<(String, u32)> {\n\n let key = if tls { \"kvSSL\" } else { \"kv\" };\n\n\n\n self.seeds(key)\n\n }\n\n\n\n pub fn set_loaded_from(&mut self, loaded_from: String) {\n\n self.loaded_from = Some(loaded_from);\n\n }\n\n\n", "file_path": "src/client/kv_client.rs", "rank": 88, "score": 114304.20955065245 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct JSONQuota {\n\n ram: u64,\n\n #[serde(rename = \"rawRAM\")]\n\n raw_ram: u64,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct JSONBucketSettings {\n\n name: String,\n\n controllers: JSONControllers,\n\n quota: JSONQuota,\n\n #[serde(rename = \"replicaNumber\")]\n\n num_replicas: u32,\n\n #[serde(default)]\n\n #[serde(rename = \"replicaIndex\")]\n\n replica_indexes: bool,\n\n #[serde(rename = \"bucketType\")]\n\n bucket_type: String,\n\n #[serde(rename = \"evictionPolicy\")]\n\n eviction_policy: String,\n", "file_path": "src/cli/buckets_builder.rs", "rank": 89, "score": 114304.20955065245 }, { "content": "#[derive(Deserialize, Debug)]\n\nstruct CollectionManifestScope {\n\n uid: String,\n\n name: String,\n\n collections: Vec<CollectionManifestCollection>,\n\n}\n\n\n", "file_path": "src/client/kv_client.rs", "rank": 90, "score": 111749.866574908 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct BucketStatsOp {\n\n samples: BucketStatsSamples,\n\n}\n\n\n", "file_path": "src/cli/clusters_health.rs", "rank": 91, "score": 111547.28331972245 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct BucketStatsSamples {\n\n #[serde(rename = \"vb_active_resident_items_ratio\")]\n\n active_resident_ratios: Vec<u32>,\n\n}\n", "file_path": "src/cli/clusters_health.rs", "rank": 92, "score": 111547.28331972245 }, { "content": "/// Dumps a packet into a easily debuggable string format.\n\n///\n\n/// Note that this is only really suitable when you want to println a full\n\n/// packet, but nonetheless it is helpful for testing.\n\npub fn _dump(input: &Bytes) -> String {\n\n if input.len() < HEADER_SIZE {\n\n return \"Received less bytes than a KV header, invalid data?\".into();\n\n }\n\n\n\n let mut slice = input.slice(0..input.len());\n\n\n\n let mut output = String::new();\n\n output.push_str(\"--- Packet Dump Info --\\n\");\n\n let magic = slice.get_u8();\n\n output.push_str(&format!(\n\n \" Magic: 0x{:x} ({:?})\\n\",\n\n magic,\n\n Magic::from(magic)\n\n ));\n\n let opcode = slice.get_u8();\n\n output.push_str(&format!(\n\n \" Opcode: 0x{:x} ({:?})\\n\",\n\n opcode,\n\n Opcode::try_from(opcode).unwrap()\n", "file_path": "src/client/protocol.rs", "rank": 93, "score": 110341.77102282582 }, { "content": "#[derive(Deserialize, Debug)]\n\nstruct VBucketServerMap {\n\n #[serde(alias = \"numReplicas\")]\n\n num_replicas: u32,\n\n #[serde(alias = \"serverList\")]\n\n server_list: Vec<String>,\n\n #[serde(alias = \"vBucketMap\")]\n\n vbucket_map: Vec<Vec<i32>>,\n\n}\n\n\n\npub enum KeyValueRequest {\n\n Get {\n\n key: String,\n\n },\n\n Set {\n\n key: String,\n\n value: Vec<u8>,\n\n expiry: u32,\n\n },\n\n Insert {\n\n key: String,\n", "file_path": "src/client/kv_client.rs", "rank": 94, "score": 108964.49531960352 }, { "content": "pub fn read_std(std: &[u8]) -> String {\n\n let out = String::from_utf8_lossy(std);\n\n let out = out.lines().collect::<Vec<_>>().join(\"\\n\");\n\n let out = out.replace(\"\\r\\n\", \"\");\n\n out.replace(\"\\n\", \"\")\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 95, "score": 107917.66408531912 }, { "content": "/// Takes a full packet and extracts the body as a slice if possible.\n\npub fn _body(input: &Bytes) -> Option<Bytes> {\n\n let mut slice = input.slice(0..input.len());\n\n\n\n let flexible = Magic::from(slice.get_u8()).is_flexible();\n\n\n\n let flexible_extras_len = if flexible {\n\n slice.advance(1);\n\n slice.get_u8()\n\n } else {\n\n 0\n\n } as usize;\n\n let key_len = if flexible {\n\n slice.get_u8() as u16\n\n } else {\n\n slice.advance(1);\n\n slice.get_u16()\n\n } as usize;\n\n let extras_len = slice.get_u8() as usize;\n\n slice.advance(3);\n\n let total_body_len = slice.get_u32() as usize;\n\n let body_len = total_body_len - key_len - extras_len - flexible_extras_len;\n\n\n\n if body_len > 0 {\n\n Some(input.slice((HEADER_SIZE + flexible_extras_len + extras_len + key_len)..))\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/client/protocol.rs", "rank": 96, "score": 104616.44743767969 }, { "content": "fn json_list(input: &[Value]) -> Result<Vec<serde_json::Value>, ShellError> {\n\n let mut out = vec![];\n\n\n\n for value in input {\n\n out.push(convert_nu_value_to_json_value(value)?);\n\n }\n\n\n\n Ok(out)\n\n}\n\n\n", "file_path": "src/cli/util.rs", "rank": 97, "score": 90971.86397933758 }, { "content": "use crate::state::State;\n\nuse async_trait::async_trait;\n\nuse nu_engine::CommandArgs;\n\nuse nu_errors::ShellError;\n\nuse nu_protocol::{Signature, SyntaxShape, TaggedDictBuilder};\n\nuse nu_source::Tag;\n\nuse nu_stream::OutputStream;\n\nuse std::sync::{Arc, Mutex};\n\n\n\npub struct UseCollection {\n\n state: Arc<Mutex<State>>,\n\n}\n\n\n\nimpl UseCollection {\n\n pub fn new(state: Arc<Mutex<State>>) -> Self {\n\n Self { state }\n\n }\n\n}\n\n\n\n#[async_trait]\n", "file_path": "src/cli/use_collection.rs", "rank": 98, "score": 90937.30641780666 }, { "content": "impl nu_engine::WholeStreamCommand for UseCollection {\n\n fn name(&self) -> &str {\n\n \"use collection\"\n\n }\n\n\n\n fn signature(&self) -> Signature {\n\n Signature::build(\"use collection\").required(\n\n \"identifier\",\n\n SyntaxShape::String,\n\n \"the name of the collection\",\n\n )\n\n }\n\n\n\n fn usage(&self) -> &str {\n\n \"Sets the active collection based on its name\"\n\n }\n\n\n\n fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let guard = self.state.lock().unwrap();\n\n let active = guard.active_cluster();\n", "file_path": "src/cli/use_collection.rs", "rank": 99, "score": 90933.15366280952 } ]
Rust
c-uint256-tests/src/bindings.rs
jjyr/godwoken-scripts
d983fb351410eb6fbe02bb298af909193aeb5f22
/* automatically generated by rust-bindgen 0.59.2 */ pub const true_: u32 = 1; pub const false_: u32 = 0; pub const INT8_MIN: i32 = -128; pub const INT16_MIN: i32 = -32768; pub const INT32_MIN: i32 = -2147483648; pub const INT64_MIN: i64 = -9223372036854775808; pub const INT8_MAX: u32 = 127; pub const INT16_MAX: u32 = 32767; pub const INT32_MAX: u32 = 2147483647; pub const INT64_MAX: u64 = 9223372036854775807; pub const UINT8_MAX: u32 = 255; pub const UINT16_MAX: u32 = 65535; pub const UINT32_MAX: u32 = 4294967295; pub const UINT64_MAX: i32 = -1; pub const SIZE_MAX: i32 = -1; pub type size_t = ::std::os::raw::c_ulong; pub type ssize_t = ::std::os::raw::c_long; extern "C" { pub fn memset( dest: *mut ::std::os::raw::c_void, c: ::std::os::raw::c_int, n: ::std::os::raw::c_ulong, ) -> *mut ::std::os::raw::c_void; } extern "C" { pub fn memcpy( dest: *mut ::std::os::raw::c_void, src: *const ::std::os::raw::c_void, n: ::std::os::raw::c_ulong, ) -> *mut ::std::os::raw::c_void; } extern "C" { pub fn memcmp( vl: *const ::std::os::raw::c_void, vr: *const ::std::os::raw::c_void, n: ::std::os::raw::c_ulong, ) -> ::std::os::raw::c_int; } pub type WT = size_t; extern "C" { pub fn memmove( dest: *mut ::std::os::raw::c_void, src: *const ::std::os::raw::c_void, n: ::std::os::raw::c_ulong, ) -> *mut ::std::os::raw::c_void; } extern "C" { pub fn strcpy( d: *mut ::std::os::raw::c_char, s: *const ::std::os::raw::c_char, ) -> *mut ::std::os::raw::c_char; } extern "C" { pub fn strlen(s: *const ::std::os::raw::c_char) -> ::std::os::raw::c_ulong; } extern "C" { pub fn strcmp( l: *const ::std::os::raw::c_char, r: *const ::std::os::raw::c_char, ) -> ::std::os::raw::c_int; } extern "C" { pub fn malloc(size: ::std::os::raw::c_ulong) -> *mut ::std::os::raw::c_void; } extern "C" { pub fn free(ptr: *mut ::std::os::raw::c_void); } extern "C" { pub fn calloc( nmemb: ::std::os::raw::c_ulong, size: ::std::os::raw::c_ulong, ) -> *mut ::std::os::raw::c_void; } extern "C" { pub fn realloc( ptr: *mut ::std::os::raw::c_void, size: ::std::os::raw::c_ulong, ) -> *mut ::std::os::raw::c_void; } pub type cmpfun = ::std::option::Option< unsafe extern "C" fn( arg1: *const ::std::os::raw::c_void, arg2: *const ::std::os::raw::c_void, ) -> ::std::os::raw::c_int, >; extern "C" { pub fn qsort(base: *mut ::std::os::raw::c_void, nel: size_t, width: size_t, cmp: cmpfun); } extern "C" { pub fn bsearch( key: *const ::std::os::raw::c_void, base: *const ::std::os::raw::c_void, nel: size_t, width: size_t, cmp: ::std::option::Option< unsafe extern "C" fn( arg1: *const ::std::os::raw::c_void, arg2: *const ::std::os::raw::c_void, ) -> ::std::os::raw::c_int, >, ) -> *mut ::std::os::raw::c_void; } extern "C" { pub fn printf(format: *const ::std::os::raw::c_char, ...) -> ::std::os::raw::c_int; } extern "C" { pub fn _start(); } #[repr(C)] #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub struct uint256_t { pub array: [u32; 8usize], } #[test] fn bindgen_test_layout_uint256_t() { assert_eq!( ::std::mem::size_of::<uint256_t>(), 32usize, concat!("Size of: ", stringify!(uint256_t)) ); assert_eq!( ::std::mem::align_of::<uint256_t>(), 4usize, concat!("Alignment of ", stringify!(uint256_t)) ); assert_eq!( unsafe { &(*(::std::ptr::null::<uint256_t>())).array as *const _ as usize }, 0usize, concat!( "Offset of field: ", stringify!(uint256_t), "::", stringify!(array) ) ); } extern "C" { pub fn gw_uint256_zero(num: *mut uint256_t); } extern "C" { pub fn gw_uint256_one(num: *mut uint256_t); } extern "C" { pub fn gw_uint256_max(num: *mut uint256_t); } extern "C" { pub fn gw_uint256_overflow_add( a: uint256_t, b: uint256_t, sum: *mut uint256_t, ) -> ::std::os::raw::c_int; } extern "C" { pub fn gw_uint256_underflow_sub( a: uint256_t, b: uint256_t, rem: *mut uint256_t, ) -> ::std::os::raw::c_int; } pub const GW_UINT256_SMALLER: ::std::os::raw::c_int = -1; pub const GW_UINT256_EQUAL: ::std::os::raw::c_int = 0; pub const GW_UINT256_LARGER: ::std::os::raw::c_int = 1; pub type _bindgen_ty_1 = ::std::os::raw::c_int; extern "C" { pub fn gw_uint256_cmp(a: uint256_t, b: uint256_t) -> ::std::os::raw::c_int; }
/* automatically generated by rust-bindgen 0.59.2 */ pub const true_: u32 = 1; pub const false_: u32 = 0; pub const INT8_MIN: i32 = -128; pub const INT16_MIN: i32 = -32768; pub const INT32_MIN: i32 = -2147483648; pub const INT64_MIN: i64 = -9223372036854775808; pub const INT8_MAX: u32 = 127; pub const INT16_MAX: u32 = 32767; pub const INT32_MAX: u32 = 2147483647; pub const INT64_MAX: u64 = 9223372036854775807; pub const UINT8_MAX: u32 = 255; pub const UINT16_MAX: u32 = 65535; pub const UINT32_MAX: u32 = 4294967295; pub const UINT64_MAX: i32 = -1; pub const SIZE_MAX: i32 = -1; pub type size_t = ::std::os::raw::c_ulong; pub type ssize_t = ::std::os::raw::c_long; extern "C" { pub fn memset( dest: *mut ::std::os::raw::c_void, c: ::std::os::raw::c_int, n: ::std::os::raw::c_ulong, ) -> *mut ::std::os::raw::c_void; } extern "C" { pub fn memcpy( dest: *mut ::std::os::raw::c_void, src: *const ::std::os::raw::c_void, n: ::std::os::raw::c_ulong, ) -> *mut ::std::os::raw::c_void; } extern "C" { pub fn memcmp( vl: *const ::std::os::raw::c_void, vr: *const ::std::os::raw::c_void, n: ::std::os::raw::c_ulong, ) -> ::std::os::raw::c_int; } pub type WT = size_t; extern "C" { pub fn memmove( dest: *mut ::std::os::raw::c_void, src: *const ::std::os::raw::c_void, n: ::std::os::raw::c_ulong, ) -> *mut ::std::os::raw::c_void; } extern "C" { pub fn strcpy( d: *mut ::std::os::raw::c_char, s: *const ::std::os::raw::c_char, ) -> *mut ::std::os::raw::c_char; } extern "C" { pub fn strlen(s: *const ::std::os::raw::
t uint256_t { pub array: [u32; 8usize], } #[test] fn bindgen_test_layout_uint256_t() { assert_eq!( ::std::mem::size_of::<uint256_t>(), 32usize, concat!("Size of: ", stringify!(uint256_t)) ); assert_eq!( ::std::mem::align_of::<uint256_t>(), 4usize, concat!("Alignment of ", stringify!(uint256_t)) ); assert_eq!( unsafe { &(*(::std::ptr::null::<uint256_t>())).array as *const _ as usize }, 0usize, concat!( "Offset of field: ", stringify!(uint256_t), "::", stringify!(array) ) ); } extern "C" { pub fn gw_uint256_zero(num: *mut uint256_t); } extern "C" { pub fn gw_uint256_one(num: *mut uint256_t); } extern "C" { pub fn gw_uint256_max(num: *mut uint256_t); } extern "C" { pub fn gw_uint256_overflow_add( a: uint256_t, b: uint256_t, sum: *mut uint256_t, ) -> ::std::os::raw::c_int; } extern "C" { pub fn gw_uint256_underflow_sub( a: uint256_t, b: uint256_t, rem: *mut uint256_t, ) -> ::std::os::raw::c_int; } pub const GW_UINT256_SMALLER: ::std::os::raw::c_int = -1; pub const GW_UINT256_EQUAL: ::std::os::raw::c_int = 0; pub const GW_UINT256_LARGER: ::std::os::raw::c_int = 1; pub type _bindgen_ty_1 = ::std::os::raw::c_int; extern "C" { pub fn gw_uint256_cmp(a: uint256_t, b: uint256_t) -> ::std::os::raw::c_int; }
c_char) -> ::std::os::raw::c_ulong; } extern "C" { pub fn strcmp( l: *const ::std::os::raw::c_char, r: *const ::std::os::raw::c_char, ) -> ::std::os::raw::c_int; } extern "C" { pub fn malloc(size: ::std::os::raw::c_ulong) -> *mut ::std::os::raw::c_void; } extern "C" { pub fn free(ptr: *mut ::std::os::raw::c_void); } extern "C" { pub fn calloc( nmemb: ::std::os::raw::c_ulong, size: ::std::os::raw::c_ulong, ) -> *mut ::std::os::raw::c_void; } extern "C" { pub fn realloc( ptr: *mut ::std::os::raw::c_void, size: ::std::os::raw::c_ulong, ) -> *mut ::std::os::raw::c_void; } pub type cmpfun = ::std::option::Option< unsafe extern "C" fn( arg1: *const ::std::os::raw::c_void, arg2: *const ::std::os::raw::c_void, ) -> ::std::os::raw::c_int, >; extern "C" { pub fn qsort(base: *mut ::std::os::raw::c_void, nel: size_t, width: size_t, cmp: cmpfun); } extern "C" { pub fn bsearch( key: *const ::std::os::raw::c_void, base: *const ::std::os::raw::c_void, nel: size_t, width: size_t, cmp: ::std::option::Option< unsafe extern "C" fn( arg1: *const ::std::os::raw::c_void, arg2: *const ::std::os::raw::c_void, ) -> ::std::os::raw::c_int, >, ) -> *mut ::std::os::raw::c_void; } extern "C" { pub fn printf(format: *const ::std::os::raw::c_char, ...) -> ::std::os::raw::c_int; } extern "C" { pub fn _start(); } #[repr(C)] #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub struc
random
[ { "content": "pub fn since_timestamp(t: u64) -> Uint64 {\n\n let input_timestamp = Duration::from_millis(t).as_secs() + 1;\n\n (SINCE_BLOCK_TIMESTAMP_FLAG | input_timestamp).pack()\n\n}\n\n\n", "file_path": "tests/src/script_tests/utils/layer1.rs", "rank": 0, "score": 195985.14016819192 }, { "content": "// type_id: usually the first 32-bytes of the current script.args\n\n// notice the type_id must be included in the script.args\n\npub fn check_type_id(type_id: [u8; 32]) -> Result<(), Error> {\n\n // check there is only one type id cell in each input/output group\n\n let has_second_input_type_id_cell = has_type_id_cell(1, Source::GroupInput);\n\n let has_second_output_type_id_cell = has_type_id_cell(1, Source::GroupOutput);\n\n if has_second_input_type_id_cell || has_second_output_type_id_cell {\n\n debug!(\"There are only be at most one input and at most one output type id cell\");\n\n return Err(Error::InvalidTypeID);\n\n }\n\n let has_first_input_type_id_cell = has_type_id_cell(0, Source::GroupInput);\n\n // we already has type_id, just return OK\n\n if has_first_input_type_id_cell {\n\n return Ok(());\n\n }\n\n // no type_id cell in the input, we are on the creation of a new type_id cell\n\n // search current output index.\n\n // (since we have no input in the group, we must have at least one output)\n\n let script_hash = load_script_hash()?;\n\n let output_index: u64 = QueryIter::new(load_cell_type_hash, Source::Output)\n\n .position(|type_hash| type_hash == Some(script_hash))\n\n .ok_or(Error::InvalidTypeID)? as u64;\n", "file_path": "contracts/gw-utils/src/type_id.rs", "rank": 1, "score": 173792.67620472776 }, { "content": "pub fn new_block_info(block_producer: &RegistryAddress, number: u64, timestamp: u64) -> BlockInfo {\n\n BlockInfo::new_builder()\n\n .block_producer(Bytes::from(block_producer.to_bytes()).pack())\n\n .number(number.pack())\n\n .timestamp(timestamp.pack())\n\n .build()\n\n}\n\n\n", "file_path": "tests/src/script_tests/l2_scripts/mod.rs", "rank": 2, "score": 173389.45269009288 }, { "content": "/// Verify revert\n\n/// 1. check revert merkle roots\n\n/// 2. check reverted block root\n\n/// 3. check other lock cells\n\npub fn verify(\n\n rollup_type_hash: H256,\n\n config: &RollupConfig,\n\n revert_args: RollupRevertReader,\n\n prev_global_state: &GlobalState,\n\n post_global_state: &GlobalState,\n\n) -> Result<(), Error> {\n\n check_status(prev_global_state, Status::Halting)?;\n\n // check rollup lock cells,\n\n // we do not handle the reverting of lock cells in here,\n\n // instead we handle them in the submitting layer2 block action\n\n check_rollup_lock_cells_except_stake(&rollup_type_hash, config)?;\n\n // do not accept stake cells in the output\n\n if !collect_stake_cells(&rollup_type_hash, config, Source::Output)?.is_empty() {\n\n return Err(Error::InvalidStakeCell);\n\n }\n\n // load reverted blocks\n\n let reverted_blocks_vec = revert_args.reverted_blocks();\n\n let reverted_blocks: Vec<_> = reverted_blocks_vec.iter().collect();\n\n // check challenge cells\n", "file_path": "contracts/state-validator/src/verifications/revert.rs", "rank": 3, "score": 162732.29068912697 }, { "content": "pub fn build_type_id_script(name: &[u8]) -> ckb_types::packed::Script {\n\n ckb_types::packed::Script::new_builder()\n\n .code_hash(CKBPack::pack(&ALWAYS_SUCCESS_CODE_HASH.clone()))\n\n .args(CKBPack::pack(&Bytes::from(name.to_vec())))\n\n .build()\n\n}\n\n\n", "file_path": "tests/src/script_tests/utils/rollup.rs", "rank": 4, "score": 162511.5639043403 }, { "content": "pub fn build_sync_tx(\n\n rollup_cell: CellOutput,\n\n produce_block_result: ProduceBlockResult,\n\n) -> Transaction {\n\n let ProduceBlockResult {\n\n block,\n\n global_state,\n\n withdrawal_extras: _,\n\n } = produce_block_result;\n\n let action = RollupAction::new_builder()\n\n .set(RollupActionUnion::RollupSubmitBlock(\n\n RollupSubmitBlock::new_builder().block(block).build(),\n\n ))\n\n .build();\n\n let witness = WitnessArgs::new_builder()\n\n .output_type(Pack::<_>::pack(&Some(action.as_bytes())))\n\n .build();\n\n let raw = RawTransaction::new_builder()\n\n .outputs(vec![rollup_cell].pack())\n\n .outputs_data(vec![global_state.as_bytes()].pack())\n", "file_path": "tests/src/testing_tool/chain.rs", "rank": 5, "score": 159890.95522349438 }, { "content": "/// Verify Deposit & Withdrawal\n\npub fn verify(\n\n rollup_type_hash: H256,\n\n config: &RollupConfig,\n\n block: &L2BlockReader,\n\n prev_global_state: &GlobalState,\n\n post_global_state: &GlobalState,\n\n) -> Result<(), Error> {\n\n check_status(prev_global_state, Status::Running)?;\n\n\n\n // check checkpoints\n\n check_state_checkpoints(block)?;\n\n\n\n // Check withdrawals root\n\n check_block_withdrawals(block)?;\n\n\n\n let mut tree_buffer = [Pair::default(); GW_MAX_KV_PAIRS];\n\n let kv_state_proof: Bytes = block.kv_state_proof().unpack();\n\n\n\n let (context, mut kv_state) = load_block_context_and_state(\n\n rollup_type_hash,\n", "file_path": "contracts/state-validator/src/verifications/submit_block.rs", "rank": 6, "score": 159890.95522349438 }, { "content": "/// Verify withdrawal signature\n\npub fn verify_withdrawal(\n\n _rollup_script_hash: &[u8; 32],\n\n rollup_config: &RollupConfig,\n\n lock_args: &ChallengeLockArgs,\n\n) -> Result<(), Error> {\n\n let WithdrawalContext {\n\n withdrawal,\n\n sender_script_hash,\n\n withdrawal_address,\n\n owner_lock,\n\n } = verify_withdrawal_proof(lock_args)?;\n\n let raw_withdrawal = withdrawal.raw();\n\n\n\n // check rollup chain id\n\n let expected_rollup_chain_id: u64 = rollup_config.chain_id().unpack();\n\n let chain_id: u64 = raw_withdrawal.chain_id().unpack();\n\n if expected_rollup_chain_id != chain_id {\n\n debug!(\"Withdrawal using wrong rollup_chain_id\");\n\n return Err(Error::WrongSignature);\n\n }\n", "file_path": "contracts/challenge-lock/src/verifications/withdrawal.rs", "rank": 7, "score": 159890.95522349438 }, { "content": "pub fn build_simple_tx(\n\n data_loader: &mut DummyDataLoader,\n\n input_cell: (CellOutput, Bytes),\n\n since: Uint64,\n\n output_cell: (CellOutput, Bytes),\n\n) -> TransactionView {\n\n let out_point = random_out_point();\n\n\n\n build_simple_tx_with_out_point_and_since(\n\n data_loader,\n\n input_cell,\n\n (out_point, since),\n\n output_cell,\n\n )\n\n}\n\n\n", "file_path": "tests/src/script_tests/utils/layer1.rs", "rank": 8, "score": 157196.70962662876 }, { "content": "pub fn verify_enter_challenge(\n\n rollup_type_hash: H256,\n\n config: &RollupConfig,\n\n args: RollupEnterChallengeReader,\n\n prev_global_state: &GlobalState,\n\n post_global_state: &GlobalState,\n\n) -> Result<(), Error> {\n\n check_status(prev_global_state, Status::Running)?;\n\n // check challenge cells\n\n let has_input_challenge =\n\n find_challenge_cell(&rollup_type_hash, config, Source::Input)?.is_some();\n\n if has_input_challenge {\n\n return Err(Error::InvalidChallengeCell);\n\n }\n\n let challenge_cell = find_challenge_cell(&rollup_type_hash, config, Source::Output)?\n\n .ok_or(Error::InvalidChallengeCell)?;\n\n // check that challenge target is exists\n\n let witness = args.witness();\n\n let challenged_block = witness.raw_l2block();\n\n // check challenged block isn't finazlied\n", "file_path": "contracts/state-validator/src/verifications/challenge.rs", "rank": 9, "score": 157196.70962662876 }, { "content": "pub fn verify_cancel_challenge(\n\n rollup_type_hash: H256,\n\n config: &RollupConfig,\n\n prev_global_state: &GlobalState,\n\n post_global_state: &GlobalState,\n\n) -> Result<(), Error> {\n\n check_status(prev_global_state, Status::Halting)?;\n\n // check challenge cells\n\n let has_input_challenge =\n\n find_challenge_cell(&rollup_type_hash, config, Source::Input)?.is_some();\n\n let has_output_challenge =\n\n find_challenge_cell(&rollup_type_hash, config, Source::Output)?.is_some();\n\n if !has_input_challenge || has_output_challenge {\n\n debug!(\"cancel challenge, invalid challenge cell\");\n\n return Err(Error::InvalidChallengeCell);\n\n }\n\n\n\n // Check cancel burn\n\n let challenge_cell = find_challenge_cell(&rollup_type_hash, config, Source::Input)?\n\n .ok_or(Error::InvalidChallengeCell)?;\n", "file_path": "contracts/state-validator/src/verifications/challenge.rs", "rank": 10, "score": 157196.70962662876 }, { "content": "pub fn init_env_log() {\n\n let _ = env_logger::builder().is_test(true).try_init();\n\n}\n", "file_path": "tests/src/script_tests/utils/mod.rs", "rank": 11, "score": 157196.70962662876 }, { "content": "pub fn parse_rollup_action(\n\n buf: &mut [u8; MAX_ROLLUP_WITNESS_SIZE],\n\n index: usize,\n\n source: Source,\n\n) -> Result<RollupActionReader, Error> {\n\n let loaded_len = load_witness(buf, 0, index, source)?;\n\n debug!(\"load rollup witness, loaded len: {}\", loaded_len);\n\n\n\n let witness_args = WitnessArgsReader::from_slice(&buf[..loaded_len]).map_err(|_err| {\n\n debug!(\"witness is not a valid WitnessArgsReader\");\n\n Error::Encoding\n\n })?;\n\n let output = witness_args.output_type().to_opt().ok_or_else(|| {\n\n debug!(\"WitnessArgs#output_type is none\");\n\n Error::Encoding\n\n })?;\n\n let action = RollupActionReader::from_slice(output.raw_data()).map_err(|_err| {\n\n debug!(\"output is not a valid RollupActionReader\");\n\n Error::Encoding\n\n })?;\n\n Ok(action)\n\n}\n", "file_path": "contracts/gw-utils/src/cells/rollup.rs", "rank": 12, "score": 157196.70962662876 }, { "content": "pub fn search_rollup_state(\n\n rollup_type_hash: &[u8; 32],\n\n source: Source,\n\n) -> Result<Option<GlobalState>, SysError> {\n\n let index = match QueryIter::new(load_cell_type_hash, source)\n\n .position(|type_hash| type_hash.as_ref() == Some(rollup_type_hash))\n\n {\n\n Some(i) => i,\n\n None => return Ok(None),\n\n };\n\n let data = load_cell_data(index, source)?;\n\n match GlobalStateReader::verify(&data, false) {\n\n Ok(_) => Ok(Some(GlobalState::new_unchecked(data.into()))),\n\n Err(_) if GlobalStateV0Reader::verify(&data, false).is_ok() => {\n\n let global_state_v0 = GlobalStateV0::new_unchecked(data.into());\n\n Ok(Some(GlobalState::from(global_state_v0)))\n\n }\n\n Err(_) => {\n\n debug!(\"Invalid encoding of Global state\");\n\n Err(SysError::Encoding)\n\n }\n\n }\n\n}\n\n\n", "file_path": "contracts/gw-utils/src/cells/rollup.rs", "rank": 13, "score": 157196.70962662876 }, { "content": "pub fn build_resolved_tx(\n\n data_loader: &DummyDataLoader,\n\n tx: &TransactionView,\n\n) -> ResolvedTransaction {\n\n let resolved_cell_deps = tx\n\n .cell_deps()\n\n .into_iter()\n\n .map(|dep| {\n\n let deps_out_point = dep.clone();\n\n let (dep_output, dep_data) =\n\n data_loader.cells.get(&deps_out_point.out_point()).unwrap();\n\n CellMetaBuilder::from_cell_output(dep_output.to_owned(), dep_data.to_owned())\n\n .out_point(deps_out_point.out_point().clone())\n\n .build()\n\n })\n\n .collect();\n\n\n\n let mut resolved_inputs = Vec::new();\n\n for i in 0..tx.inputs().len() {\n\n let previous_out_point = tx.inputs().get(i).unwrap().previous_output();\n", "file_path": "tests/src/script_tests/utils/layer1.rs", "rank": 14, "score": 157196.70962662876 }, { "content": "pub fn calculate_state_validator_type_id(input_out_point: ckb_types::packed::OutPoint) -> [u8; 32] {\n\n let input = ckb_types::packed::CellInput::new_builder()\n\n .previous_output(input_out_point)\n\n .build();\n\n let mut hasher = new_blake2b();\n\n let output_index: u64 = 0;\n\n hasher.update(&input.as_bytes());\n\n hasher.update(&output_index.to_le_bytes());\n\n let mut expected_type_id = [0u8; 32];\n\n hasher.finalize(&mut expected_type_id);\n\n expected_type_id\n\n}\n", "file_path": "tests/src/script_tests/utils/rollup.rs", "rank": 15, "score": 155193.1810721174 }, { "content": "pub fn collect_stake_cells(\n\n rollup_type_hash: &H256,\n\n config: &RollupConfig,\n\n source: Source,\n\n) -> Result<Vec<StakeCell>, Error> {\n\n let iter = QueryIter::new(load_cell_lock, source)\n\n .enumerate()\n\n .filter_map(|(index, lock)| -> Option<Result<StakeCell, _>> {\n\n let args = match extract_args_from_lock::<StakeLockArgs>(\n\n &lock,\n\n rollup_type_hash,\n\n &config.stake_script_type_hash(),\n\n ) {\n\n Some(Ok(args)) => args,\n\n Some(Err(err)) => return Some(Err(err)),\n\n None => return None,\n\n };\n\n let value = match fetch_capacity_and_sudt_value(config, index, source) {\n\n Ok(value) => value,\n\n Err(err) => return Some(Err(err)),\n", "file_path": "contracts/gw-utils/src/cells/lock_cells.rs", "rank": 16, "score": 154638.42028761393 }, { "content": "/// this function ensure transaction doesn't contains any deposit / withdrawal / custodian / stake cells\n\npub fn check_rollup_lock_cells(\n\n rollup_type_hash: &H256,\n\n config: &RollupConfig,\n\n) -> Result<(), Error> {\n\n check_rollup_lock_cells_except_stake(rollup_type_hash, config)?;\n\n if !collect_stake_cells(rollup_type_hash, config, Source::Input)?.is_empty() {\n\n debug!(\"unexpected input stake cell\");\n\n return Err(Error::InvalidStakeCell);\n\n }\n\n if !collect_stake_cells(rollup_type_hash, config, Source::Output)?.is_empty() {\n\n debug!(\"unexpected output stake cell\");\n\n return Err(Error::InvalidStakeCell);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "contracts/state-validator/src/verifications/mod.rs", "rank": 17, "score": 154638.42028761393 }, { "content": "/// Verify tx execution\n\npub fn verify_tx_execution(\n\n rollup_config: &RollupConfig,\n\n lock_args: &ChallengeLockArgs,\n\n) -> Result<(), Error> {\n\n let mut buf = [0u8; MAX_ROLLUP_WITNESS_SIZE];\n\n let loaded_len = load_witness(&mut buf, 0, 0, Source::GroupInput)?;\n\n debug!(\"verity tx execution witness, loaded len: {}\", loaded_len);\n\n\n\n let witness_args: BytesReader = {\n\n let reader = WitnessArgsReader::from_slice(&buf[..loaded_len]).map_err(|_err| {\n\n debug!(\"witness is not a valid WitnessArgsReader\");\n\n Error::Encoding\n\n })?;\n\n\n\n reader.lock().to_opt().ok_or(Error::InvalidArgs)?\n\n };\n\n\n\n let unlock_args = match CCTransactionWitnessReader::verify(witness_args.raw_data(), false) {\n\n Ok(_) => CCTransactionWitnessReader::new_unchecked(witness_args.raw_data()),\n\n Err(_) => return Err(Error::InvalidArgs),\n", "file_path": "contracts/challenge-lock/src/verifications/tx_execution.rs", "rank": 18, "score": 154638.42028761393 }, { "content": "pub fn build_l2_sudt_script(\n\n rollup_script_hash: &H256,\n\n config: &RollupConfig,\n\n l1_sudt_script_hash: &H256,\n\n) -> Script {\n\n let args = {\n\n let mut args = Vec::with_capacity(64);\n\n args.extend(rollup_script_hash.as_slice());\n\n args.extend(l1_sudt_script_hash.as_slice());\n\n Bytes::from(args)\n\n };\n\n Script::new_builder()\n\n .args(args.pack())\n\n .code_hash(config.l2_sudt_validator_script_type_hash())\n\n .hash_type(ScriptHashType::Type.into())\n\n .build()\n\n}\n", "file_path": "contracts/gw-utils/src/cells/utils.rs", "rank": 19, "score": 154638.42028761393 }, { "content": "pub fn build_simple_tx_with_out_point(\n\n data_loader: &mut DummyDataLoader,\n\n input_cell: (CellOutput, Bytes),\n\n input_out_point: OutPoint,\n\n output_cell: (CellOutput, Bytes),\n\n) -> TransactionView {\n\n build_simple_tx_with_out_point_and_since(\n\n data_loader,\n\n input_cell,\n\n (input_out_point, Default::default()),\n\n output_cell,\n\n )\n\n}\n\n\n", "file_path": "tests/src/script_tests/utils/layer1.rs", "rank": 20, "score": 154638.42028761393 }, { "content": "pub fn collect_custodian_locks(\n\n rollup_type_hash: &H256,\n\n config: &RollupConfig,\n\n source: Source,\n\n) -> Result<Vec<CustodianCell>, Error> {\n\n QueryIter::new(load_cell_lock, source)\n\n .enumerate()\n\n .filter_map(|(index, lock)| {\n\n let args = match extract_args_from_lock(\n\n &lock,\n\n rollup_type_hash,\n\n &config.custodian_script_type_hash(),\n\n ) {\n\n Some(Ok(args)) => args,\n\n Some(Err(err)) => return Some(Err(err)),\n\n None => return None,\n\n };\n\n let value = match fetch_capacity_and_sudt_value(config, index, source) {\n\n Ok(value) => value,\n\n Err(err) => return Some(Err(err)),\n\n };\n\n let cell = CustodianCell { index, args, value };\n\n Some(Ok(cell))\n\n })\n\n .collect::<Result<_, Error>>()\n\n}\n\n\n", "file_path": "contracts/gw-utils/src/cells/lock_cells.rs", "rank": 21, "score": 154638.42028761393 }, { "content": "pub fn build_always_success_cell(\n\n capacity: u64,\n\n type_: Option<ckb_types::packed::Script>,\n\n) -> ckb_types::packed::CellOutput {\n\n CellOutput::new_builder()\n\n .lock(always_success_script())\n\n .type_(CKBPack::pack(&type_))\n\n .capacity(CKBPack::pack(&capacity))\n\n .build()\n\n}\n\n\n", "file_path": "tests/src/script_tests/utils/rollup.rs", "rank": 22, "score": 154638.42028761393 }, { "content": "pub fn collect_withdrawal_locks(\n\n rollup_type_hash: &H256,\n\n config: &RollupConfig,\n\n source: Source,\n\n) -> Result<Vec<WithdrawalCell>, Error> {\n\n QueryIter::new(load_cell_lock, source)\n\n .enumerate()\n\n .filter_map(|(index, lock)| {\n\n let lock_args: Bytes = lock.args().unpack();\n\n let is_withdrawal_lock = lock_args.len() > 32\n\n && &lock_args[..32] == rollup_type_hash.as_slice()\n\n && lock.code_hash().as_slice() == config.withdrawal_script_type_hash().as_slice()\n\n && lock.hash_type() == ScriptHashType::Type.into();\n\n if !is_withdrawal_lock {\n\n return None;\n\n }\n\n let args = match crate::withdrawal::parse_lock_args(&lock_args) {\n\n Ok(r) => r.lock_args,\n\n Err(_) => {\n\n debug!(\"Fail to parsing withdrawal lock args\");\n", "file_path": "contracts/gw-utils/src/cells/lock_cells.rs", "rank": 23, "score": 154638.42028761393 }, { "content": "pub fn get_receiver_cells_capacity(\n\n config: &RollupConfig,\n\n lock_hash: &[u8; 32],\n\n source: Source,\n\n) -> Result<u128, Error> {\n\n let capacity = search_lock_hashes(lock_hash, source)\n\n .into_iter()\n\n .map(|index| {\n\n fetch_capacity_and_sudt_value(config, index, source).map(|value| value.capacity.into())\n\n })\n\n .collect::<Result<Vec<u128>, Error>>()?\n\n .into_iter()\n\n .sum();\n\n Ok(capacity)\n\n}\n\n\n", "file_path": "contracts/state-validator/src/verifications/revert.rs", "rank": 24, "score": 154638.42028761393 }, { "content": "pub fn build_rollup_locked_cell(\n\n rollup_type_script_hash: &[u8; 32],\n\n script_type_hash: &[u8; 32],\n\n capacity: u64,\n\n lock_args: Bytes,\n\n) -> ckb_types::packed::CellOutput {\n\n let lock = {\n\n let mut args = Vec::new();\n\n args.extend_from_slice(rollup_type_script_hash);\n\n args.extend_from_slice(&lock_args);\n\n ckb_types::packed::Script::new_builder()\n\n .code_hash(CKBPack::pack(script_type_hash))\n\n .hash_type(ScriptHashType::Type.into())\n\n .args(CKBPack::pack(&Bytes::from(args)))\n\n .build()\n\n };\n\n CellOutput::new_builder()\n\n .lock(lock)\n\n .capacity(CKBPack::pack(&capacity))\n\n .build()\n\n}\n\n\n", "file_path": "tests/src/script_tests/utils/rollup.rs", "rank": 25, "score": 154638.42028761393 }, { "content": "/// Check l2 account signature cell\n\npub fn check_l2_account_signature_cell(\n\n script_hash: &H256,\n\n expected_signing_type: SigningType,\n\n message: H256,\n\n) -> Result<(), Error> {\n\n debug!(\"Check l2 account signature for message {:?}\", message);\n\n // search layer2 account lock cell from inputs\n\n for index in search_lock_hashes(&(*script_hash).into(), Source::Input) {\n\n // expected data is equals to onetime_lock_hash(32 bytes) | sign type (1 byte) | message(32 bytes)\n\n let mut data = [0u8; 33];\n\n let len = load_cell_data(&mut data, 32, index, Source::Input)?;\n\n\n\n // skip if the data isn't 32 length\n\n if len != data.len() {\n\n continue;\n\n }\n\n\n\n let signing_type = match SigningType::try_from(data[0]) {\n\n Ok(type_) => type_,\n\n Err(_err) => continue,\n", "file_path": "contracts/gw-utils/src/signature.rs", "rank": 26, "score": 154638.42028761393 }, { "content": "pub fn check_transfer_logs(\n\n logs: &[LogItem],\n\n sudt_id: u32,\n\n block_producer_addr: &RegistryAddress,\n\n fee: u128,\n\n from_addr: &RegistryAddress,\n\n to_addr: &RegistryAddress,\n\n amount: U256,\n\n) {\n\n // pay fee log\n\n let sudt_fee_log = SudtLog::from_log_item(&logs[0]).unwrap();\n\n assert_eq!(sudt_fee_log.sudt_id, CKB_SUDT_ACCOUNT_ID);\n\n assert_eq!(&sudt_fee_log.from_addr, from_addr,);\n\n assert_eq!(&sudt_fee_log.to_addr, block_producer_addr);\n\n assert_eq!(sudt_fee_log.amount, fee.into());\n\n assert_eq!(sudt_fee_log.log_type, SudtLogType::PayFee);\n\n // transfer to `to_id`\n\n let sudt_transfer_log = SudtLog::from_log_item(&logs[1]).unwrap();\n\n assert_eq!(sudt_transfer_log.sudt_id, sudt_id);\n\n assert_eq!(&sudt_transfer_log.from_addr, from_addr);\n\n assert_eq!(&sudt_transfer_log.to_addr, to_addr);\n\n assert_eq!(sudt_transfer_log.amount, amount.into());\n\n assert_eq!(sudt_transfer_log.log_type, SudtLogType::Transfer);\n\n}\n\n\n", "file_path": "tests/src/script_tests/l2_scripts/mod.rs", "rank": 27, "score": 154638.42028761393 }, { "content": "pub fn find_challenge_cell(\n\n rollup_type_hash: &H256,\n\n config: &RollupConfig,\n\n source: Source,\n\n) -> Result<Option<ChallengeCell>, Error> {\n\n let iter = QueryIter::new(load_cell_lock, source)\n\n .enumerate()\n\n .filter_map(|(index, lock)| {\n\n let args = match extract_args_from_lock(\n\n &lock,\n\n rollup_type_hash,\n\n &config.challenge_script_type_hash(),\n\n ) {\n\n Some(Ok(args)) => args,\n\n Some(Err(err)) => return Some(Err(err)),\n\n None => return None,\n\n };\n\n let value = match fetch_capacity_and_sudt_value(config, index, source) {\n\n Ok(value) => value,\n\n Err(err) => {\n", "file_path": "contracts/gw-utils/src/cells/lock_cells.rs", "rank": 28, "score": 154638.42028761393 }, { "content": "/// Verify tx signature\n\npub fn verify_tx_signature(\n\n _rollup_script_hash: &[u8; 32],\n\n rollup_config: &RollupConfig,\n\n lock_args: &ChallengeLockArgs,\n\n) -> Result<(), Error> {\n\n let witness_args: Bytes = load_witness_args(0, Source::GroupInput)?\n\n .lock()\n\n .to_opt()\n\n .ok_or(Error::InvalidArgs)?\n\n .unpack();\n\n let unlock_args = match CCTransactionSignatureWitnessReader::verify(&witness_args, false) {\n\n Ok(_) => CCTransactionSignatureWitness::new_unchecked(witness_args),\n\n Err(_) => return Err(Error::InvalidArgs),\n\n };\n\n let tx = unlock_args.l2tx();\n\n\n\n // check rollup chain id\n\n let expected_rollup_chain_id: u64 = rollup_config.chain_id().unpack();\n\n let chain_id: u64 = tx.raw().chain_id().unpack();\n\n if expected_rollup_chain_id != chain_id {\n", "file_path": "contracts/challenge-lock/src/verifications/tx_signature.rs", "rank": 29, "score": 154638.42028761393 }, { "content": "pub fn collect_deposit_locks(\n\n rollup_type_hash: &H256,\n\n config: &RollupConfig,\n\n source: Source,\n\n) -> Result<Vec<DepositRequestCell>, Error> {\n\n QueryIter::new(load_cell_lock, source)\n\n .enumerate()\n\n .filter_map(|(index, lock)| {\n\n let args: DepositLockArgs = match extract_args_from_lock(\n\n &lock,\n\n rollup_type_hash,\n\n &config.deposit_script_type_hash(),\n\n ) {\n\n Some(Ok(args)) => args,\n\n Some(Err(err)) => return Some(Err(err)),\n\n None => return None,\n\n };\n\n let value = match fetch_capacity_and_sudt_value(config, index, source) {\n\n Ok(value) => value,\n\n Err(err) => return Some(Err(err)),\n", "file_path": "contracts/gw-utils/src/cells/lock_cells.rs", "rank": 30, "score": 154638.42028761393 }, { "content": "pub fn recover_uncompressed_key(message: [u8; 32], signature: [u8; 65]) -> Result<[u8; 65], i32> {\n\n let mut pubkey = [0u8; 65];\n\n let ret = unsafe {\n\n recover_secp256k1_uncompressed_key(\n\n message.as_ptr(),\n\n signature.as_ptr(),\n\n pubkey.as_mut_ptr(),\n\n )\n\n };\n\n if ret == 0 {\n\n Ok(pubkey)\n\n } else {\n\n Err(ret)\n\n }\n\n}\n", "file_path": "contracts/secp256k1-utils/src/secp256k1_utils.rs", "rank": 31, "score": 154184.18758301076 }, { "content": "/// fetch capacity and SUDT value of a cell\n\npub fn fetch_capacity_and_sudt_value(\n\n config: &RollupConfig,\n\n index: usize,\n\n source: Source,\n\n) -> Result<CellValue, Error> {\n\n let capacity = load_cell_capacity(index, source)?;\n\n let value = match fetch_sudt_script_hash(config, index, source)? {\n\n Some(sudt_script_hash) => {\n\n let data = load_cell_data(index, source)?;\n\n let mut buf = [0u8; 16];\n\n buf.copy_from_slice(&data[..16]);\n\n let amount = u128::from_le_bytes(buf);\n\n CellValue {\n\n sudt_script_hash: sudt_script_hash.into(),\n\n amount,\n\n capacity,\n\n }\n\n }\n\n None => CellValue {\n\n sudt_script_hash: H256::zero(),\n\n amount: 0,\n\n capacity,\n\n },\n\n };\n\n Ok(value)\n\n}\n\n\n", "file_path": "contracts/gw-utils/src/cells/lock_cells.rs", "rank": 32, "score": 152206.04958501676 }, { "content": "pub fn build_simple_tx_with_out_point_and_since(\n\n data_loader: &mut DummyDataLoader,\n\n input_cell: (CellOutput, Bytes),\n\n input_out_point_since: (OutPoint, Uint64),\n\n output_cell: (CellOutput, Bytes),\n\n) -> TransactionView {\n\n let (out_point, since) = input_out_point_since;\n\n data_loader.cells.insert(out_point.clone(), input_cell);\n\n\n\n let input = CellInput::new_builder()\n\n .previous_output(out_point)\n\n .since(since)\n\n .build();\n\n let (output_cell, output_data) = output_cell;\n\n\n\n Transaction::default()\n\n .as_advanced_builder()\n\n .input(input)\n\n .output(output_cell)\n\n .output_data(output_data.pack())\n\n .build()\n\n}\n\n\n", "file_path": "tests/src/script_tests/utils/layer1.rs", "rank": 33, "score": 152206.04958501676 }, { "content": "pub fn fetch_token_amount_by_lock_hash(\n\n owner_lock_hash: &[u8; 32],\n\n token_type: &TokenType,\n\n source: Source,\n\n) -> Result<CellTokenAmount, Error> {\n\n let mut total_token_amount = 0u128;\n\n let mut total_capacity = 0u128;\n\n for (i, lock_hash) in QueryIter::new(load_cell_lock_hash, source)\n\n .into_iter()\n\n .enumerate()\n\n {\n\n if &lock_hash != owner_lock_hash {\n\n continue;\n\n }\n\n\n\n let capacity = load_cell_capacity(i, source)?;\n\n total_capacity = total_capacity\n\n .checked_add(capacity as u128)\n\n .ok_or(Error::AmountOverflow)?;\n\n let amount = match load_cell_type_hash(i, source)? {\n", "file_path": "contracts/gw-utils/src/cells/token.rs", "rank": 34, "score": 152206.04958501676 }, { "content": "// Verify reverted_block_root\n\npub fn verify_reverted_block_hashes(\n\n reverted_block_hashes: Vec<H256>,\n\n reverted_block_proof: Bytes,\n\n prev_global_state: &GlobalState,\n\n) -> Result<(), Error> {\n\n let reverted_block_root = prev_global_state.reverted_block_root().unpack();\n\n if reverted_block_hashes.is_empty() && reverted_block_proof.is_empty() {\n\n return Ok(());\n\n }\n\n let mut buf = [Pair::default(); 256];\n\n let mut block_tree = Tree::new(&mut buf);\n\n for key in reverted_block_hashes {\n\n block_tree\n\n .update(&key.into(), &H256::one().into())\n\n .map_err(|err| {\n\n debug!(\"[verify reverted block] update kv error: {}\", err);\n\n Error::MerkleProof\n\n })?;\n\n }\n\n block_tree\n\n .verify(&reverted_block_root, &reverted_block_proof)\n\n .map_err(|err| {\n\n debug!(\"[verify reverted block] merkle verify error: {}\", err);\n\n Error::MerkleProof\n\n })?;\n\n Ok(())\n\n}\n", "file_path": "contracts/state-validator/src/verifications/submit_block.rs", "rank": 35, "score": 152206.04958501676 }, { "content": "pub fn random_out_point() -> OutPoint {\n\n let mut tx_hash = [0u8; 32];\n\n let mut rng = thread_rng();\n\n rng.fill(&mut tx_hash);\n\n OutPoint::new_builder()\n\n .tx_hash(tx_hash.pack())\n\n .index(0u32.pack())\n\n .build()\n\n}\n\n\n", "file_path": "tests/src/script_tests/utils/layer1.rs", "rank": 36, "score": 150872.93524264928 }, { "content": "/// this function ensure transaction doesn't contains any deposit / withdrawal / custodian\n\npub fn check_rollup_lock_cells_except_stake(\n\n rollup_type_hash: &H256,\n\n config: &RollupConfig,\n\n) -> Result<(), Error> {\n\n if !collect_deposit_locks(rollup_type_hash, config, Source::Input)?.is_empty() {\n\n return Err(Error::InvalidDepositCell);\n\n }\n\n if !collect_deposit_locks(rollup_type_hash, config, Source::Output)?.is_empty() {\n\n return Err(Error::InvalidDepositCell);\n\n }\n\n if !collect_withdrawal_locks(rollup_type_hash, config, Source::Input)?.is_empty() {\n\n return Err(Error::InvalidWithdrawalCell);\n\n }\n\n if !collect_withdrawal_locks(rollup_type_hash, config, Source::Output)?.is_empty() {\n\n return Err(Error::InvalidWithdrawalCell);\n\n }\n\n if !collect_custodian_locks(rollup_type_hash, config, Source::Input)?.is_empty() {\n\n return Err(Error::InvalidCustodianCell);\n\n }\n\n if !collect_custodian_locks(rollup_type_hash, config, Source::Output)?.is_empty() {\n\n return Err(Error::InvalidCustodianCell);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "contracts/state-validator/src/verifications/mod.rs", "rank": 37, "score": 149890.52426497772 }, { "content": "/// Find block producer's stake cell\n\n/// this function return Option<StakeCell> if we have 1 or zero stake cell,\n\n/// otherwise return an error.\n\npub fn find_block_producer_stake_cell(\n\n rollup_type_hash: &H256,\n\n config: &RollupConfig,\n\n source: Source,\n\n owner_lock_hash: &Byte32Reader,\n\n) -> Result<Option<StakeCell>, Error> {\n\n let mut cells = collect_stake_cells(rollup_type_hash, config, source)?;\n\n // return an error if more than one stake cell returned\n\n if cells.len() > 1 {\n\n debug!(\n\n \"expected no more than 1 stake cell from {:?}, found {}\",\n\n source,\n\n cells.len()\n\n );\n\n return Err(Error::InvalidStakeCell);\n\n }\n\n if cells\n\n .iter()\n\n .any(|cell| cell.args.owner_lock_hash().as_slice() != owner_lock_hash.as_slice())\n\n {\n\n debug!(\"found stake cell with unexpected owner_lock_hash\");\n\n return Err(Error::InvalidStakeCell);\n\n }\n\n Ok(cells.pop())\n\n}\n\n\n", "file_path": "contracts/gw-utils/src/cells/lock_cells.rs", "rank": 38, "score": 149890.52426497772 }, { "content": "pub fn main() -> Result<(), Error> {\n\n let script = load_script()?;\n\n let ParsedLockArgs {\n\n rollup_type_hash,\n\n lock_args,\n\n owner_lock_hash,\n\n } = parse_lock_args(&script)?;\n\n\n\n // load unlock arguments from witness\n\n let witness_args = load_witness_args(0, Source::GroupInput)?;\n\n let unlock_args = {\n\n let unlock_args: Bytes = witness_args\n\n .lock()\n\n .to_opt()\n\n .ok_or(Error::InvalidArgs)?\n\n .unpack();\n\n match UnlockWithdrawalWitnessReader::verify(&unlock_args, false) {\n\n Ok(()) => UnlockWithdrawalWitness::new_unchecked(unlock_args),\n\n Err(_) => return Err(Error::ProofNotFound),\n\n }\n", "file_path": "contracts/withdrawal-lock/src/entry.rs", "rank": 39, "score": 148530.81010015335 }, { "content": "/// args:\n\n/// * rollup_script_hash | ChallengeLockArgs\n\n///\n\n/// unlock paths:\n\n/// * challenge success\n\n/// * after CHALLENGE_MATURITY_BLOCKS, the submitter can resume rollup to running status and revert the invalid rollup states\n\n/// * cancel challenge\n\n/// * during the rollup halting, anyone can submit context to run verification on-chain and cancel this challenge\n\n/// * the cancel-challenge tx must contains a verifier cell in the inputs which cell's lock script equals to the account.script\n\n/// * the lock script of verifier cell reads the context from tx.witnesses and run verification\n\npub fn main() -> Result<(), Error> {\n\n let (rollup_script_hash, lock_args) = parse_lock_args()?;\n\n\n\n // check rollup cell\n\n let mut rollup_action_witness = [0u8; MAX_ROLLUP_WITNESS_SIZE];\n\n let index =\n\n search_rollup_cell(&rollup_script_hash, Source::Output).ok_or(Error::RollupCellNotFound)?;\n\n let action = parse_rollup_action(&mut rollup_action_witness, index, Source::Output)?;\n\n match action.to_enum() {\n\n RollupActionUnionReader::RollupEnterChallenge(_)\n\n | RollupActionUnionReader::RollupRevert(_) => {\n\n // state-validator will do the verification\n\n return Ok(());\n\n }\n\n RollupActionUnionReader::RollupCancelChallenge(_) => {}\n\n _ => {\n\n debug!(\"unsupport action {:?}\", action.to_enum());\n\n return Err(Error::InvalidArgs);\n\n }\n\n }\n", "file_path": "contracts/challenge-lock/src/entry.rs", "rank": 40, "score": 148530.81010015335 }, { "content": "// always return success\n\npub fn main() -> Result<(), Error> {\n\n Ok(())\n\n}\n", "file_path": "contracts/always-success/src/entry.rs", "rank": 41, "score": 148530.81010015335 }, { "content": "pub fn main() -> Result<(), Error> {\n\n let (rollup_type_hash, lock_args) = parse_lock_args()?;\n\n\n\n // Unlock by User\n\n // read global state from rollup cell in deps\n\n if let Some(global_state) = search_rollup_state(&rollup_type_hash, Source::CellDep)? {\n\n let stake_block_number: u64 = lock_args.stake_block_number().unpack();\n\n let last_finalized_block_number: u64 = global_state.last_finalized_block_number().unpack();\n\n\n\n // 1. check if stake_block_number is finalized\n\n // 2. check if owner_lock_hash exists in input cells\n\n if stake_block_number <= last_finalized_block_number\n\n && search_lock_hash(&lock_args.owner_lock_hash().unpack(), Source::Input).is_some()\n\n {\n\n return Ok(());\n\n }\n\n }\n\n\n\n // Unlock by Rollup cell\n\n // check if rollup cell exists in the inputs, the following verification will be handled\n\n // by rollup state validator.\n\n if search_rollup_cell(&rollup_type_hash, Source::Input).is_some() {\n\n return Ok(());\n\n }\n\n\n\n Err(Error::InvalidStakeCellUnlock)\n\n}\n", "file_path": "contracts/stake-lock/src/entry.rs", "rank": 42, "score": 148530.81010015335 }, { "content": "pub fn main() -> Result<(), Error> {\n\n let (rollup_type_hash, lock_args) = parse_lock_args()?;\n\n\n\n // read global state from rollup cell\n\n let global_state = match search_rollup_state(&rollup_type_hash, Source::Input)? {\n\n Some(state) => state,\n\n None => return Err(Error::RollupCellNotFound),\n\n };\n\n\n\n let deposit_block_number: u64 = lock_args.deposit_block_number().unpack();\n\n let last_finalized_block_number: u64 = global_state.last_finalized_block_number().unpack();\n\n\n\n if deposit_block_number <= last_finalized_block_number {\n\n // this custodian lock is already finalized, rollup will handle the logic\n\n return Ok(());\n\n }\n\n\n\n // otherwise, the submitter try to prove the deposit is reverted.\n\n let config = load_rollup_config(&global_state.rollup_config_hash().unpack())?;\n\n\n", "file_path": "contracts/custodian-lock/src/entry.rs", "rank": 43, "score": 148530.81010015335 }, { "content": "// We have two unlock paths\n\n// 1. unlock by Rollup cell\n\n// 2. unlock by user after timeout\n\n//\n\n// We always try the 1 first, then try 2, otherwise the unlock return a failure.\n\npub fn main() -> Result<(), Error> {\n\n let (rollup_type_hash, lock_args) = parse_lock_args()?;\n\n // try unlock by Rollup\n\n // return success if rollup cell in the inputs, the following verification will be handled by rollup state validator.\n\n if search_rollup_cell(&rollup_type_hash, Source::Input).is_some() {\n\n return Ok(());\n\n }\n\n\n\n // unlock by user\n\n // 1. check since is satisfied the cancel timeout\n\n let input_since = Since::new(load_input_since(0, Source::GroupInput)?);\n\n let cancel_timeout = Since::new(lock_args.cancel_timeout().unpack());\n\n if input_since.flags() != cancel_timeout.flags()\n\n || input_since.as_u64() < cancel_timeout.as_u64()\n\n {\n\n return Err(Error::InvalidSince);\n\n }\n\n // 2. search owner cell\n\n match search_lock_hash(&lock_args.owner_lock_hash().unpack(), Source::Input) {\n\n Some(_) => Ok(()),\n\n None => Err(Error::OwnerCellNotFound),\n\n }\n\n}\n", "file_path": "contracts/deposit-lock/src/entry.rs", "rank": 44, "score": 148530.81010015335 }, { "content": "pub fn main() -> Result<(), Error> {\n\n // check type_id\n\n {\n\n let script = load_script()?;\n\n let args: Bytes = CKBUnpack::unpack(&script.args());\n\n if args.len() < TYPE_ID_SIZE {\n\n return Err(Error::InvalidTypeID);\n\n }\n\n let mut type_id = [0u8; TYPE_ID_SIZE];\n\n type_id.copy_from_slice(&args[..TYPE_ID_SIZE]);\n\n check_type_id(type_id)?;\n\n }\n\n // return success if we are in the initialization\n\n if check_initialization()? {\n\n return Ok(());\n\n }\n\n // basic verification\n\n let prev_global_state = parse_global_state(Source::GroupInput)?;\n\n let post_global_state = parse_global_state(Source::GroupOutput)?;\n\n let rollup_config = load_rollup_config(&prev_global_state.rollup_config_hash().unpack())?;\n", "file_path": "contracts/state-validator/src/entry.rs", "rank": 45, "score": 148530.81010015335 }, { "content": "pub fn always_success_script() -> Script {\n\n Script::new_builder()\n\n .code_hash(ALWAYS_SUCCESS_CODE_HASH.pack())\n\n .hash_type(ScriptHashType::Data.into())\n\n .build()\n\n}\n\n\n", "file_path": "tests/src/script_tests/utils/layer1.rs", "rank": 46, "score": 148314.64590363446 }, { "content": "/// Eth account lock\n\n/// script args: rollup_script_hash(32 bytes) | eth_address(20 bytes)\n\n/// data: onetime_owner_lock_hash(32 bytes) | signing type (1 byte) | message(32 bytes)\n\npub fn main() -> Result<(), Error> {\n\n // parse args\n\n let script = load_script()?;\n\n let args: Bytes = CKBUnpack::unpack(&script.args());\n\n let (_rollup_script_hash, eth_address) = extract_eth_lock_args(args)?;\n\n debug!(\"eth_address {:?}\", &eth_address);\n\n\n\n // parse data\n\n let (onetime_owner_lock_hash, signing_type, message) = parse_data()?;\n\n\n\n // check owner lock hash cell\n\n // to prevent others unlock this cell\n\n if search_lock_hash(&onetime_owner_lock_hash, Source::Input).is_none() {\n\n return Err(Error::OwnerCellNotFound);\n\n }\n\n\n\n // verify signature\n\n debug!(\"Verify message signature {:?}\", &message);\n\n verify_message_signature(eth_address, signing_type, message)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "contracts/eth-account-lock/src/entry.rs", "rank": 47, "score": 145840.18820639435 }, { "content": "/// Eth account lock\n\n/// script args: rollup_script_hash(32 bytes) | tron_address(20 bytes)\n\n/// data: onetime_owner_lock_hash(32 bytes) | signing type (1 byte) | message(32 bytes)\n\npub fn main() -> Result<(), Error> {\n\n // parse args\n\n let script = load_script()?;\n\n let args: Bytes = CKBUnpack::unpack(&script.args());\n\n let (_rollup_script_hash, tron_address) = extract_lock_args(args)?;\n\n debug!(\"tron_address {:?}\", &tron_address);\n\n\n\n // parse data\n\n let (onetime_owner_lock_hash, signing_type, message) = parse_data()?;\n\n\n\n // check owner lock hash cell\n\n // to prevent others unlock this cell\n\n if search_lock_hash(&onetime_owner_lock_hash, Source::Input).is_none() {\n\n return Err(Error::OwnerCellNotFound);\n\n }\n\n\n\n // verify signature\n\n debug!(\"Verify message signature {:?}\", &message);\n\n verify_message_signature(tron_address, signing_type, message)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "contracts/tron-account-lock/src/entry.rs", "rank": 48, "score": 145840.18820639435 }, { "content": "pub fn search_rollup_cell(rollup_type_hash: &[u8; 32], source: Source) -> Option<usize> {\n\n QueryIter::new(load_cell_type_hash, source)\n\n .position(|type_hash| type_hash.as_ref() == Some(rollup_type_hash))\n\n}\n\n\n", "file_path": "contracts/gw-utils/src/cells/rollup.rs", "rank": 49, "score": 143671.35523857584 }, { "content": "pub fn run_contract<S: State + CodeStore>(\n\n rollup_config: &RollupConfig,\n\n tree: &mut S,\n\n from_id: u32,\n\n to_id: u32,\n\n args: Bytes,\n\n block_info: &BlockInfo,\n\n) -> Result<Vec<u8>, TransactionError> {\n\n let run_result =\n\n run_contract_get_result(rollup_config, tree, from_id, to_id, args, block_info)?;\n\n Ok(run_result.return_data)\n\n}\n", "file_path": "tests/src/script_tests/l2_scripts/mod.rs", "rank": 50, "score": 132217.6933746363 }, { "content": "pub fn build_backend_manage(rollup_config: &RollupConfig) -> BackendManage {\n\n let sudt_validator_script_type_hash: [u8; 32] =\n\n rollup_config.l2_sudt_validator_script_type_hash().unpack();\n\n let configs = vec![\n\n BackendConfig {\n\n validator_path: META_VALIDATOR_PATH.into(),\n\n generator_path: META_GENERATOR_PATH.into(),\n\n validator_script_type_hash: META_VALIDATOR_SCRIPT_TYPE_HASH.into(),\n\n backend_type: BackendType::Meta,\n\n },\n\n BackendConfig {\n\n validator_path: SUDT_VALIDATOR_PATH.into(),\n\n generator_path: SUDT_GENERATOR_PATH.into(),\n\n validator_script_type_hash: sudt_validator_script_type_hash.into(),\n\n backend_type: BackendType::Sudt,\n\n },\n\n ];\n\n BackendManage::from_config(configs).expect(\"default backend\")\n\n}\n\n\n", "file_path": "tests/src/testing_tool/chain.rs", "rank": 51, "score": 130111.94658830433 }, { "content": " pub trait ToCKBType<T> {\n\n fn to_ckb(&self) -> T;\n\n }\n\n\n\n macro_rules! impl_to_ckb {\n\n ($type_:tt) => {\n\n impl ToCKBType<$type_> for super::$type_ {\n\n fn to_ckb(&self) -> $type_ {\n\n $type_::new_unchecked(self.as_bytes())\n\n }\n\n }\n\n };\n\n }\n\n impl_to_ckb!(Script);\n\n impl_to_ckb!(CellInput);\n\n impl_to_ckb!(CellOutput);\n\n impl_to_ckb!(WitnessArgs);\n\n impl_to_ckb!(CellDep);\n\n\n\n impl ToCKBType<Bytes> for super::Bytes {\n\n fn to_ckb(&self) -> Bytes {\n\n self.pack()\n\n }\n\n }\n\n\n", "file_path": "tests/src/script_tests/withdrawal.rs", "rank": 52, "score": 129957.15915906394 }, { "content": " pub trait ToGWType<T> {\n\n fn to_gw(&self) -> T;\n\n }\n\n\n\n macro_rules! impl_to_gw {\n\n ($type_:tt) => {\n\n impl ToGWType<super::$type_> for $type_ {\n\n fn to_gw(&self) -> super::$type_ {\n\n super::$type_::new_unchecked(self.as_bytes())\n\n }\n\n }\n\n };\n\n }\n\n\n\n impl_to_gw!(OutPoint);\n\n impl_to_gw!(CellOutput);\n\n impl_to_gw!(Script);\n\n}\n\n\n\nuse conversion::{ToCKBType, ToGWType};\n", "file_path": "tests/src/script_tests/withdrawal.rs", "rank": 53, "score": 129957.15915906394 }, { "content": "pub fn run_contract_get_result<S: State + CodeStore>(\n\n rollup_config: &RollupConfig,\n\n tree: &mut S,\n\n from_id: u32,\n\n to_id: u32,\n\n args: Bytes,\n\n block_info: &BlockInfo,\n\n) -> Result<RunResult, TransactionError> {\n\n let raw_tx = RawL2Transaction::new_builder()\n\n .from_id(from_id.pack())\n\n .to_id(to_id.pack())\n\n .args(args.pack())\n\n .build();\n\n let backend_manage = build_backend_manage(rollup_config);\n\n let account_lock_manage = AccountLockManage::default();\n\n let rollup_ctx = RollupContext {\n\n rollup_config: rollup_config.clone(),\n\n rollup_script_hash: [42u8; 32].into(),\n\n };\n\n let generator = Generator::new(backend_manage, account_lock_manage, rollup_ctx);\n", "file_path": "tests/src/script_tests/l2_scripts/mod.rs", "rank": 54, "score": 128100.55596473657 }, { "content": "pub fn sha3_pubkey_hash(pubkey: &Pubkey) -> Bytes {\n\n let mut hasher = Keccak256::new();\n\n hasher.update(&pubkey.as_bytes());\n\n let buf = hasher.finalize();\n\n buf[12..].to_vec().into()\n\n}\n\n\n", "file_path": "tests/src/script_tests/account_lock_scripts/tron_account_lock.rs", "rank": 55, "score": 126177.31848757253 }, { "content": "pub fn sha3_pubkey_hash(pubkey: &Pubkey) -> Bytes {\n\n let mut hasher = Keccak256::new();\n\n hasher.update(&pubkey.as_bytes());\n\n let buf = hasher.finalize();\n\n buf[12..].to_vec().into()\n\n}\n\n\n", "file_path": "tests/src/script_tests/account_lock_scripts/eth_account_lock.rs", "rank": 56, "score": 126177.31848757253 }, { "content": "fn random_type_id_script() -> ckb_types::packed::Script {\n\n let random_bytes: [u8; 32] = rand::random();\n\n build_type_id_script(&random_bytes)\n\n}\n\n\n", "file_path": "tests/src/script_tests/utils/rollup.rs", "rank": 57, "score": 125435.43512272119 }, { "content": "pub fn parse_global_state(source: Source) -> Result<GlobalState, Error> {\n\n let data = load_cell_data(0, source)?;\n\n match GlobalStateReader::verify(&data, false) {\n\n Ok(_) => Ok(GlobalState::new_unchecked(data.into())),\n\n Err(_) if GlobalStateV0Reader::verify(&data, false).is_ok() => {\n\n let global_state_v0 = GlobalStateV0::new_unchecked(data.into());\n\n Ok(GlobalState::from(global_state_v0))\n\n }\n\n Err(_) => {\n\n debug!(\"Fail to parsing global state\");\n\n Err(Error::Encoding)\n\n }\n\n }\n\n}\n\n\n", "file_path": "contracts/state-validator/src/entry.rs", "rank": 58, "score": 123821.3521343412 }, { "content": "fn has_type_id_cell(index: usize, source: Source) -> bool {\n\n let mut buf = [0u8; 0];\n\n match load_cell(&mut buf, 0, index, source) {\n\n Ok(_) => true,\n\n Err(SysError::LengthNotEnough(..)) => true,\n\n _ => false,\n\n }\n\n}\n", "file_path": "contracts/gw-utils/src/type_id.rs", "rank": 59, "score": 121747.9786727048 }, { "content": "/// used in filter_map\n\nfn extract_args_from_lock<ArgsType: Entity>(\n\n lock: &crate::ckb_std::ckb_types::packed::Script,\n\n rollup_type_hash: &H256,\n\n lock_script_type_hash: &Byte32,\n\n) -> Option<Result<ArgsType, Error>> {\n\n let lock_args: Bytes = lock.args().unpack();\n\n let is_lock = lock_args.len() > 32\n\n && &lock_args[..32] == rollup_type_hash.as_slice()\n\n && lock.code_hash().as_slice() == lock_script_type_hash.as_slice()\n\n && lock.hash_type() == ScriptHashType::Type.into();\n\n\n\n // return none to skip this cell\n\n if !is_lock {\n\n return None;\n\n }\n\n\n\n // parse the remaining lock_args\n\n let raw_args = lock_args[32..].to_vec();\n\n Some(ArgsType::from_slice(&raw_args).map_err(|_err| {\n\n debug!(\"Fail to extract args, lock args parsing err\");\n\n Error::Encoding\n\n }))\n\n}\n\n\n", "file_path": "contracts/gw-utils/src/cells/lock_cells.rs", "rank": 60, "score": 118519.7028538126 }, { "content": "/// args: rollup_type_hash | withdrawal lock args | owner lock len (optional) | owner lock (optional)\n\npub fn parse_lock_args(args: &Bytes) -> Result<WithdrawalLockArgsWithOwnerLock, Error> {\n\n let lock_args_start = 32;\n\n let lock_args_end = lock_args_start + WithdrawalLockArgs::TOTAL_SIZE;\n\n\n\n let args_len = args.len();\n\n if args_len < lock_args_end {\n\n return Err(Error::InvalidArgs);\n\n }\n\n\n\n let raw_args = args.slice(lock_args_start..lock_args_end);\n\n let lock_args = match WithdrawalLockArgsReader::verify(&raw_args, false) {\n\n Ok(()) => WithdrawalLockArgs::new_unchecked(raw_args),\n\n Err(_) => return Err(Error::InvalidArgs),\n\n };\n\n\n\n let owner_lock_start = lock_args_end + 4; // u32 length\n\n if args_len <= owner_lock_start {\n\n debug!(\"[parse withdrawal] missing owner lock\");\n\n return Err(Error::InvalidArgs);\n\n }\n", "file_path": "contracts/gw-utils/src/withdrawal.rs", "rank": 61, "score": 118049.93808029094 }, { "content": "pub fn verify_tx_context(input: TxContextInput) -> Result<TxContext, Error> {\n\n let TxContextInput {\n\n tx,\n\n kv_state,\n\n scripts,\n\n raw_block,\n\n rollup_config,\n\n target,\n\n tx_proof,\n\n } = input;\n\n\n\n let raw_tx = tx.raw();\n\n\n\n // verify tx account's script\n\n let sender_id: u32 = raw_tx.from_id().unpack();\n\n let receiver_id: u32 = raw_tx.to_id().unpack();\n\n let sender_script_hash = kv_state.get_script_hash(sender_id).map_err(|_| {\n\n debug!(\"get sender script_hash\");\n\n Error::SMTKeyMissing\n\n })?;\n", "file_path": "contracts/challenge-lock/src/verifications/context.rs", "rank": 62, "score": 118045.96874597383 }, { "content": "pub fn check_status(global_state: &GlobalState, status: Status) -> Result<(), Error> {\n\n let expected_status: u8 = status.into();\n\n let status: u8 = global_state.status().into();\n\n if status != expected_status {\n\n return Err(Error::InvalidStatus);\n\n }\n\n Ok(())\n\n}\n", "file_path": "contracts/state-validator/src/verifications/mod.rs", "rank": 63, "score": 117423.9963321081 }, { "content": "pub fn load_rollup_config(rollup_config_hash: &[u8; 32]) -> Result<RollupConfig, Error> {\n\n let index = search_rollup_config_cell(rollup_config_hash).ok_or(Error::RollupConfigNotFound)?;\n\n let data = load_cell_data(index, Source::CellDep)?;\n\n match RollupConfigReader::verify(&data, false) {\n\n Ok(_) => Ok(RollupConfig::new_unchecked(data.into())),\n\n Err(_) => {\n\n debug!(\"Invalid encoding of RollupConfig\");\n\n Err(Error::Encoding)\n\n }\n\n }\n\n}\n\n\n", "file_path": "contracts/gw-utils/src/cells/rollup.rs", "rank": 64, "score": 115583.24104447252 }, { "content": "pub fn extract_lock_args(lock_args: Bytes) -> Result<(H256, TronAddress), Error> {\n\n if lock_args.len() != 52 {\n\n debug!(\"Invalid lock args len: {}\", lock_args.len());\n\n return Err(Error::InvalidArgs);\n\n }\n\n let rollup_script_hash = {\n\n let mut buf = [0u8; 32];\n\n buf.copy_from_slice(&lock_args[..32]);\n\n buf.into()\n\n };\n\n let address = {\n\n let mut buf = [0u8; 20];\n\n buf.copy_from_slice(&lock_args[32..]);\n\n buf\n\n };\n\n Ok((rollup_script_hash, address))\n\n}\n\n\n\n#[derive(Default)]\n\npub struct Secp256k1Tron;\n", "file_path": "contracts/tron-account-lock/src/tron_signature.rs", "rank": 65, "score": 113819.77320027354 }, { "content": "pub fn search_lock_hashes(owner_lock_hash: &[u8; 32], source: Source) -> Vec<usize> {\n\n QueryIter::new(load_cell_lock_hash, source)\n\n .enumerate()\n\n .filter_map(|(i, lock_hash)| {\n\n if &lock_hash == owner_lock_hash {\n\n Some(i)\n\n } else {\n\n None\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "contracts/gw-utils/src/cells/utils.rs", "rank": 66, "score": 113452.77391981338 }, { "content": "pub fn search_lock_hash(owner_lock_hash: &[u8; 32], source: Source) -> Option<usize> {\n\n QueryIter::new(load_cell_lock_hash, source).position(|lock_hash| &lock_hash == owner_lock_hash)\n\n}\n\n\n", "file_path": "contracts/gw-utils/src/cells/utils.rs", "rank": 67, "score": 113452.77391981338 }, { "content": "pub fn extract_eth_lock_args(lock_args: Bytes) -> Result<(H256, EthAddress), Error> {\n\n if lock_args.len() != 52 {\n\n debug!(\"Invalid lock args len: {}\", lock_args.len());\n\n return Err(Error::InvalidArgs);\n\n }\n\n let rollup_script_hash = {\n\n let mut buf = [0u8; 32];\n\n buf.copy_from_slice(&lock_args[..32]);\n\n buf.into()\n\n };\n\n let eth_address = {\n\n let mut buf = [0u8; 20];\n\n buf.copy_from_slice(&lock_args[32..]);\n\n buf\n\n };\n\n Ok((rollup_script_hash, eth_address))\n\n}\n\n\n\n#[derive(Default)]\n\npub struct Secp256k1Eth;\n", "file_path": "contracts/eth-account-lock/src/eth_signature.rs", "rank": 68, "score": 112128.825305581 }, { "content": "/// parse cell's data\n\n/// return (onetime_owner_lock_hash, sign type, message)\n\nfn parse_data() -> Result<([u8; 32], SigningType, H256), Error> {\n\n let mut data = [0u8; 65];\n\n let loaded_size = load_cell_data(&mut data, 0, 0, Source::GroupInput)?;\n\n\n\n if loaded_size != 65 {\n\n debug!(\"Invalid data size: {}\", loaded_size);\n\n return Err(Error::Encoding);\n\n }\n\n\n\n // copy owner lock hash\n\n let mut owner_lock_hash = [0u8; 32];\n\n owner_lock_hash.copy_from_slice(&data[..32]);\n\n\n\n // copy message\n\n let signing_type = SigningType::try_from(data[32]).map_err(|err| {\n\n debug!(\"Invalid signature message type {}\", err);\n\n Error::Encoding\n\n })?;\n\n\n\n let mut msg = [0u8; 32];\n\n msg.copy_from_slice(&data[33..65]);\n\n\n\n Ok((owner_lock_hash, signing_type, msg.into()))\n\n}\n", "file_path": "contracts/tron-account-lock/src/entry.rs", "rank": 69, "score": 108742.72566045402 }, { "content": "/// parse cell's data\n\n/// return (onetime_owner_lock_hash, sign type, message)\n\nfn parse_data() -> Result<([u8; 32], SigningType, H256), Error> {\n\n let mut data = [0u8; 65];\n\n let loaded_size = load_cell_data(&mut data, 0, 0, Source::GroupInput)?;\n\n\n\n if loaded_size != 65 {\n\n debug!(\"Invalid data size: {}\", loaded_size);\n\n return Err(Error::Encoding);\n\n }\n\n\n\n // copy owner lock hash\n\n let mut owner_lock_hash = [0u8; 32];\n\n owner_lock_hash.copy_from_slice(&data[..32]);\n\n\n\n // copy message\n\n let signing_type = SigningType::try_from(data[32]).map_err(|err| {\n\n debug!(\"Invalid signature message type {}\", err);\n\n Error::Encoding\n\n })?;\n\n\n\n let mut msg = [0u8; 32];\n\n msg.copy_from_slice(&data[33..65]);\n\n\n\n Ok((owner_lock_hash, signing_type, msg.into()))\n\n}\n", "file_path": "contracts/eth-account-lock/src/entry.rs", "rank": 70, "score": 108742.72566045402 }, { "content": "pub fn collect_burn_cells(config: &RollupConfig, source: Source) -> Result<Vec<BurnCell>, Error> {\n\n QueryIter::new(load_cell_lock_hash, source)\n\n .enumerate()\n\n .filter_map(|(index, lock_hash)| {\n\n let is_lock = lock_hash == config.burn_lock_hash().as_slice();\n\n if !is_lock {\n\n return None;\n\n }\n\n let value = match fetch_capacity_and_sudt_value(config, index, source) {\n\n Ok(value) => value,\n\n Err(err) => return Some(Err(err)),\n\n };\n\n let cell = BurnCell { index, value };\n\n Some(Ok(cell))\n\n })\n\n .collect::<Result<_, Error>>()\n\n}\n", "file_path": "contracts/gw-utils/src/cells/lock_cells.rs", "rank": 71, "score": 106514.34893315414 }, { "content": "/// Check rewards\n\nfn check_rewards(\n\n rollup_type_hash: &H256,\n\n config: &RollupConfig,\n\n reverted_blocks: &[RawL2BlockReader],\n\n challenge_cell: &ChallengeCell,\n\n) -> Result<(), Error> {\n\n let reverted_block_stake_set: BTreeSet<_> = reverted_blocks\n\n .iter()\n\n .map(|b| b.stake_cell_owner_lock_hash().to_entity())\n\n .collect();\n\n\n\n let stake_cells = collect_stake_cells(rollup_type_hash, config, Source::Input)?;\n\n let reverted_stake_cells_set: BTreeSet<_> = stake_cells\n\n .iter()\n\n .map(|cell| cell.args.owner_lock_hash())\n\n .collect();\n\n // ensure stake cells are all belongs to reverted blocks and no missing stake cells\n\n if reverted_block_stake_set != reverted_stake_cells_set {\n\n debug!(\"reverted stake cells isn't according to reverted block stake set\");\n\n return Err(Error::InvalidStakeCell);\n", "file_path": "contracts/state-validator/src/verifications/revert.rs", "rank": 72, "score": 100029.23266141093 }, { "content": "/// args: rollup_type_hash | withdrawal lock args | owner lock len (optional) | owner lock (optional)\n\nfn parse_lock_args(script: &ckb_types::packed::Script) -> Result<ParsedLockArgs, Error> {\n\n let mut rollup_type_hash = [0u8; 32];\n\n let args: Bytes = script.args().unpack();\n\n if args.len() < rollup_type_hash.len() {\n\n return Err(Error::InvalidArgs);\n\n }\n\n\n\n rollup_type_hash.copy_from_slice(&args[..32]);\n\n let parsed = gw_utils::withdrawal::parse_lock_args(&args)?;\n\n\n\n Ok(ParsedLockArgs {\n\n rollup_type_hash,\n\n lock_args: parsed.lock_args,\n\n owner_lock_hash: parsed.owner_lock.hash(),\n\n })\n\n}\n\n\n", "file_path": "contracts/withdrawal-lock/src/entry.rs", "rank": 74, "score": 99640.01443477595 }, { "content": "fn check_challenge_cell(\n\n rollup_type_hash: &H256,\n\n config: &RollupConfig,\n\n challenge_cell: &ChallengeCell,\n\n revert_target_block_hash: &H256,\n\n) -> Result<(), Error> {\n\n // check challenge maturity\n\n check_challenge_maturity(config, challenge_cell)?;\n\n // check other challenge cells\n\n let has_output_challenge =\n\n find_challenge_cell(rollup_type_hash, config, Source::Output)?.is_some();\n\n if has_output_challenge {\n\n return Err(Error::InvalidChallengeCell);\n\n }\n\n // check challenge target\n\n let challenge_target = challenge_cell.args.target();\n\n let challenge_block_hash: H256 = challenge_target.block_hash().unpack();\n\n if &challenge_block_hash != revert_target_block_hash {\n\n return Err(Error::InvalidChallengeCell);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "contracts/state-validator/src/verifications/revert.rs", "rank": 75, "score": 98205.21827424798 }, { "content": "fn check_reverted_blocks(\n\n config: &RollupConfig,\n\n reverted_blocks: &[RawL2BlockReader],\n\n revert_args: &RollupRevertReader,\n\n prev_global_state: &GlobalState,\n\n post_global_state: &GlobalState,\n\n) -> Result<GlobalState, Error> {\n\n if reverted_blocks.is_empty() {\n\n return Err(Error::InvalidRevertedBlocks);\n\n }\n\n let reverted_block_hashes: Vec<H256> =\n\n reverted_blocks.iter().map(|b| b.hash().into()).collect();\n\n let reverted_block_smt_keys: Vec<H256> = reverted_blocks\n\n .iter()\n\n .map(|b| RawL2Block::compute_smt_key(b.number().unpack()).into())\n\n .collect();\n\n // check reverted_blocks is continues\n\n {\n\n let mut prev_hash: Byte32 = reverted_blocks[0].hash().pack();\n\n let mut prev_number = reverted_blocks[0].number().unpack();\n", "file_path": "contracts/state-validator/src/verifications/revert.rs", "rank": 76, "score": 98205.21827424798 }, { "content": "#[test]\n\nfn test_sudt() {\n\n init_env_log();\n\n let rollup_config = RollupConfig::new_builder()\n\n .l2_sudt_validator_script_type_hash(DUMMY_SUDT_VALIDATOR_SCRIPT_TYPE_HASH.pack())\n\n .build();\n\n let mut ctx = TestingContext::setup(&rollup_config);\n\n\n\n let init_a_balance = U256::from(10000u64);\n\n\n\n // init accounts\n\n let _meta = ctx\n\n .state\n\n .create_account_from_script(\n\n Script::new_builder()\n\n .code_hash(DUMMY_SUDT_VALIDATOR_SCRIPT_TYPE_HASH.clone().pack())\n\n .args([1u8; 64].to_vec().pack())\n\n .hash_type(ScriptHashType::Type.into())\n\n .build(),\n\n )\n\n .expect(\"create account\");\n", "file_path": "tests/src/script_tests/l2_scripts/sudt.rs", "rank": 77, "score": 98205.21827424798 }, { "content": "fn verify_message_signature(\n\n tron_address: TronAddress,\n\n signing_type: SigningType,\n\n message: H256,\n\n) -> Result<(), Error> {\n\n // load signature\n\n let signature = load_signature_from_witness()?;\n\n // verify message\n\n let secp256k1_tron = Secp256k1Tron::default();\n\n let valid = match signing_type {\n\n SigningType::WithPrefix => {\n\n secp256k1_tron.verify_message(tron_address, signature, message)?\n\n }\n\n SigningType::Raw => secp256k1_tron.verify_alone(tron_address, signature, message)?,\n\n };\n\n if !valid {\n\n debug!(\"Wrong signature, message: {:?}\", message);\n\n return Err(Error::WrongSignature);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "contracts/tron-account-lock/src/entry.rs", "rank": 78, "score": 98205.21827424798 }, { "content": "/// Check challenge cell is maturity(on the layer1)\n\nfn check_challenge_maturity(\n\n config: &RollupConfig,\n\n challenge_cell: &ChallengeCell,\n\n) -> Result<(), Error> {\n\n let challenge_maturity_blocks: u64 = config.challenge_maturity_blocks().unpack();\n\n let since = Since::new(load_input_since(challenge_cell.index, Source::Input)?);\n\n if let Some(LockValue::BlockNumber(n)) = since.extract_lock_value() {\n\n if since.is_relative() && n >= challenge_maturity_blocks {\n\n return Ok(());\n\n }\n\n }\n\n Err(Error::InvalidChallengeCell)\n\n}\n\n\n", "file_path": "contracts/state-validator/src/verifications/revert.rs", "rank": 79, "score": 98205.21827424798 }, { "content": "fn verify_message_signature(\n\n eth_address: EthAddress,\n\n signing_type: SigningType,\n\n message: H256,\n\n) -> Result<(), Error> {\n\n // load signature\n\n let signature = load_signature_from_witness()?;\n\n // verify message\n\n let secp256k1_eth = Secp256k1Eth::default();\n\n let valid = match signing_type {\n\n SigningType::WithPrefix => secp256k1_eth.verify_message(eth_address, signature, message)?,\n\n SigningType::Raw => secp256k1_eth.verify_alone(eth_address, signature, message)?,\n\n };\n\n if !valid {\n\n debug!(\"Wrong signature, message: {:?}\", message);\n\n return Err(Error::WrongSignature);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "contracts/eth-account-lock/src/entry.rs", "rank": 80, "score": 98205.21827424798 }, { "content": "fn check_output_cell_has_same_content(\n\n input_index: usize,\n\n input_source: Source,\n\n output_index: usize,\n\n) -> Result<(), Error> {\n\n if load_cell_capacity(input_index, input_source)?\n\n != load_cell_capacity(output_index, Source::Output)?\n\n {\n\n return Err(Error::InvalidOutput);\n\n }\n\n\n\n // TODO: use load_cell_data_hash\n\n // NOTE: load_cell_data_hash from inputs throw ItemMissing error. Comparing data directly\n\n // as temporary workaround. Right now data should be sudt amount only, 16 bytes long.\n\n if load_cell_data(input_index, input_source)? != load_cell_data(output_index, Source::Output)? {\n\n return Err(Error::InvalidOutput);\n\n }\n\n\n\n if load_cell_type_hash(input_index, input_source)?\n\n != load_cell_type_hash(output_index, Source::Output)?\n\n {\n\n return Err(Error::InvalidOutput);\n\n }\n\n Ok(())\n\n}\n", "file_path": "contracts/withdrawal-lock/src/entry.rs", "rank": 81, "score": 98205.21827424798 }, { "content": "#[test]\n\n#[ignore]\n\nfn test_transfer_overflow() {\n\n let init_a_balance = U256::from(10000u64);\n\n let init_b_balance: U256 = U256::MAX - init_a_balance;\n\n let init_a_ckb = U256::from(100u64);\n\n\n\n let rollup_config = RollupConfig::new_builder()\n\n .l2_sudt_validator_script_type_hash(DUMMY_SUDT_VALIDATOR_SCRIPT_TYPE_HASH.pack())\n\n .build();\n\n let mut ctx = TestingContext::setup(&rollup_config);\n\n\n\n // init accounts\n\n let _meta = ctx\n\n .state\n\n .create_account_from_script(\n\n Script::new_builder()\n\n .code_hash(DUMMY_SUDT_VALIDATOR_SCRIPT_TYPE_HASH.clone().pack())\n\n .args([1u8; 20].to_vec().pack())\n\n .hash_type(ScriptHashType::Type.into())\n\n .build(),\n\n )\n", "file_path": "tests/src/script_tests/l2_scripts/sudt.rs", "rank": 82, "score": 96475.62921632969 }, { "content": "fn verify_block_producer(\n\n config: &RollupConfig,\n\n context: &BlockContext,\n\n block: &L2BlockReader,\n\n) -> Result<(), Error> {\n\n let raw_block = block.raw();\n\n let owner_lock_hash = raw_block.stake_cell_owner_lock_hash();\n\n // make sure we have one stake cell in the output\n\n let output_stake_cell = find_block_producer_stake_cell(\n\n &context.rollup_type_hash,\n\n config,\n\n Source::Output,\n\n &owner_lock_hash,\n\n )?\n\n .ok_or(Error::InvalidStakeCell)?;\n\n // check stake cell capacity\n\n let required_staking_capacity: u64 = config.required_staking_capacity().unpack();\n\n if output_stake_cell.capacity < required_staking_capacity {\n\n debug!(\n\n \"[verify block producer] stake cell's capacity is insufficient {} {}\",\n", "file_path": "contracts/state-validator/src/verifications/submit_block.rs", "rank": 83, "score": 96475.62921632969 }, { "content": "#[test]\n\nfn test_transfer_to_self() {\n\n let init_a_balance = U256::from(10000u64);\n\n let init_ckb: U256 = 100u64.into();\n\n\n\n let rollup_config = RollupConfig::new_builder()\n\n .l2_sudt_validator_script_type_hash(DUMMY_SUDT_VALIDATOR_SCRIPT_TYPE_HASH.pack())\n\n .build();\n\n let mut ctx = TestingContext::setup(&rollup_config);\n\n\n\n // init accounts\n\n let _meta = ctx\n\n .state\n\n .create_account_from_script(\n\n Script::new_builder()\n\n .code_hash(DUMMY_SUDT_VALIDATOR_SCRIPT_TYPE_HASH.clone().pack())\n\n .args([1u8; 20].to_vec().pack())\n\n .hash_type(ScriptHashType::Type.into())\n\n .build(),\n\n )\n\n .expect(\"create account\");\n", "file_path": "tests/src/script_tests/l2_scripts/sudt.rs", "rank": 84, "score": 96475.62921632969 }, { "content": "fn check_block_timestamp(\n\n prev_global_state: &GlobalState,\n\n post_global_state: &GlobalState,\n\n block_timestamp: u64,\n\n) -> Result<(), Error> {\n\n let prev_version: u8 = prev_global_state.version().into();\n\n let post_version: u8 = post_global_state.version().into();\n\n\n\n if 0 == post_version && post_global_state.tip_block_timestamp().unpack() != 0 {\n\n debug!(\"v0 global state tip block timestamp isn't 0\");\n\n return Err(Error::InvalidPostGlobalState);\n\n }\n\n\n\n // NOTE: Downgrade already checked in main\n\n if 0 == post_version {\n\n debug!(\"[check block timestamp] skip block timestamp\");\n\n return Ok(());\n\n }\n\n\n\n let rollup_input_since = Since::new(load_input_since(0, Source::GroupInput)?);\n", "file_path": "contracts/state-validator/src/verifications/submit_block.rs", "rank": 85, "score": 96475.62921632969 }, { "content": "#[test]\n\nfn bindgen_test_layout_smt_pair_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<smt_pair_t>(),\n\n 68usize,\n\n concat!(\"Size of: \", stringify!(smt_pair_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<smt_pair_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(smt_pair_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<smt_pair_t>())).key as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(smt_pair_t),\n\n \"::\",\n\n stringify!(key)\n\n )\n", "file_path": "contracts/ckb-smt/src/bindings.rs", "rank": 86, "score": 96475.62921632969 }, { "content": "fn check_layer2_withdrawal(\n\n rollup_type_hash: &H256,\n\n config: &RollupConfig,\n\n kv_state: &mut KVState,\n\n block: &L2BlockReader,\n\n) -> Result<(), Error> {\n\n /// Pay fee to block producer\n\n fn pay_fee(\n\n kv_state: &mut KVState,\n\n payer_address: &RegistryAddress,\n\n block_producer_address: &RegistryAddress,\n\n amount: U256,\n\n ) -> Result<(), Error> {\n\n kv_state.burn_sudt(CKB_SUDT_ACCOUNT_ID, payer_address, amount)?;\n\n kv_state.mint_sudt(CKB_SUDT_ACCOUNT_ID, block_producer_address, amount)?;\n\n Ok(())\n\n }\n\n\n\n let withdrawals = block.withdrawals();\n\n // return ok if no withdrawals\n", "file_path": "contracts/state-validator/src/verifications/submit_block.rs", "rank": 87, "score": 96475.62921632969 }, { "content": "#[test]\n\nfn test_insufficient_balance() {\n\n init_env_log();\n\n let init_a_balance = U256::from(10000);\n\n\n\n let rollup_config = RollupConfig::new_builder()\n\n .l2_sudt_validator_script_type_hash(DUMMY_SUDT_VALIDATOR_SCRIPT_TYPE_HASH.pack())\n\n .build();\n\n let mut ctx = TestingContext::setup(&rollup_config);\n\n\n\n // init accounts\n\n let _meta = ctx\n\n .state\n\n .create_account_from_script(\n\n Script::new_builder()\n\n .code_hash(DUMMY_SUDT_VALIDATOR_SCRIPT_TYPE_HASH.clone().pack())\n\n .args([1u8; 20].to_vec().pack())\n\n .hash_type(ScriptHashType::Type.into())\n\n .build(),\n\n )\n\n .expect(\"create account\");\n", "file_path": "tests/src/script_tests/l2_scripts/sudt.rs", "rank": 88, "score": 96475.62921632969 }, { "content": "#[test]\n\nfn test_example_sum() {\n\n let mut tree = DummyState::default();\n\n let chain_view = DummyChainStore;\n\n let from_id: u32 = 2;\n\n let init_value: u64 = 0;\n\n let rollup_config = RollupConfig::default();\n\n\n\n let contract_id = tree\n\n .create_account_from_script(\n\n Script::new_builder()\n\n .code_hash(SUM_PROGRAM_CODE_HASH.pack())\n\n .args([0u8; 20].to_vec().pack())\n\n .hash_type(ScriptHashType::Type.into())\n\n .build(),\n\n )\n\n .expect(\"create account\");\n\n\n\n // run handle message\n\n {\n\n let mut backend_manage = build_backend_manage(&rollup_config);\n", "file_path": "tests/src/script_tests/l2_scripts/examples.rs", "rank": 89, "score": 96475.62921632969 }, { "content": "#[test]\n\nfn bindgen_test_layout_smt_state_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<smt_state_t>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(smt_state_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<smt_state_t>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(smt_state_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<smt_state_t>())).pairs as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(smt_state_t),\n\n \"::\",\n\n stringify!(pairs)\n\n )\n", "file_path": "contracts/ckb-smt/src/bindings.rs", "rank": 90, "score": 96475.62921632969 }, { "content": "fn check_layer2_deposit(\n\n rollup_type_hash: &H256,\n\n config: &RollupConfig,\n\n kv_state: &mut KVState,\n\n deposit_cells: &[DepositRequestCell],\n\n) -> Result<(), Error> {\n\n let registry_ctx = gw_common::registry::context::RegistryContext::new(\n\n config.allowed_eoa_type_hashes().into_iter().collect(),\n\n );\n\n for request in deposit_cells {\n\n // check that account's script is a valid EOA script\n\n if request.account_script.hash_type() != ScriptHashType::Type.into() {\n\n return Err(Error::UnknownEOAScript);\n\n }\n\n let registry_id: u32 = request.args.registry_id().unpack();\n\n\n\n // find or create EOA\n\n let address = match kv_state.get_account_id_by_script_hash(&request.account_script_hash)? {\n\n Some(_id) => {\n\n // account is exist, query registry address\n", "file_path": "contracts/state-validator/src/verifications/submit_block.rs", "rank": 91, "score": 96475.62921632969 }, { "content": "/// program entry\n\nfn program_entry() -> i8 {\n\n // Call main function and return error code\n\n match entry::main() {\n\n Ok(_) => 0,\n\n Err(err) => err as i8,\n\n }\n\n}\n", "file_path": "contracts/always-success/src/main.rs", "rank": 92, "score": 96017.98951049699 }, { "content": "/// program entry\n\nfn program_entry() -> i8 {\n\n // Call main function and return error code\n\n match entry::main() {\n\n Ok(_) => 0,\n\n Err(err) => err as i8,\n\n }\n\n}\n", "file_path": "contracts/custodian-lock/src/main.rs", "rank": 93, "score": 96017.98951049699 }, { "content": "/// program entry\n\nfn program_entry() -> i8 {\n\n // Call main function and return error code\n\n match entry::main() {\n\n Ok(_) => 0,\n\n Err(err) => err as i8,\n\n }\n\n}\n", "file_path": "contracts/challenge-lock/src/main.rs", "rank": 94, "score": 96017.98951049699 }, { "content": "/// program entry\n\nfn program_entry() -> i8 {\n\n // Call main function and return error code\n\n match entry::main() {\n\n Ok(_) => 0,\n\n Err(err) => err as i8,\n\n }\n\n}\n", "file_path": "contracts/state-validator/src/main.rs", "rank": 95, "score": 96017.98951049699 }, { "content": "/// program entry\n\nfn program_entry() -> i8 {\n\n // Call main function and return error code\n\n match entry::main() {\n\n Ok(_) => 0,\n\n Err(err) => err as i8,\n\n }\n\n}\n", "file_path": "contracts/withdrawal-lock/src/main.rs", "rank": 96, "score": 96017.98951049699 }, { "content": "/// program entry\n\nfn program_entry() -> i8 {\n\n // Call main function and return error code\n\n match entry::main() {\n\n Ok(_) => 0,\n\n Err(err) => err as i8,\n\n }\n\n}\n", "file_path": "contracts/deposit-lock/src/main.rs", "rank": 97, "score": 96017.98951049699 }, { "content": "/// program entry\n\nfn program_entry() -> i8 {\n\n // Call main function and return error code\n\n match entry::main() {\n\n Ok(_) => 0,\n\n Err(err) => err as i8,\n\n }\n\n}\n", "file_path": "contracts/stake-lock/src/main.rs", "rank": 98, "score": 96017.98951049699 }, { "content": "#[test]\n\nfn test_transfer_to_self_overflow() {\n\n let init_a_balance: U256 = U256::MAX - U256::one();\n\n let init_ckb = U256::from(100u64);\n\n\n\n let rollup_config = RollupConfig::new_builder()\n\n .l2_sudt_validator_script_type_hash(DUMMY_SUDT_VALIDATOR_SCRIPT_TYPE_HASH.pack())\n\n .build();\n\n let mut ctx = TestingContext::setup(&rollup_config);\n\n\n\n // init accounts\n\n let _meta = ctx\n\n .state\n\n .create_account_from_script(\n\n Script::new_builder()\n\n .code_hash(DUMMY_SUDT_VALIDATOR_SCRIPT_TYPE_HASH.clone().pack())\n\n .args([1u8; 20].to_vec().pack())\n\n .hash_type(ScriptHashType::Type.into())\n\n .build(),\n\n )\n\n .expect(\"create account\");\n", "file_path": "tests/src/script_tests/l2_scripts/sudt.rs", "rank": 99, "score": 94833.31819134494 } ]
Rust
src/message_decoder/mod.rs
silathdiir/pg_wire
f106d57abfd501e4f1a1f7f8c20418e2998c2be0
use crate::{ cursor::Cursor, message_decoder::state::{Payload, Tag}, messages::FrontendMessage, Result, }; use state::State; use std::mem::MaybeUninit; mod state; #[derive(Debug, PartialEq)] pub enum Status { Requesting(usize), Decoding, Done(FrontendMessage), } pub struct MessageDecoder { state: State, tag: u8, } impl Default for MessageDecoder { fn default() -> MessageDecoder { MessageDecoder::new() } } impl MessageDecoder { pub fn new() -> MessageDecoder { MessageDecoder { state: State::new(), tag: 0, } } pub fn next_stage(&mut self, payload: Option<&[u8]>) -> Result<Status> { let payload = if let Some(payload) = payload { payload } else { &[] }; let mut state = unsafe { MaybeUninit::zeroed().assume_init() }; std::mem::swap(&mut state, &mut self.state); let (new_state, prev) = state.try_step(payload)?; self.state = new_state; match prev { State::Created(_) => Ok(Status::Requesting(1)), State::RequestingTag(_) => Ok(Status::Requesting(4)), State::Tag(Tag(tag)) => { self.tag = tag; Ok(Status::Requesting((Cursor::from(payload).read_i32()? - 4) as usize)) } State::WaitingForPayload(_) => Ok(Status::Decoding), State::Payload(Payload(data)) => { let message = FrontendMessage::decode(self.tag, &data)?; Ok(Status::Done(message)) } } } } #[cfg(test)] mod tests { use super::*; use crate::messages::QUERY; const QUERY_STRING: &str = "select * from t\0"; const QUERY_BYTES: &[u8] = QUERY_STRING.as_bytes(); const LEN: i32 = QUERY_STRING.len() as i32; #[test] fn request_message_tag() { let mut decoder = MessageDecoder::default(); assert_eq!(decoder.next_stage(None), Ok(Status::Requesting(1))); } #[test] fn request_message_len() { let mut decoder = MessageDecoder::default(); decoder.next_stage(None).expect("proceed to the next stage"); assert_eq!(decoder.next_stage(Some(&[QUERY])), Ok(Status::Requesting(4))); } #[test] fn request_message_payload() { let mut decoder = MessageDecoder::default(); decoder.next_stage(None).expect("proceed to the next stage"); decoder.next_stage(Some(&[QUERY])).expect("proceed to the next stage"); assert_eq!( decoder.next_stage(Some(&LEN.to_be_bytes())), Ok(Status::Requesting((LEN - 4) as usize)) ); } #[test] fn decoding_message() { let mut decoder = MessageDecoder::default(); decoder.next_stage(None).expect("proceed to the next stage"); decoder.next_stage(Some(&[QUERY])).expect("proceed to the next stage"); decoder .next_stage(Some(&LEN.to_be_bytes())) .expect("proceed to the next stage"); assert_eq!(decoder.next_stage(Some(QUERY_BYTES)), Ok(Status::Decoding)); } #[test] fn request_next_message() { let mut decoder = MessageDecoder::default(); decoder.next_stage(None).expect("proceed to the next stage"); decoder.next_stage(Some(&[QUERY])).expect("proceed to the next stage"); decoder .next_stage(Some(&LEN.to_be_bytes())) .expect("proceed to the next stage"); decoder .next_stage(Some(QUERY_BYTES)) .expect("proceed to the next stage"); assert_eq!( decoder.next_stage(None), Ok(Status::Done(FrontendMessage::Query { sql: "select * from t".to_owned() })) ); } #[test] fn full_cycle() { let mut decoder = MessageDecoder::default(); decoder.next_stage(None).expect("proceed to the next stage"); decoder.next_stage(Some(&[QUERY])).expect("proceed to the next stage"); decoder .next_stage(Some(&LEN.to_be_bytes())) .expect("proceed to the next stage"); decoder .next_stage(Some(QUERY_BYTES)) .expect("proceed to the next stage"); decoder.next_stage(None).expect("proceed to the next stage"); assert_eq!(decoder.next_stage(None), Ok(Status::Requesting(1))); } }
use crate::{ cursor::Cursor, message_decoder::state::{Payload, Tag}, messages::FrontendMessage, Result, }; use state::State; use std::mem::MaybeUninit; mod state; #[derive(Debug, PartialEq)] pub enum Status { Requesting(usize), Decoding, Done(FrontendMessage), } pub struct MessageDecoder { state: State, tag: u8, } impl Default for MessageDecoder { fn default() -> MessageDecoder { MessageDecoder::new() } } impl MessageDecoder { pub fn new() -> MessageDecoder { MessageDecoder { state: State::new(), tag: 0, } } pub fn next_stage(&mut self, payload: Option<&[u8]>) -> Result<Status> { let payload = if let Some(payload) = payload { payload } else { &[] }; let mut state = unsafe { MaybeUninit::zeroed().assume_init() }; std::mem::swap(&mut state, &mut self.state); let (new_state, prev) = state.try_step(payload)?; self.state = new_state; match prev { State::Created(_) => Ok(Status::Requesting(1)), State::RequestingTag(_) => Ok(Status::Requesting(4)), State::Tag(Tag(tag)) => { self.tag = tag; Ok(Status::Requesting((Cursor::from(payload).read_i32()? - 4) as usize)) } State::WaitingForPayload(_) => Ok(Status::Decoding), State::Payload(Payload(data)) => { let message = FrontendMessage::decode(self.tag, &data)?; Ok(Status::Done(message)) } } } } #[cfg(test)] mod tests { use super::*; use crate::messages::QUERY; const QUERY_STRING: &str = "select * from t\0"; const QUERY_BYTES: &[u8] = QUERY_STRING.as_bytes(); const LEN: i32 = QUERY_STRING.len() as i32; #[test] fn request_message_tag() { let mut decoder = MessageDecoder::default(); assert_eq!(decoder.next_stage(None), Ok(Status::Requesting(1))); } #[test] fn request_message_len() { let mut decoder = MessageDecoder::default(); decoder.next_stage(None).expect("proceed to the next stage"); assert_eq!(decoder.next_stage(Some(&[QUERY])), Ok(Status::Requesting(4))); } #[test] fn request_message_payload() { let mut decoder = MessageDecoder::default(); decoder.next_stage(None).expect("proceed to the next stage"); decoder.next_stage(Some(&[QUERY])).expect("proceed to the next stage"); assert_eq!( decoder.next_stage(Some(&LEN.to_be_bytes())), Ok(Status::Requesting((LEN - 4) as usize)) ); } #[test] fn decoding_message() { let mut decoder = MessageDecoder::default(); decoder.next_stage(None).expect("proceed to the next stage"); decoder.next_stage(Some(&[QUERY])).expect("proceed to the next stage"); decoder .next_stage(Some(&LEN.to_be_bytes())) .expect("proceed to the next stage"); assert_eq!(decoder.next_stage(Some(QUERY_BYTES)), Ok(Status::Decoding)); } #[test] fn request_next_message() { let mut decoder = MessageDecoder::default(); decoder.next_stage(None).expect("proceed to the next stage"); decoder.next_stage(Some(&[QUERY])).expect("proceed to the next stage"); decoder .next_stage(Some(&LEN.to_be_bytes())) .expect("proceed to the next stage"); decoder .next_stage(Some(QUERY_BYTES)) .expect("proceed to the next stage"); assert_eq!( decoder.next_stage(None), Ok(Status::Done(FrontendMessage::Query { sql: "select * from t".to_owned() })) ); } #[test] fn full_cycle() { let mut decoder = MessageDecoder::default(); decoder.next_stage(None).expect("proceed to the next stage"); decoder.next_stage(Some(&[QUERY])).expect("proceed to the next stage"); decoder .next_stage(Some(&LEN.to_be_bytes())) .expect("proceed to the next stage"); decode
}
r .next_stage(Some(QUERY_BYTES)) .expect("proceed to the next stage"); decoder.next_stage(None).expect("proceed to the next stage"); assert_eq!(decoder.next_stage(None), Ok(Status::Requesting(1))); }
function_block-function_prefixed
[ { "content": "fn decode_execute(mut cursor: Cursor) -> Result<FrontendMessage> {\n\n let portal_name = cursor.read_cstr()?.to_owned();\n\n let max_rows = cursor.read_i32()?;\n\n Ok(FrontendMessage::Execute { portal_name, max_rows })\n\n}\n\n\n", "file_path": "src/messages.rs", "rank": 0, "score": 124314.85484124636 }, { "content": "fn decode_bind(mut cursor: Cursor) -> Result<FrontendMessage> {\n\n let portal_name = cursor.read_cstr()?.to_owned();\n\n let statement_name = cursor.read_cstr()?.to_owned();\n\n\n\n let mut param_formats = vec![];\n\n for _ in 0..cursor.read_i16()? {\n\n match PgFormat::try_from(cursor.read_i16()?) {\n\n Ok(format) => param_formats.push(format),\n\n Err(UnrecognizedFormat(code)) => return Err(Error::InvalidInput(format!(\"unknown format code: {}\", code))),\n\n }\n\n }\n\n\n\n let mut raw_params = vec![];\n\n for _ in 0..cursor.read_i16()? {\n\n let len = cursor.read_i32()?;\n\n if len == -1 {\n\n // As a special case, -1 indicates a NULL parameter value.\n\n raw_params.push(None);\n\n } else {\n\n let mut value = vec![];\n", "file_path": "src/messages.rs", "rank": 1, "score": 124314.85484124636 }, { "content": "fn decode_close(mut cursor: Cursor) -> Result<FrontendMessage> {\n\n let first_char = cursor.read_byte()?;\n\n let name = cursor.read_cstr()?.to_owned();\n\n match first_char {\n\n b'P' => Ok(FrontendMessage::ClosePortal { name }),\n\n b'S' => Ok(FrontendMessage::CloseStatement { name }),\n\n other => Err(Error::InvalidInput(format!(\n\n \"invalid type byte in Close frontend message: {:?}\",\n\n std::char::from_u32(other as u32).unwrap(),\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "src/messages.rs", "rank": 2, "score": 124314.85484124636 }, { "content": "fn decode_describe(mut cursor: Cursor) -> Result<FrontendMessage> {\n\n let first_char = cursor.read_byte()?;\n\n let name = cursor.read_cstr()?.to_owned();\n\n match first_char {\n\n b'P' => Ok(FrontendMessage::DescribePortal { name }),\n\n b'S' => Ok(FrontendMessage::DescribeStatement { name }),\n\n other => Err(Error::InvalidInput(format!(\n\n \"invalid type byte in Describe frontend message: {:?}\",\n\n char::from(other),\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "src/messages.rs", "rank": 3, "score": 124314.85484124636 }, { "content": "fn decode_query(mut cursor: Cursor) -> Result<FrontendMessage> {\n\n let sql = cursor.read_cstr()?.to_owned();\n\n Ok(FrontendMessage::Query { sql })\n\n}\n\n\n", "file_path": "src/messages.rs", "rank": 4, "score": 124314.85484124636 }, { "content": "fn decode_parse(mut cursor: Cursor) -> Result<FrontendMessage> {\n\n let statement_name = cursor.read_cstr()?.to_owned();\n\n let sql = cursor.read_cstr()?.to_owned();\n\n\n\n let mut param_types = vec![];\n\n for _ in 0..cursor.read_i16()? {\n\n let oid = PgType::from_oid(cursor.read_u32()?)?;\n\n log::trace!(\"OID {:?}\", oid);\n\n param_types.push(oid);\n\n }\n\n\n\n Ok(FrontendMessage::Parse {\n\n statement_name,\n\n sql,\n\n param_types,\n\n })\n\n}\n\n\n", "file_path": "src/messages.rs", "rank": 5, "score": 124314.85484124637 }, { "content": "fn decode_flush(_cursor: Cursor) -> Result<FrontendMessage> {\n\n Ok(FrontendMessage::Flush)\n\n}\n\n\n", "file_path": "src/messages.rs", "rank": 6, "score": 97180.96051707635 }, { "content": "fn decode_terminate(_cursor: Cursor) -> Result<FrontendMessage> {\n\n Ok(FrontendMessage::Terminate)\n\n}\n\n\n\n#[cfg(test)]\n\nmod decoding_frontend_messages {\n\n use super::*;\n\n\n\n #[test]\n\n fn query() {\n\n let buffer = [\n\n 99, 114, 101, 97, 116, 101, 32, 115, 99, 104, 101, 109, 97, 32, 115, 99, 104, 101, 109, 97, 95, 110, 97,\n\n 109, 101, 59, 0,\n\n ];\n\n let message = FrontendMessage::decode(b'Q', &buffer);\n\n assert_eq!(\n\n message,\n\n Ok(FrontendMessage::Query {\n\n sql: \"create schema schema_name;\".to_owned()\n\n })\n", "file_path": "src/messages.rs", "rank": 7, "score": 97180.96051707635 }, { "content": "fn decode_sync(_cursor: Cursor) -> Result<FrontendMessage> {\n\n Ok(FrontendMessage::Sync)\n\n}\n\n\n", "file_path": "src/messages.rs", "rank": 8, "score": 97180.96051707635 }, { "content": "fn parse_bool_from_text(s: &str) -> Result<Value, String> {\n\n match s.trim().to_lowercase().as_str() {\n\n \"t\" | \"tr\" | \"tru\" | \"true\" | \"y\" | \"ye\" | \"yes\" | \"on\" | \"1\" => Ok(Value::True),\n\n \"f\" | \"fa\" | \"fal\" | \"fals\" | \"false\" | \"n\" | \"no\" | \"of\" | \"off\" | \"0\" => Ok(Value::False),\n\n _ => Err(format!(\"Failed to parse Bool from: {}\", s)),\n\n }\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 9, "score": 85790.48357676281 }, { "content": "fn parse_integer_from_text(s: &str) -> Result<Value, String> {\n\n let v: i32 = match s.trim().parse() {\n\n Ok(v) => v,\n\n Err(_) => return Err(format!(\"Failed to parse SmallInt from: {}\", s)),\n\n };\n\n\n\n Ok(Value::Int32(v))\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 10, "score": 85790.48357676281 }, { "content": "fn parse_bigint_from_text(s: &str) -> Result<Value, String> {\n\n let v: i64 = match s.trim().parse() {\n\n Ok(v) => v,\n\n Err(_) => return Err(format!(\"Failed to parse SmallInt from: {}\", s)),\n\n };\n\n\n\n Ok(Value::Int64(v))\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 11, "score": 85790.48357676281 }, { "content": "fn parse_smallint_from_text(s: &str) -> Result<Value, String> {\n\n let v: i16 = match s.trim().parse() {\n\n Ok(v) => v,\n\n Err(_) => return Err(format!(\"Failed to parse SmallInt from: {}\", s)),\n\n };\n\n\n\n Ok(Value::Int16(v))\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 12, "score": 85790.48357676281 }, { "content": "fn parse_varchar_from_text(s: &str) -> Result<Value, String> {\n\n Ok(Value::String(s.into()))\n\n}\n\n\n\n/// Represents PostgreSQL data values sent and received over wire\n\n#[allow(missing_docs)]\n\n#[derive(Debug, PartialEq)]\n\npub enum Value {\n\n Null,\n\n True,\n\n False,\n\n Int16(i16),\n\n Int32(i32),\n\n Int64(i64),\n\n String(String),\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "src/types.rs", "rank": 13, "score": 85790.48357676281 }, { "content": "fn parse_char_from_text(s: &str) -> Result<Value, String> {\n\n Ok(Value::String(s.into()))\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 14, "score": 85790.48357676281 }, { "content": "fn parse_integer_from_binary(buf: &mut Cursor) -> Result<Value, String> {\n\n let v = match buf.read_i32() {\n\n Ok(v) => v,\n\n Err(_) => return Err(format!(\"Failed to parse Integer from: {:?}\", buf)),\n\n };\n\n\n\n Ok(Value::Int32(v))\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 15, "score": 77841.21230942824 }, { "content": "fn parse_varchar_from_binary(buf: &mut Cursor) -> Result<Value, String> {\n\n let s = match buf.read_str() {\n\n Ok(s) => s,\n\n Err(_) => return Err(format!(\"Failed to parse UTF8 from: {:?}\", buf)),\n\n };\n\n\n\n Ok(Value::String(s.into()))\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 16, "score": 77841.21230942824 }, { "content": "fn parse_char_from_binary(buf: &mut Cursor) -> Result<Value, String> {\n\n let s = match buf.read_str() {\n\n Ok(s) => s,\n\n Err(_) => return Err(format!(\"Failed to parse UTF8 from: {:?}\", buf)),\n\n };\n\n\n\n Ok(Value::String(s.into()))\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 17, "score": 77841.21230942824 }, { "content": "fn parse_bool_from_binary(buf: &mut Cursor) -> Result<Value, String> {\n\n let v = match buf.read_byte() {\n\n Ok(0) => Value::False,\n\n Ok(_) => Value::True,\n\n _ => return Err(\"invalid buffer size\".into()),\n\n };\n\n\n\n Ok(v)\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 18, "score": 77841.21230942824 }, { "content": "fn parse_smallint_from_binary(buf: &mut Cursor) -> Result<Value, String> {\n\n let v = match buf.read_i32() {\n\n Ok(v) => v as i16,\n\n Err(_) => return Err(format!(\"Failed to parse SmallInt from: {:?}\", buf)),\n\n };\n\n\n\n Ok(Value::Int16(v))\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 19, "score": 77841.21230942824 }, { "content": "fn parse_bigint_from_binary(buf: &mut Cursor) -> Result<Value, String> {\n\n let v = match buf.read_i64() {\n\n Ok(v) => v,\n\n Err(_) => return Err(format!(\"Failed to parse BigInt from: {:?}\", buf)),\n\n };\n\n\n\n Ok(Value::Int64(v))\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 20, "score": 77841.21230942824 }, { "content": " State::Created(Created)\n\n }\n\n\n\n pub(crate) fn try_step(self, buf: &[u8]) -> Result<(State, State)> {\n\n let mut cursor = Cursor::from(buf);\n\n match &self {\n\n State::Created(created) => Ok((State::RequestingTag(created.transform(&mut cursor)?), self)),\n\n State::RequestingTag(rt) => Ok((State::Tag(rt.transform(&mut cursor)?), self)),\n\n State::Tag(tag) => Ok((State::WaitingForPayload(tag.transform(&mut cursor)?), self)),\n\n State::WaitingForPayload(w) => Ok((State::Payload(w.transform(&mut cursor)?), self)),\n\n State::Payload(decoded) => Ok((State::Created(decoded.transform(&mut cursor)?), self)),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::messages::QUERY;\n\n\n", "file_path": "src/message_decoder/state.rs", "rank": 30, "score": 71891.9237815194 }, { "content": "#[derive(Debug, PartialEq)]\n\npub(crate) struct Payload(pub(crate) Vec<u8>);\n\n\n\nimpl<'p> Transform<Created> for &'p Payload {\n\n fn transform(self, _buf: &mut Cursor) -> Result<Created> {\n\n Ok(Created)\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub(crate) enum State {\n\n Created(Created),\n\n RequestingTag(RequestingTag),\n\n Tag(Tag),\n\n WaitingForPayload(WaitingForPayload),\n\n Payload(Payload),\n\n}\n\n\n\nimpl State {\n\n pub(crate) fn new() -> State {\n", "file_path": "src/message_decoder/state.rs", "rank": 31, "score": 71885.73540861561 }, { "content": "}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub(crate) struct Tag(pub(crate) u8);\n\n\n\nimpl<'t> Transform<WaitingForPayload> for &'t Tag {\n\n fn transform(self, _buf: &mut Cursor) -> Result<WaitingForPayload> {\n\n Ok(WaitingForPayload)\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub(crate) struct WaitingForPayload;\n\n\n\nimpl<'w> Transform<Payload> for &'w WaitingForPayload {\n\n fn transform(self, buf: &mut Cursor) -> Result<Payload> {\n\n Ok(Payload(Vec::from(&*buf)))\n\n }\n\n}\n\n\n", "file_path": "src/message_decoder/state.rs", "rank": 32, "score": 71881.3415638533 }, { "content": "\n\n let (state, _prev) = state.try_step(&[]).expect(\"proceed to the next step\");\n\n\n\n assert_eq!(\n\n state.try_step(&[QUERY]),\n\n Ok((State::Tag(Tag(QUERY)), State::RequestingTag(RequestingTag)))\n\n );\n\n }\n\n\n\n #[test]\n\n fn decoding_body() {\n\n let state = State::new();\n\n\n\n let (state, _prev) = state.try_step(&[]).expect(\"proceed to the next step\");\n\n let (state, _prev) = state.try_step(&[QUERY]).expect(\"proceed to the next step\");\n\n\n\n assert_eq!(\n\n state.try_step(&[]),\n\n Ok((State::WaitingForPayload(WaitingForPayload), State::Tag(Tag(QUERY))))\n\n );\n", "file_path": "src/message_decoder/state.rs", "rank": 33, "score": 71879.88776014997 }, { "content": " const QUERY_STRING: &[u8] = b\"select * from t\";\n\n\n\n #[test]\n\n fn created() {\n\n assert_eq!(State::new(), State::Created(Created));\n\n }\n\n\n\n #[test]\n\n fn requesting_tag() {\n\n let state = State::new();\n\n\n\n assert_eq!(\n\n state.try_step(&[]),\n\n Ok((State::RequestingTag(RequestingTag), State::new()))\n\n );\n\n }\n\n\n\n #[test]\n\n fn parse_tag() {\n\n let state = State::new();\n", "file_path": "src/message_decoder/state.rs", "rank": 34, "score": 71877.08154328572 }, { "content": " }\n\n\n\n #[test]\n\n fn read_body() {\n\n let state = State::new();\n\n\n\n let (state, _prev) = state.try_step(&[]).expect(\"proceed to the next step\");\n\n let (state, _prev) = state.try_step(&[QUERY]).expect(\"proceed to the next step\");\n\n let (state, _prev) = state.try_step(&[]).expect(\"proceed to the next step\");\n\n\n\n assert_eq!(\n\n state.try_step(QUERY_STRING),\n\n Ok((\n\n State::Payload(Payload(QUERY_STRING.to_vec())),\n\n State::WaitingForPayload(WaitingForPayload)\n\n ))\n\n );\n\n }\n\n\n\n #[test]\n", "file_path": "src/message_decoder/state.rs", "rank": 35, "score": 71876.53129599347 }, { "content": " fn full_cycle() {\n\n let state = State::new();\n\n\n\n let (state, _prev) = state.try_step(&[]).expect(\"proceed to the next step\");\n\n let (state, _prev) = state.try_step(&[QUERY]).expect(\"proceed to the next step\");\n\n let (state, _prev) = state.try_step(&[]).expect(\"proceed to the next step\");\n\n let (state, _prev) = state.try_step(QUERY_STRING).expect(\"proceed to the next step\");\n\n\n\n assert_eq!(\n\n state.try_step(&[]),\n\n Ok((State::Created(Created), State::Payload(Payload(QUERY_STRING.to_vec()))))\n\n );\n\n }\n\n}\n", "file_path": "src/message_decoder/state.rs", "rank": 36, "score": 71873.67902267947 }, { "content": "// Copyright 2020 Alex Dukhno\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::{cursor::Cursor, Result};\n\n\n", "file_path": "src/message_decoder/state.rs", "rank": 37, "score": 71864.44935643759 }, { "content": "trait Transform<C> {\n\n fn transform(self, buf: &mut Cursor) -> Result<C>;\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub(crate) struct Created;\n\n\n\nimpl<'c> Transform<RequestingTag> for &'c Created {\n\n fn transform(self, _buf: &mut Cursor) -> Result<RequestingTag> {\n\n Ok(RequestingTag)\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub(crate) struct RequestingTag;\n\n\n\nimpl<'rt> Transform<Tag> for &'rt RequestingTag {\n\n fn transform(self, buf: &mut Cursor) -> Result<Tag> {\n\n Ok(Tag(buf.read_byte()?))\n\n }\n", "file_path": "src/message_decoder/state.rs", "rank": 38, "score": 64727.84365005866 }, { "content": "// Copyright 2020 Alex Dukhno\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::types::NotSupportedOid;\n\n\n\n/// Protocol operation result\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n\n/// `Error` type in protocol `Result`. Indicates that something went not well\n", "file_path": "src/result.rs", "rank": 39, "score": 27718.7833166027 }, { "content": "#[derive(Debug, PartialEq)]\n\npub enum Error {\n\n /// Indicates that the current count of active connections is full\n\n ConnectionIdExhausted,\n\n /// Indicates that incoming data is invalid\n\n InvalidInput(String),\n\n /// Indicates that incoming data can't be parsed as UTF-8 string\n\n InvalidUtfString,\n\n /// Indicates that incoming string is not terminated by zero byte\n\n ZeroByteNotFound,\n\n /// Indicates that frontend message is not supported\n\n UnsupportedFrontendMessage,\n\n /// Indicates that protocol version is not supported\n\n UnsupportedVersion,\n\n /// Indicates that client request is not supported\n\n UnsupportedRequest,\n\n /// Indicates that during handshake client sent unrecognized protocol version\n\n UnrecognizedVersion,\n\n /// Indicates that connection verification is failed\n\n VerificationFailed,\n\n}\n\n\n\nimpl From<NotSupportedOid> for Error {\n\n fn from(error: NotSupportedOid) -> Error {\n\n Error::InvalidInput(error.to_string())\n\n }\n\n}\n", "file_path": "src/result.rs", "rank": 40, "score": 27717.117050519653 }, { "content": " }\n\n}\n\n\n\n/// Struct description of metadata that describes how client should interpret\n\n/// outgoing selected data\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct ColumnMetadata {\n\n /// name of the column that was specified in query\n\n pub name: String,\n\n /// PostgreSQL data type id\n\n pub type_id: u32,\n\n /// PostgreSQL data type size\n\n pub type_size: i16,\n\n}\n\n\n\nimpl ColumnMetadata {\n\n /// Creates new column metadata\n\n pub fn new<S: ToString>(name: S, pg_type: PgType) -> ColumnMetadata {\n\n Self {\n\n name: name.to_string(),\n\n type_id: pg_type.type_oid(),\n\n type_size: pg_type.type_len(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/messages.rs", "rank": 41, "score": 25454.31026846223 }, { "content": " let mut buff = vec![BACKEND_KEY_DATA, 0, 0, 0, 12];\n\n buff.extend_from_slice(&conn_id.to_be_bytes());\n\n buff.extend_from_slice(&secret_key.to_be_bytes());\n\n buff\n\n }\n\n BackendMessage::ReadyForQuery => vec![READY_FOR_QUERY, 0, 0, 0, 5, EMPTY_QUERY_RESPONSE],\n\n BackendMessage::DataRow(row) => {\n\n let mut row_buff = Vec::new();\n\n for field in row.iter() {\n\n row_buff.extend_from_slice(&(field.len() as i32).to_be_bytes());\n\n row_buff.extend_from_slice(field.as_str().as_bytes());\n\n }\n\n let mut len_buff = Vec::new();\n\n len_buff.extend_from_slice(&[DATA_ROW]);\n\n len_buff.extend_from_slice(&(6 + row_buff.len() as i32).to_be_bytes());\n\n len_buff.extend_from_slice(&(row.len() as i16).to_be_bytes());\n\n len_buff.extend_from_slice(&row_buff);\n\n len_buff\n\n }\n\n BackendMessage::RowDescription(description) => {\n", "file_path": "src/messages.rs", "rank": 42, "score": 25451.29935991997 }, { "content": " pub fn decode(tag: u8, buffer: &[u8]) -> Result<Self> {\n\n log::trace!(\"Receives frontend tag = {:?}, buffer = {:?}\", char::from(tag), buffer);\n\n\n\n let cursor = Cursor::from(buffer);\n\n match tag {\n\n // Simple query flow.\n\n QUERY => decode_query(cursor),\n\n\n\n // Extended query flow.\n\n BIND => decode_bind(cursor),\n\n CLOSE => decode_close(cursor),\n\n DESCRIBE => decode_describe(cursor),\n\n EXECUTE => decode_execute(cursor),\n\n FLUSH => decode_flush(cursor),\n\n PARSE => decode_parse(cursor),\n\n SYNC => decode_sync(cursor),\n\n\n\n TERMINATE => decode_terminate(cursor),\n\n\n\n // Invalid.\n", "file_path": "src/messages.rs", "rank": 43, "score": 25451.186967703186 }, { "content": " let mut buff = Vec::new();\n\n for field in description.iter() {\n\n buff.extend_from_slice(field.name.as_str().as_bytes());\n\n buff.extend_from_slice(&[0]); // end of c string\n\n buff.extend_from_slice(&(0i32).to_be_bytes()); // table id\n\n buff.extend_from_slice(&(0i16).to_be_bytes()); // column id\n\n buff.extend_from_slice(&field.type_id.to_be_bytes());\n\n buff.extend_from_slice(&field.type_size.to_be_bytes());\n\n buff.extend_from_slice(&(-1i32).to_be_bytes()); // type modifier\n\n buff.extend_from_slice(&0i16.to_be_bytes());\n\n }\n\n let mut len_buff = Vec::new();\n\n len_buff.extend_from_slice(&[ROW_DESCRIPTION]);\n\n len_buff.extend_from_slice(&(6 + buff.len() as i32).to_be_bytes());\n\n len_buff.extend_from_slice(&(description.len() as i16).to_be_bytes());\n\n len_buff.extend_from_slice(&buff);\n\n len_buff\n\n }\n\n BackendMessage::CommandComplete(command) => {\n\n let mut command_buff = Vec::new();\n", "file_path": "src/messages.rs", "rank": 44, "score": 25448.372598208924 }, { "content": " statement_name: \"\".to_owned(),\n\n sql: \"select * from schema_name.table_name where si_column = $1;\".to_owned(),\n\n param_types: vec![Some(PgType::Integer)],\n\n })\n\n );\n\n }\n\n\n\n #[test]\n\n fn sync() {\n\n let message = FrontendMessage::decode(b'S', &[]);\n\n assert_eq!(message, Ok(FrontendMessage::Sync));\n\n }\n\n\n\n #[test]\n\n fn terminate() {\n\n let message = FrontendMessage::decode(b'X', &[]);\n\n assert_eq!(message, Ok(FrontendMessage::Terminate));\n\n }\n\n}\n\n\n", "file_path": "src/messages.rs", "rank": 45, "score": 25447.800055350686 }, { "content": " parameter_status_buff.extend_from_slice(parameters.as_ref());\n\n parameter_status_buff\n\n }\n\n BackendMessage::ParameterDescription(pg_types) => {\n\n let mut type_id_buff = Vec::new();\n\n for pg_type in pg_types.iter() {\n\n type_id_buff.extend_from_slice(&pg_type.type_oid().to_be_bytes());\n\n }\n\n let mut buff = Vec::new();\n\n buff.extend_from_slice(&[PARAMETER_DESCRIPTION]);\n\n buff.extend_from_slice(&(6 + type_id_buff.len() as i32).to_be_bytes());\n\n buff.extend_from_slice(&(pg_types.len() as i16).to_be_bytes());\n\n buff.extend_from_slice(&type_id_buff);\n\n buff\n\n }\n\n BackendMessage::NoData => vec![NO_DATA, 0, 0, 0, 4],\n\n BackendMessage::ParseComplete => vec![PARSE_COMPLETE, 0, 0, 0, 4],\n\n BackendMessage::BindComplete => vec![BIND_COMPLETE, 0, 0, 0, 4],\n\n BackendMessage::CloseComplete => vec![CLOSE_COMPLETE, 0, 0, 0, 4],\n\n }\n", "file_path": "src/messages.rs", "rank": 46, "score": 25447.646914113244 }, { "content": "#[cfg(test)]\n\nmod serializing_backend_messages {\n\n use super::*;\n\n\n\n #[test]\n\n fn notice() {\n\n assert_eq!(BackendMessage::NoticeResponse.as_vec(), vec![NOTICE_RESPONSE]);\n\n }\n\n\n\n #[test]\n\n fn authentication_cleartext_password() {\n\n assert_eq!(\n\n BackendMessage::AuthenticationCleartextPassword.as_vec(),\n\n vec![AUTHENTICATION, 0, 0, 0, 8, 0, 0, 0, 3]\n\n )\n\n }\n\n\n\n #[test]\n\n fn authentication_md5_password() {\n\n assert_eq!(\n", "file_path": "src/messages.rs", "rank": 47, "score": 25447.47486858473 }, { "content": "const SEVERITY: u8 = b'S';\n\nconst CODE: u8 = b'C';\n\nconst MESSAGE: u8 = b'M';\n\nconst EMPTY_QUERY_RESPONSE: u8 = b'I';\n\nconst NOTICE_RESPONSE: u8 = b'N';\n\nconst AUTHENTICATION: u8 = b'R';\n\nconst BACKEND_KEY_DATA: u8 = b'K';\n\nconst PARAMETER_STATUS: u8 = b'S';\n\nconst ROW_DESCRIPTION: u8 = b'T';\n\nconst READY_FOR_QUERY: u8 = b'Z';\n\nconst PARAMETER_DESCRIPTION: u8 = b't';\n\nconst NO_DATA: u8 = b'n';\n\nconst PARSE_COMPLETE: u8 = b'1';\n\nconst BIND_COMPLETE: u8 = b'2';\n\nconst CLOSE_COMPLETE: u8 = b'3';\n\n\n\npub(crate) const QUERY: u8 = b'Q';\n\nconst BIND: u8 = b'B';\n\nconst CLOSE: u8 = b'C';\n\nconst DESCRIBE: u8 = b'D';\n", "file_path": "src/messages.rs", "rank": 48, "score": 25446.361505659384 }, { "content": " /// This message informs the frontend about the previous `Parse` frontend\n\n /// message is successful.\n\n ParseComplete,\n\n /// This message informs the frontend about the previous `Bind` frontend\n\n /// message is successful.\n\n BindComplete,\n\n /// This message informs the frontend about the previous `Close` frontend\n\n /// message is successful.\n\n CloseComplete,\n\n}\n\n\n\nimpl BackendMessage {\n\n /// returns binary representation of a backend message\n\n pub fn as_vec(&self) -> Vec<u8> {\n\n match self {\n\n BackendMessage::NoticeResponse => vec![NOTICE_RESPONSE],\n\n BackendMessage::AuthenticationCleartextPassword => vec![AUTHENTICATION, 0, 0, 0, 8, 0, 0, 0, 3],\n\n BackendMessage::AuthenticationMD5Password => vec![AUTHENTICATION, 0, 0, 0, 12, 0, 0, 0, 5, 1, 1, 1, 1],\n\n BackendMessage::AuthenticationOk => vec![AUTHENTICATION, 0, 0, 0, 8, 0, 0, 0, 0],\n\n BackendMessage::BackendKeyData(conn_id, secret_key) => {\n", "file_path": "src/messages.rs", "rank": 49, "score": 25446.198673193136 }, { "content": " }\n\n if let Some(message) = message.as_ref() {\n\n message_buff.extend_from_slice(&[MESSAGE]);\n\n message_buff.extend_from_slice(message.as_bytes());\n\n message_buff.extend_from_slice(&[0]);\n\n }\n\n error_response_buff.extend_from_slice(&(message_buff.len() as i32 + 4 + 1).to_be_bytes());\n\n error_response_buff.extend_from_slice(message_buff.as_ref());\n\n error_response_buff.extend_from_slice(&[0]);\n\n error_response_buff.to_vec()\n\n }\n\n BackendMessage::ParameterStatus(name, value) => {\n\n let mut parameter_status_buff = Vec::new();\n\n parameter_status_buff.extend_from_slice(&[PARAMETER_STATUS]);\n\n let mut parameters = Vec::new();\n\n parameters.extend_from_slice(name.as_bytes());\n\n parameters.extend_from_slice(&[0]);\n\n parameters.extend_from_slice(value.as_bytes());\n\n parameters.extend_from_slice(&[0]);\n\n parameter_status_buff.extend_from_slice(&(4 + parameters.len() as u32).to_be_bytes());\n", "file_path": "src/messages.rs", "rank": 50, "score": 25445.89916384045 }, { "content": " /// The frontend must now send a PasswordMessage containing the password\n\n /// (with user name) encrypted via MD5, then encrypted again using the 4-byte\n\n /// random salt specified in the AuthenticationMD5Password message. If this\n\n /// is the correct password, the server responds with an AuthenticationOk,\n\n /// otherwise it responds with an ErrorResponse. The actual PasswordMessage\n\n /// can be computed in SQL as concat('md5', md5(concat(md5(concat(password,\n\n /// username)), random-salt))). (Keep in mind the md5() function returns its\n\n /// result as a hex string.)\n\n AuthenticationMD5Password,\n\n /// The authentication exchange is successfully completed.\n\n AuthenticationOk,\n\n /// Identifies as cancellation key data. The frontend must save these values\n\n /// if it wishes to be able to issue CancelRequest messages later.\n\n BackendKeyData(ConnId, ConnSecretKey),\n\n /// Start-up is completed. The frontend can now issue commands.\n\n ReadyForQuery,\n\n /// One of the set of rows returned by a SELECT, FETCH, etc query.\n\n DataRow(Vec<String>),\n\n /// Indicates that rows are about to be returned in response to a SELECT, FETCH,\n\n /// etc query. The contents of this message describe the column layout of\n", "file_path": "src/messages.rs", "rank": 51, "score": 25445.219723917067 }, { "content": " vec![READY_FOR_QUERY, 0, 0, 0, 5, EMPTY_QUERY_RESPONSE]\n\n )\n\n }\n\n\n\n #[test]\n\n fn data_row() {\n\n assert_eq!(\n\n BackendMessage::DataRow(vec![\"1\".to_owned(), \"2\".to_owned(), \"3\".to_owned()]).as_vec(),\n\n vec![DATA_ROW, 0, 0, 0, 21, 0, 3, 0, 0, 0, 1, 49, 0, 0, 0, 1, 50, 0, 0, 0, 1, 51]\n\n )\n\n }\n\n\n\n #[test]\n\n fn row_description() {\n\n assert_eq!(\n\n BackendMessage::RowDescription(vec![ColumnMetadata::new(\"c1\".to_owned(), PgType::Integer)]).as_vec(),\n\n vec![\n\n ROW_DESCRIPTION,\n\n 0,\n\n 0,\n", "file_path": "src/messages.rs", "rank": 52, "score": 25445.14625681488 }, { "content": " command_buff.extend_from_slice(&[COMMAND_COMPLETE]);\n\n command_buff.extend_from_slice(&(4 + command.len() as i32 + 1).to_be_bytes());\n\n command_buff.extend_from_slice(command.as_bytes());\n\n command_buff.extend_from_slice(&[0]);\n\n command_buff\n\n }\n\n BackendMessage::EmptyQueryResponse => vec![EMPTY_QUERY_RESPONSE, 0, 0, 0, 4],\n\n BackendMessage::ErrorResponse(severity, code, message) => {\n\n let mut error_response_buff = Vec::new();\n\n error_response_buff.extend_from_slice(&[ERROR_RESPONSE]);\n\n let mut message_buff = Vec::new();\n\n if let Some(severity) = severity.as_ref() {\n\n message_buff.extend_from_slice(&[SEVERITY]);\n\n message_buff.extend_from_slice(severity.as_bytes());\n\n message_buff.extend_from_slice(&[0]);\n\n }\n\n if let Some(code) = code.as_ref() {\n\n message_buff.extend_from_slice(&[CODE]);\n\n message_buff.extend_from_slice(code.as_bytes());\n\n message_buff.extend_from_slice(&[0]);\n", "file_path": "src/messages.rs", "rank": 53, "score": 25445.02206772295 }, { "content": "const EXECUTE: u8 = b'E';\n\nconst FLUSH: u8 = b'H';\n\nconst PARSE: u8 = b'P';\n\nconst SYNC: u8 = b'S';\n\nconst TERMINATE: u8 = b'X';\n\n\n\n/// Frontend PostgreSQL Wire Protocol messages\n\n/// see [Protocol Flow](https://www.postgresql.org/docs/current/protocol-flow.html)\n\n/// PostgreSQL documentation section\n\n#[derive(Debug, PartialEq)]\n\npub enum FrontendMessage {\n\n /// Client requested GSSENC Request\n\n GssencRequest,\n\n /// Client requested SSL connection\n\n SslRequest,\n\n /// Connection setup message\n\n Setup {\n\n /// client parameters\n\n params: Vec<(String, String)>,\n\n },\n", "file_path": "src/messages.rs", "rank": 54, "score": 25445.00541211925 }, { "content": " for _ in 0..len {\n\n value.push(cursor.read_byte()?);\n\n }\n\n raw_params.push(Some(value));\n\n }\n\n }\n\n\n\n let mut result_formats = vec![];\n\n for _ in 0..cursor.read_i16()? {\n\n match PgFormat::try_from(cursor.read_i16()?) {\n\n Ok(format) => result_formats.push(format),\n\n Err(UnrecognizedFormat(code)) => return Err(Error::InvalidInput(format!(\"unknown format code: {}\", code))),\n\n }\n\n }\n\n\n\n Ok(FrontendMessage::Bind {\n\n portal_name,\n\n statement_name,\n\n param_formats,\n\n raw_params,\n\n result_formats,\n\n })\n\n}\n\n\n", "file_path": "src/messages.rs", "rank": 55, "score": 25444.83740350778 }, { "content": "// Copyright 2020 Alex Dukhno\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::{cursor::Cursor, types::PgType, ConnId, ConnSecretKey, Error, PgFormat, Result, UnrecognizedFormat};\n\nuse std::convert::TryFrom;\n\n\n\nconst COMMAND_COMPLETE: u8 = b'C';\n\nconst DATA_ROW: u8 = b'D';\n\nconst ERROR_RESPONSE: u8 = b'E';\n", "file_path": "src/messages.rs", "rank": 56, "score": 25444.53679059725 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n fn flush() {\n\n let message = FrontendMessage::decode(b'H', &[]);\n\n assert_eq!(message, Ok(FrontendMessage::Flush));\n\n }\n\n\n\n #[test]\n\n fn parse() {\n\n let buffer = [\n\n 0, 115, 101, 108, 101, 99, 116, 32, 42, 32, 102, 114, 111, 109, 32, 115, 99, 104, 101, 109, 97, 95, 110,\n\n 97, 109, 101, 46, 116, 97, 98, 108, 101, 95, 110, 97, 109, 101, 32, 119, 104, 101, 114, 101, 32, 115, 105,\n\n 95, 99, 111, 108, 117, 109, 110, 32, 61, 32, 36, 49, 59, 0, 0, 1, 0, 0, 0, 23,\n\n ];\n\n let message = FrontendMessage::decode(b'P', &buffer);\n\n assert_eq!(\n\n message,\n\n Ok(FrontendMessage::Parse {\n", "file_path": "src/messages.rs", "rank": 57, "score": 25444.398633649394 }, { "content": "\n\n #[test]\n\n fn no_data() {\n\n assert_eq!(BackendMessage::NoData.as_vec(), vec![NO_DATA, 0, 0, 0, 4])\n\n }\n\n\n\n #[test]\n\n fn parse_complete() {\n\n assert_eq!(BackendMessage::ParseComplete.as_vec(), vec![PARSE_COMPLETE, 0, 0, 0, 4])\n\n }\n\n\n\n #[test]\n\n fn bind_complete() {\n\n assert_eq!(BackendMessage::BindComplete.as_vec(), vec![BIND_COMPLETE, 0, 0, 0, 4])\n\n }\n\n\n\n #[test]\n\n fn close_complete() {\n\n assert_eq!(BackendMessage::CloseComplete.as_vec(), vec![CLOSE_COMPLETE, 0, 0, 0, 4])\n\n }\n\n}\n", "file_path": "src/messages.rs", "rank": 58, "score": 25444.182275942374 }, { "content": " }\n\n\n\n #[test]\n\n fn close_portal() {\n\n let buffer = [80, 112, 111, 114, 116, 97, 108, 95, 110, 97, 109, 101, 0];\n\n let message = FrontendMessage::decode(b'C', &buffer);\n\n assert_eq!(\n\n message,\n\n Ok(FrontendMessage::ClosePortal {\n\n name: \"portal_name\".to_owned(),\n\n })\n\n );\n\n }\n\n\n\n #[test]\n\n fn close_statement() {\n\n let buffer = [83, 115, 116, 97, 116, 101, 109, 101, 110, 116, 95, 110, 97, 109, 101, 0];\n\n let message = FrontendMessage::decode(b'C', &buffer);\n\n assert_eq!(\n\n message,\n", "file_path": "src/messages.rs", "rank": 59, "score": 25444.089653569543 }, { "content": " BackendMessage::AuthenticationMD5Password.as_vec(),\n\n vec![AUTHENTICATION, 0, 0, 0, 12, 0, 0, 0, 5, 1, 1, 1, 1]\n\n )\n\n }\n\n\n\n #[test]\n\n fn authentication_ok() {\n\n assert_eq!(\n\n BackendMessage::AuthenticationOk.as_vec(),\n\n vec![AUTHENTICATION, 0, 0, 0, 8, 0, 0, 0, 0]\n\n )\n\n }\n\n\n\n #[test]\n\n fn backend_key_data() {\n\n assert_eq!(\n\n BackendMessage::BackendKeyData(1, 2).as_vec(),\n\n vec![BACKEND_KEY_DATA, 0, 0, 0, 12, 0, 0, 0, 1, 0, 0, 0, 2]\n\n )\n\n }\n", "file_path": "src/messages.rs", "rank": 60, "score": 25443.947193912645 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n fn bind() {\n\n let buffer = [\n\n 112, 111, 114, 116, 97, 108, 95, 110, 97, 109, 101, 0, 115, 116, 97, 116, 101, 109, 101, 110, 116, 95, 110,\n\n 97, 109, 101, 0, 0, 2, 0, 1, 0, 1, 0, 2, 0, 0, 0, 4, 0, 0, 0, 1, 0, 0, 0, 4, 0, 0, 0, 2, 0, 0,\n\n ];\n\n let message = FrontendMessage::decode(b'B', &buffer);\n\n assert_eq!(\n\n message,\n\n Ok(FrontendMessage::Bind {\n\n portal_name: \"portal_name\".to_owned(),\n\n statement_name: \"statement_name\".to_owned(),\n\n param_formats: vec![PgFormat::Binary, PgFormat::Binary],\n\n raw_params: vec![Some(vec![0, 0, 0, 1]), Some(vec![0, 0, 0, 2])],\n\n result_formats: vec![],\n\n })\n\n );\n", "file_path": "src/messages.rs", "rank": 61, "score": 25443.917622506342 }, { "content": " /// This command is part of the extended query flow.\n\n CloseStatement {\n\n /// The name of the prepared statement to close.\n\n name: String,\n\n },\n\n\n\n /// Close the named portal.\n\n ///\n\n /// This command is part of the extended query flow.\n\n ClosePortal {\n\n /// The name of the portal to close.\n\n name: String,\n\n },\n\n\n\n /// Terminate a connection.\n\n Terminate,\n\n}\n\n\n\nimpl FrontendMessage {\n\n /// decodes buffer data to a frontend message\n", "file_path": "src/messages.rs", "rank": 62, "score": 25443.880020177214 }, { "content": " _ => {\n\n log::error!(\"unsupported frontend message tag {}\", tag);\n\n Err(Error::UnsupportedFrontendMessage)\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// Backend PostgreSQL Wire Protocol messages\n\n/// see [Protocol Flow](https://www.postgresql.org/docs/current/protocol-flow.html)\n\n#[allow(dead_code)]\n\n#[derive(Debug, PartialEq)]\n\npub enum BackendMessage {\n\n /// A warning message has been issued. The frontend should display the message\n\n /// but continue listening for ReadyForQuery or ErrorResponse.\n\n NoticeResponse,\n\n /// The frontend must now send a PasswordMessage containing the password in\n\n /// clear-text form. If this is the correct password, the server responds\n\n /// with an AuthenticationOk, otherwise it responds with an ErrorResponse.\n\n AuthenticationCleartextPassword,\n", "file_path": "src/messages.rs", "rank": 63, "score": 25443.531773782655 }, { "content": " Ok(FrontendMessage::CloseStatement {\n\n name: \"statement_name\".to_owned(),\n\n })\n\n );\n\n }\n\n\n\n #[test]\n\n fn describe_portal() {\n\n let buffer = [80, 112, 111, 114, 116, 97, 108, 95, 110, 97, 109, 101, 0];\n\n let message = FrontendMessage::decode(b'D', &buffer);\n\n assert_eq!(\n\n message,\n\n Ok(FrontendMessage::DescribePortal {\n\n name: \"portal_name\".to_owned()\n\n })\n\n );\n\n }\n\n\n\n #[test]\n\n fn describe_statement() {\n", "file_path": "src/messages.rs", "rank": 64, "score": 25443.209739196165 }, { "content": " let buffer = [83, 115, 116, 97, 116, 101, 109, 101, 110, 116, 95, 110, 97, 109, 101, 0];\n\n let message = FrontendMessage::decode(b'D', &buffer);\n\n assert_eq!(\n\n message,\n\n Ok(FrontendMessage::DescribeStatement {\n\n name: \"statement_name\".to_owned()\n\n })\n\n );\n\n }\n\n\n\n #[test]\n\n fn execute() {\n\n let buffer = [112, 111, 114, 116, 97, 108, 95, 110, 97, 109, 101, 0, 0, 0, 0, 0];\n\n let message = FrontendMessage::decode(b'E', &buffer);\n\n assert_eq!(\n\n message,\n\n Ok(FrontendMessage::Execute {\n\n portal_name: \"portal_name\".to_owned(),\n\n max_rows: 0,\n\n })\n", "file_path": "src/messages.rs", "rank": 65, "score": 25443.117465501535 }, { "content": " /// the rows. This will be followed by a DataRow message for each row being\n\n /// returned to the frontend.\n\n RowDescription(Vec<ColumnMetadata>),\n\n /// An SQL command completed normally.\n\n CommandComplete(String),\n\n /// An empty query string was recognized.\n\n EmptyQueryResponse,\n\n /// An error has occurred. Contains (`Severity`, `Error Code`, `Error Message`)\n\n /// all of them are optional\n\n ErrorResponse(Option<&'static str>, Option<&'static str>, Option<String>),\n\n /// This message informs the frontend about the current (initial) setting of\n\n /// backend parameters, such as client_encoding or DateStyle\n\n ///\n\n /// see https://www.postgresql.org/docs/12/protocol-flow.html#PROTOCOL-ASYNC\n\n /// 3rd and 4th paragraph\n\n ParameterStatus(String, String),\n\n /// Indicates that parameters are needed by a prepared statement.\n\n ParameterDescription(Vec<PgType>),\n\n /// Indicates that the statement will not return rows.\n\n NoData,\n", "file_path": "src/messages.rs", "rank": 66, "score": 25442.953841768613 }, { "content": " 255,\n\n 255,\n\n 255,\n\n 0,\n\n 0\n\n ]\n\n );\n\n }\n\n\n\n #[test]\n\n fn command_complete() {\n\n assert_eq!(\n\n BackendMessage::CommandComplete(\"SELECT\".to_owned()).as_vec(),\n\n vec![COMMAND_COMPLETE, 0, 0, 0, 11, 83, 69, 76, 69, 67, 84, 0]\n\n )\n\n }\n\n\n\n #[test]\n\n fn empty_response() {\n\n assert_eq!(\n", "file_path": "src/messages.rs", "rank": 67, "score": 25442.941550012172 }, { "content": " /// This command is part of the extended query flow.\n\n Bind {\n\n /// The destination portal. An empty string selects the unnamed\n\n /// portal. The portal can later be executed with the `Execute` command.\n\n portal_name: String,\n\n /// The source prepared statement. An empty string selects the unnamed\n\n /// prepared statement.\n\n statement_name: String,\n\n /// The formats used to encode the parameters.\n\n param_formats: Vec<PgFormat>,\n\n /// The value of each parameter.\n\n raw_params: Vec<Option<Vec<u8>>>,\n\n /// The desired formats for the columns in the result set.\n\n result_formats: Vec<PgFormat>,\n\n },\n\n\n\n /// Execute a bound portal.\n\n ///\n\n /// This command is part of the extended query flow.\n\n Execute {\n", "file_path": "src/messages.rs", "rank": 68, "score": 25442.80363776972 }, { "content": " BackendMessage::EmptyQueryResponse.as_vec(),\n\n vec![EMPTY_QUERY_RESPONSE, 0, 0, 0, 4]\n\n )\n\n }\n\n\n\n #[test]\n\n fn error_response() {\n\n assert_eq!(\n\n BackendMessage::ErrorResponse(None, None, None).as_vec(),\n\n vec![ERROR_RESPONSE, 0, 0, 0, 5, 0]\n\n )\n\n }\n\n\n\n #[test]\n\n fn parameter_description() {\n\n assert_eq!(\n\n BackendMessage::ParameterDescription(vec![PgType::Integer]).as_vec(),\n\n vec![PARAMETER_DESCRIPTION, 0, 0, 0, 10, 0, 1, 0, 0, 0, 23]\n\n )\n\n }\n", "file_path": "src/messages.rs", "rank": 69, "score": 25440.3065613548 }, { "content": " 99,\n\n 111,\n\n 100,\n\n 105,\n\n 110,\n\n 103,\n\n 0,\n\n 85,\n\n 84,\n\n 70,\n\n 56,\n\n 0\n\n ]\n\n )\n\n }\n\n\n\n #[test]\n\n fn ready_for_query() {\n\n assert_eq!(\n\n BackendMessage::ReadyForQuery.as_vec(),\n", "file_path": "src/messages.rs", "rank": 70, "score": 25440.236724477618 }, { "content": " /// Execute the specified SQL.\n\n ///\n\n /// This is issued as part of the simple query flow.\n\n Query {\n\n /// The SQL to execute.\n\n sql: String,\n\n },\n\n\n\n /// Parse the specified SQL into a prepared statement.\n\n ///\n\n /// This starts the extended query flow.\n\n Parse {\n\n /// The name of the prepared statement to create. An empty string\n\n /// specifies the unnamed prepared statement.\n\n statement_name: String,\n\n /// The SQL to parse.\n\n sql: String,\n\n /// The number of specified parameter data types can be less than the\n\n /// number of parameters specified in the query.\n\n param_types: Vec<Option<PgType>>,\n", "file_path": "src/messages.rs", "rank": 71, "score": 25440.212061924 }, { "content": "\n\n #[test]\n\n fn parameter_status() {\n\n assert_eq!(\n\n BackendMessage::ParameterStatus(\"client_encoding\".to_owned(), \"UTF8\".to_owned()).as_vec(),\n\n vec![\n\n PARAMETER_STATUS,\n\n 0,\n\n 0,\n\n 0,\n\n 25,\n\n 99,\n\n 108,\n\n 105,\n\n 101,\n\n 110,\n\n 116,\n\n 95,\n\n 101,\n\n 110,\n", "file_path": "src/messages.rs", "rank": 72, "score": 25439.67805190745 }, { "content": " /// The name of the portal to execute.\n\n portal_name: String,\n\n /// The maximum number of rows to return before suspending.\n\n ///\n\n /// 0 or negative means infinite.\n\n max_rows: i32,\n\n },\n\n\n\n /// Flush any pending output.\n\n ///\n\n /// This command is part of the extended query flow.\n\n Flush,\n\n\n\n /// Finish an extended query.\n\n ///\n\n /// This command is part of the extended query flow.\n\n Sync,\n\n\n\n /// Close the named statement.\n\n ///\n", "file_path": "src/messages.rs", "rank": 73, "score": 25436.86311439782 }, { "content": " },\n\n\n\n /// Describe an existing prepared statement.\n\n ///\n\n /// This command is part of the extended query flow.\n\n DescribeStatement {\n\n /// The name of the prepared statement to describe.\n\n name: String,\n\n },\n\n\n\n /// Describe an existing portal.\n\n ///\n\n /// This command is part of the extended query flow.\n\n DescribePortal {\n\n /// The name of the portal to describe.\n\n name: String,\n\n },\n\n\n\n /// Bind an existing prepared statement to a portal.\n\n ///\n", "file_path": "src/messages.rs", "rank": 74, "score": 25434.614029626224 }, { "content": " 0,\n\n 27,\n\n 0,\n\n 1,\n\n 99,\n\n 49,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 0,\n\n 23,\n\n 0,\n\n 4,\n\n 255,\n", "file_path": "src/messages.rs", "rank": 75, "score": 25434.614029626224 }, { "content": " pub fn start() -> Process {\n\n Process { state: None }\n\n }\n\n\n\n /// Proceed to the next stage of client <-> server hand shake\n\n pub fn next_stage(&mut self, payload: Option<&[u8]>) -> Result<Status> {\n\n match self.state.take() {\n\n None => {\n\n self.state = Some(State::new());\n\n Ok(Status::Requesting(Request::Buffer(4)))\n\n }\n\n Some(state) => {\n\n if let Some(bytes) = payload {\n\n let new_state = state.try_step(bytes)?;\n\n let result = match new_state.clone() {\n\n State::ParseSetup(ReadSetupMessage(len)) => Ok(Status::Requesting(Request::Buffer(len))),\n\n State::MessageLen(MessageLen(len)) => Ok(Status::Requesting(Request::Buffer(len))),\n\n State::SetupParsed(SetupParsed::Established(props)) => Ok(Status::Done(props)),\n\n State::SetupParsed(SetupParsed::Secure) => Ok(Status::Requesting(Request::UpgradeToSsl)),\n\n State::SetupParsed(SetupParsed::Cancel(conn_id, secret_key)) => {\n", "file_path": "src/hand_shake/mod.rs", "rank": 76, "score": 24811.364958281985 }, { "content": "// Copyright 2020 Alex Dukhno\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::{ConnId, ConnSecretKey, Result};\n\nuse state::{MessageLen, ReadSetupMessage, SetupParsed, State};\n\n\n\nmod state;\n\n\n\n/// Encapsulate protocol hand shake process\n", "file_path": "src/hand_shake/mod.rs", "rank": 77, "score": 24798.9155227912 }, { "content": " fn init_hand_shake_process() {\n\n let mut process = Process::start();\n\n assert_eq!(process.next_stage(None), Ok(Status::Requesting(Request::Buffer(4))));\n\n }\n\n\n\n #[test]\n\n fn read_setup_message_length() {\n\n let mut process = Process::start();\n\n\n\n process.next_stage(None).expect(\"proceed to the next stage\");\n\n assert_eq!(\n\n process.next_stage(Some(&[0, 0, 0, 33])),\n\n Ok(Status::Requesting(Request::Buffer(29)))\n\n );\n\n }\n\n\n\n #[test]\n\n fn non_secure_connection_hand_shake() {\n\n let mut process = Process::start();\n\n\n", "file_path": "src/hand_shake/mod.rs", "rank": 78, "score": 24798.46233138381 }, { "content": " Done(Vec<(String, String)>),\n\n /// Hand shake is for canceling request that is executed on `ConnId`\n\n Cancel(ConnId, ConnSecretKey),\n\n}\n\n\n\n/// Hand shake request to a server process\n\n#[derive(Debug, PartialEq)]\n\npub enum Request {\n\n /// Server should provide `Process` with buffer of request size\n\n Buffer(usize),\n\n /// Server should use SSL protocol over current connection stream\n\n UpgradeToSsl,\n\n}\n\n\n\n#[cfg(test)]\n\nmod perform_hand_shake_loop {\n\n use super::*;\n\n use crate::request_codes::{CANCEL_REQUEST_CODE, SSL_REQUEST_CODE, VERSION_3_CODE};\n\n\n\n #[test]\n", "file_path": "src/hand_shake/mod.rs", "rank": 79, "score": 24798.406440866213 }, { "content": " Ok(Status::Cancel(conn_id, secret_key))\n\n }\n\n };\n\n self.state = Some(new_state);\n\n result\n\n } else {\n\n self.state = Some(state.try_step(&[])?);\n\n Ok(Status::Requesting(Request::Buffer(4)))\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// Represents status of the [Process] stages\n\n#[derive(Debug, PartialEq)]\n\npub enum Status {\n\n /// Hand shake process requesting additional data or action to proceed further\n\n Requesting(Request),\n\n /// Hand shake is finished. Contains client runtime settings, e.g. database, username\n", "file_path": "src/hand_shake/mod.rs", "rank": 80, "score": 24798.04416563353 }, { "content": " let conn_id: ConnId = 1;\n\n let secret_key: ConnSecretKey = 2;\n\n\n\n let mut process = Process::start();\n\n\n\n process.next_stage(None).expect(\"proceed to the next stage\");\n\n process\n\n .next_stage(Some(&[0, 0, 0, 16]))\n\n .expect(\"proceed to the next stage\");\n\n\n\n let mut payload = vec![];\n\n payload.extend_from_slice(&Vec::from(CANCEL_REQUEST_CODE));\n\n payload.extend_from_slice(&conn_id.to_be_bytes());\n\n payload.extend_from_slice(&secret_key.to_be_bytes());\n\n\n\n assert_eq!(\n\n process.next_stage(Some(&payload)),\n\n Ok(Status::Cancel(conn_id, secret_key))\n\n );\n\n }\n\n}\n", "file_path": "src/hand_shake/mod.rs", "rank": 81, "score": 24797.516821254914 }, { "content": "///\n\n/// # Examples\n\n///\n\n/// ```ignore\n\n/// use pg_wire::{HandShakeProcess, HandShakeStatus, HandShakeRequest};\n\n///\n\n/// let mut stream = accept_tcp_connection();\n\n/// let mut process = HandShakeProcess::start();\n\n/// let mut buffer: Option<Vec<u8>> = None;\n\n/// loop {\n\n/// match process.next_stage(buffer.as_deref()) {\n\n/// Ok(HandShakeStatus::Requesting(HandShakeRequest::Buffer(len))) => {\n\n/// let mut buf = vec![b'0'; len];\n\n/// buffer = Some(stream.read(&mut buf));\n\n/// }\n\n/// Ok(HandShakeStatus::Requesting(HandShakeRequest::UpgradeToSsl)) => {\n\n/// stream.write_all(&[b'S']); // accepting tls connection from client\n\n/// stream = tls_stream(stream);\n\n/// }\n\n/// Ok(HandShakeStatus::Cancel(conn_id, secret_key)) => {\n", "file_path": "src/hand_shake/mod.rs", "rank": 82, "score": 24797.30843719369 }, { "content": " process.next_stage(None).expect(\"proceed to the next stage\");\n\n process\n\n .next_stage(Some(&[0, 0, 0, 33]))\n\n .expect(\"proceed to the next stage\");\n\n\n\n let mut payload = vec![];\n\n payload.extend_from_slice(&Vec::from(VERSION_3_CODE));\n\n payload.extend_from_slice(b\"key1\\0\");\n\n payload.extend_from_slice(b\"value1\\0\");\n\n payload.extend_from_slice(b\"key2\\0\");\n\n payload.extend_from_slice(b\"value2\\0\");\n\n payload.extend_from_slice(&[0]);\n\n\n\n assert_eq!(\n\n process.next_stage(Some(&payload)),\n\n Ok(Status::Done(vec![\n\n (\"key1\".to_owned(), \"value1\".to_owned()),\n\n (\"key2\".to_owned(), \"value2\".to_owned())\n\n ]))\n\n );\n", "file_path": "src/hand_shake/mod.rs", "rank": 83, "score": 24797.224160407935 }, { "content": "\n\n let mut payload = vec![];\n\n payload.extend_from_slice(&Vec::from(VERSION_3_CODE));\n\n payload.extend_from_slice(b\"key1\\0\");\n\n payload.extend_from_slice(b\"value1\\0\");\n\n payload.extend_from_slice(b\"key2\\0\");\n\n payload.extend_from_slice(b\"value2\\0\");\n\n payload.extend_from_slice(&[0]);\n\n\n\n assert_eq!(\n\n process.next_stage(Some(&payload)),\n\n Ok(Status::Done(vec![\n\n (\"key1\".to_owned(), \"value1\".to_owned()),\n\n (\"key2\".to_owned(), \"value2\".to_owned())\n\n ]))\n\n );\n\n }\n\n\n\n #[test]\n\n fn cancel_query_request() {\n", "file_path": "src/hand_shake/mod.rs", "rank": 84, "score": 24796.202062581116 }, { "content": " }\n\n\n\n #[test]\n\n fn ssl_secure_connection_hand_shake() {\n\n let mut process = Process::start();\n\n\n\n process.next_stage(None).expect(\"proceed to the next stage\");\n\n process\n\n .next_stage(Some(&[0, 0, 0, 8]))\n\n .expect(\"proceed to the next stage\");\n\n\n\n assert_eq!(\n\n process.next_stage(Some(&Vec::from(SSL_REQUEST_CODE))),\n\n Ok(Status::Requesting(Request::UpgradeToSsl))\n\n );\n\n\n\n process.next_stage(None).expect(\"proceed to the next stage\");\n\n process\n\n .next_stage(Some(&[0, 0, 0, 33]))\n\n .expect(\"proceed to the next stage\");\n", "file_path": "src/hand_shake/mod.rs", "rank": 85, "score": 24796.081437242712 }, { "content": "/// handle_request_cancellation(conn_id, secret_key);\n\n/// break;\n\n/// }\n\n/// Ok(HandShakeStatus::Done(props)) => {\n\n/// handle_authentication_and_other_stuff();\n\n/// break;\n\n/// }\n\n/// Err(protocol_error) => {\n\n/// handle_protocol_error(protocol_error);\n\n/// break;\n\n/// }\n\n/// }\n\n/// }\n\n/// ```\n\npub struct Process {\n\n state: Option<State>,\n\n}\n\n\n\nimpl Process {\n\n /// Creates new process to make client <-> server hand shake\n", "file_path": "src/hand_shake/mod.rs", "rank": 86, "score": 24793.942780339094 }, { "content": "\n\nimpl State {\n\n pub(crate) fn new() -> State {\n\n State::MessageLen(MessageLen(4))\n\n }\n\n\n\n pub(crate) fn try_step(self, buf: &[u8]) -> Result<State> {\n\n let mut buffer = Cursor::from(buf);\n\n match self {\n\n State::MessageLen(hand_shake) => Ok(State::ParseSetup(hand_shake.transit(&mut buffer)?)),\n\n State::ParseSetup(hand_shake) => Ok(State::SetupParsed(hand_shake.transit(&mut buffer)?)),\n\n State::SetupParsed(hand_shake) => match hand_shake {\n\n SetupParsed::Secure => Ok(State::MessageLen(MessageLen(4))),\n\n _ => Err(Error::VerificationFailed),\n\n },\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/hand_shake/state.rs", "rank": 87, "score": 24770.61984075031 }, { "content": "mod connection_state_machine {\n\n use super::*;\n\n use crate::request_codes::GSSENC_REQUEST_CODE;\n\n\n\n #[test]\n\n fn created_state() {\n\n let hand_shake = State::new();\n\n\n\n assert_eq!(hand_shake, State::MessageLen(MessageLen(4)));\n\n }\n\n\n\n #[test]\n\n fn read_setup_message_length() {\n\n let hand_shake = State::new();\n\n\n\n assert_eq!(\n\n hand_shake.try_step(&[0, 0, 0, 4]),\n\n Ok(State::ParseSetup(ReadSetupMessage(0)))\n\n );\n\n }\n", "file_path": "src/hand_shake/state.rs", "rank": 88, "score": 24766.167451233276 }, { "content": " #[test]\n\n fn connection_established_with_ssl_request() {\n\n let mut hand_shake = State::new();\n\n\n\n hand_shake = hand_shake.try_step(&[0, 0, 0, 8]).expect(\"proceed to the next step\");\n\n\n\n hand_shake = hand_shake\n\n .try_step(&Vec::from(SSL_REQUEST_CODE))\n\n .expect(\"proceed to the next step\");\n\n assert_eq!(hand_shake, State::SetupParsed(SetupParsed::Secure));\n\n\n\n hand_shake = hand_shake.try_step(&[]).expect(\"proceed to the next step\");\n\n\n\n hand_shake = hand_shake.try_step(&[0, 0, 0, 33]).expect(\"proceed to the next step\");\n\n\n\n let mut payload = vec![];\n\n payload.extend_from_slice(&Vec::from(VERSION_3_CODE));\n\n payload.extend_from_slice(b\"key1\\0\");\n\n payload.extend_from_slice(b\"value1\\0\");\n\n payload.extend_from_slice(b\"key2\\0\");\n", "file_path": "src/hand_shake/state.rs", "rank": 89, "score": 24756.328651850406 }, { "content": "\n\n #[test]\n\n fn non_recognizable_protocol_code() {\n\n let mut hand_shake = State::new();\n\n\n\n hand_shake = hand_shake.try_step(&[0, 0, 0, 25]).expect(\"proceed to the next step\");\n\n\n\n assert_eq!(\n\n hand_shake.try_step(b\"non_recognizable_code\"),\n\n Err(Error::UnsupportedRequest)\n\n );\n\n }\n\n\n\n #[test]\n\n fn version_one_is_not_supported() {\n\n let mut hand_shake = State::new();\n\n\n\n hand_shake = hand_shake.try_step(&[0, 0, 0, 8]).expect(\"proceed to the next step\");\n\n\n\n assert_eq!(\n", "file_path": "src/hand_shake/state.rs", "rank": 90, "score": 24755.786370809376 }, { "content": " Err(Error::UnsupportedRequest)\n\n );\n\n }\n\n\n\n #[test]\n\n fn cancel_query_request() {\n\n let conn_id: ConnId = 1;\n\n let secret_key: ConnSecretKey = 2;\n\n let mut hand_shake = State::new();\n\n\n\n hand_shake = hand_shake.try_step(&[0, 0, 0, 33]).expect(\"proceed to the next step\");\n\n\n\n let mut payload = vec![];\n\n payload.extend_from_slice(&Vec::from(CANCEL_REQUEST_CODE));\n\n payload.extend_from_slice(&conn_id.to_be_bytes());\n\n payload.extend_from_slice(&secret_key.to_be_bytes());\n\n\n\n assert_eq!(\n\n hand_shake.try_step(&payload),\n\n Ok(State::SetupParsed(SetupParsed::Cancel(conn_id, secret_key)))\n\n );\n\n }\n\n}\n", "file_path": "src/hand_shake/state.rs", "rank": 91, "score": 24755.568778454654 }, { "content": " }\n\n SSL_REQUEST_CODE => Ok(SetupParsed::Secure),\n\n _ => Err(Error::UnsupportedRequest),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub(crate) enum SetupParsed {\n\n Established(Vec<(String, String)>),\n\n Cancel(ConnId, ConnSecretKey),\n\n Secure,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub(crate) enum State {\n\n MessageLen(MessageLen),\n\n ParseSetup(ReadSetupMessage),\n\n SetupParsed(SetupParsed),\n\n}\n", "file_path": "src/hand_shake/state.rs", "rank": 92, "score": 24755.511173713174 }, { "content": " payload.extend_from_slice(b\"value2\\0\");\n\n payload.extend_from_slice(&[0]);\n\n\n\n assert_eq!(\n\n hand_shake.try_step(&payload),\n\n Ok(State::SetupParsed(SetupParsed::Established(vec![\n\n (\"key1\".to_owned(), \"value1\".to_owned()),\n\n (\"key2\".to_owned(), \"value2\".to_owned())\n\n ])))\n\n );\n\n }\n\n\n\n #[test]\n\n fn connection_established_with_gssenc_request() {\n\n let mut hand_shake = State::new();\n\n\n\n hand_shake = hand_shake.try_step(&[0, 0, 0, 8]).expect(\"proceed to the next step\");\n\n\n\n assert_eq!(\n\n hand_shake.try_step(&Vec::from(GSSENC_REQUEST_CODE)),\n", "file_path": "src/hand_shake/state.rs", "rank": 93, "score": 24755.395133762802 }, { "content": " hand_shake.try_step(&Vec::from(VERSION_1_CODE)),\n\n Err(Error::UnsupportedVersion)\n\n );\n\n }\n\n\n\n #[test]\n\n fn version_two_is_not_supported() {\n\n let mut hand_shake = State::new();\n\n\n\n hand_shake = hand_shake.try_step(&[0, 0, 0, 8]).expect(\"proceed to the next step\");\n\n\n\n assert_eq!(\n\n hand_shake.try_step(&Vec::from(VERSION_2_CODE)),\n\n Err(Error::UnsupportedVersion)\n\n );\n\n }\n\n\n\n #[test]\n\n fn setup_version_three_with_client_params() {\n\n let mut hand_shake = State::new();\n", "file_path": "src/hand_shake/state.rs", "rank": 94, "score": 24755.008947263206 }, { "content": "\n\n hand_shake = hand_shake.try_step(&[0, 0, 0, 33]).expect(\"proceed to the next step\");\n\n\n\n let mut payload = vec![];\n\n payload.extend_from_slice(&Vec::from(VERSION_3_CODE));\n\n payload.extend_from_slice(b\"key1\\0\");\n\n payload.extend_from_slice(b\"value1\\0\");\n\n payload.extend_from_slice(b\"key2\\0\");\n\n payload.extend_from_slice(b\"value2\\0\");\n\n payload.extend_from_slice(&[0]);\n\n\n\n assert_eq!(\n\n hand_shake.try_step(&payload),\n\n Ok(State::SetupParsed(SetupParsed::Established(vec![\n\n (\"key1\".to_owned(), \"value1\".to_owned()),\n\n (\"key2\".to_owned(), \"value2\".to_owned())\n\n ])))\n\n );\n\n }\n\n\n", "file_path": "src/hand_shake/state.rs", "rank": 95, "score": 24751.800452418203 }, { "content": " log::info!(\"Connection Code: {}\", code);\n\n match code {\n\n VERSION_1_CODE => Err(Error::UnsupportedVersion),\n\n VERSION_2_CODE => Err(Error::UnsupportedVersion),\n\n VERSION_3_CODE => {\n\n let mut props = vec![];\n\n loop {\n\n let key = cursor.read_cstr()?.to_owned();\n\n if key == \"\" {\n\n break;\n\n }\n\n let value = cursor.read_cstr()?.to_owned();\n\n props.push((key, value));\n\n }\n\n Ok(SetupParsed::Established(props))\n\n }\n\n CANCEL_REQUEST_CODE => {\n\n let conn_id = cursor.read_i32()?;\n\n let secret_key = cursor.read_i32()?;\n\n Ok(SetupParsed::Cancel(conn_id, secret_key))\n", "file_path": "src/hand_shake/state.rs", "rank": 96, "score": 24748.46142560134 }, { "content": "// Copyright 2020 Alex Dukhno\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::{\n\n cursor::Cursor,\n\n request_codes::{Code, CANCEL_REQUEST_CODE, SSL_REQUEST_CODE, VERSION_1_CODE, VERSION_2_CODE, VERSION_3_CODE},\n\n ConnId, ConnSecretKey, Error, Result,\n\n};\n\n\n", "file_path": "src/hand_shake/state.rs", "rank": 97, "score": 24748.08324986634 }, { "content": "trait ConnectionTransition<C> {\n\n fn transit(self, cursor: &mut Cursor) -> Result<C>;\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub(crate) struct MessageLen(pub(crate) usize);\n\n\n\nimpl ConnectionTransition<ReadSetupMessage> for MessageLen {\n\n fn transit(self, cursor: &mut Cursor) -> Result<ReadSetupMessage> {\n\n let len = cursor.read_i32()?;\n\n Ok(ReadSetupMessage((len - 4) as usize))\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub(crate) struct ReadSetupMessage(pub(crate) usize);\n\n\n\nimpl ConnectionTransition<SetupParsed> for ReadSetupMessage {\n\n fn transit(self, cursor: &mut Cursor) -> Result<SetupParsed> {\n\n let code = Code(cursor.read_i32()?);\n", "file_path": "src/hand_shake/state.rs", "rank": 98, "score": 21233.986460597753 }, { "content": "pub use messages::{BackendMessage, ColumnMetadata, FrontendMessage};\n\npub use result::{Error, Result};\n\npub use types::{PgType, Value};\n\n\n\nmod cursor;\n\nmod format;\n\nmod hand_shake;\n\nmod message_decoder;\n\n/// Module contains backend messages that could be send by server implementation\n\n/// to a client\n\nmod messages;\n\nmod request_codes;\n\nmod result;\n\nmod types;\n\n\n\n/// Connection key-value params\n\npub type ClientParams = Vec<(String, String)>;\n\n\n\n/// PostgreSQL OID [Object Identifier](https://www.postgresql.org/docs/current/datatype-oid.html)\n\npub type Oid = u32;\n\n/// Connection ID\n\npub type ConnId = i32;\n\n/// Connection secret key\n\npub type ConnSecretKey = i32;\n", "file_path": "src/lib.rs", "rank": 99, "score": 20.82523227731841 } ]
Rust
migrate/src/lib.rs
alekspickle/migrate
c0a95c0b81315e8588c58688bdb775b7dcf997d9
#![warn(missing_docs, unreachable_pub, rust_2018_idioms)] #![forbid(unsafe_code)] mod cli; mod error; pub use error::*; pub use migrate_core as core; use crate::core::MigrationRunMode; use error::{DynError, Error}; use migrate_core::{MigrationsSelection, PlanBuilder}; use structopt::StructOpt; #[cfg(doctest)] doc_comment::doctest!( concat!(env!("CARGO_MANIFEST_DIR"), "/../README.md"), root_readme ); #[derive(Debug)] pub struct MigrateCli(cli::Args); impl MigrateCli { pub fn from_cli_args() -> Self { Self(StructOpt::from_args()) } pub fn try_from_cli_args() -> Result<Self, DynError> { Ok(Self(StructOpt::from_args_safe()?)) } pub async fn run(self, plan_builder: PlanBuilder) -> Result<(), Error> { let (cli::PlanArgGroup { no_commit, no_run }, plan) = match self.0 { cli::Args::Up(cmd) => { let plan = plan_builder .build(&MigrationsSelection::Up { inclusive_bound: cmd.inclusive_bound.as_deref(), }) .await .map_err(ErrorKind::PlanBuild)?; (cmd.plan, plan) } cli::Args::Down(cmd) => { let plan = plan_builder .build(&MigrationsSelection::Down { inclusive_bound: &cmd.inclusive_bound, }) .await .map_err(ErrorKind::PlanBuild)?; (cmd.plan, plan) } cli::Args::List => { tracing::info!( "Listing registered migrations in order:\n{}", plan_builder.display().build() ); return Ok(()); } }; let run_mode = match (no_commit, no_run) { (false, false) => MigrationRunMode::Commit, (true, false) => MigrationRunMode::NoCommit, (false, true) => { let plan = plan.display(); let plan = plan.build(); tracing::info!("The following migration plan is generated:\n{}", plan); return Ok(()); } (true, true) => unreachable!( "BUG: `structopt` should have `conflicts_with` clause that \ prevents this invalid arguments state" ), }; plan.exec(run_mode).await.map_err(ErrorKind::PlanExec)?; Ok(()) } }
#![warn(missing_docs, unreachable_pub, rust_2018_idioms)] #![forbid(unsafe_code)] mod cli; mod error; pub use error::*; pub use migrate_core as core; use crate::core::MigrationRunMode; use error::{DynError, Error}; use migrate_core::{MigrationsSelection, PlanBuilder}; use structopt::StructOpt; #[cfg(doctest)] doc_comment::doctest!( concat!(env!("CARGO_MANIFEST_DIR"), "/../README.md"), root_readme ); #[derive(Debug)] pub struct MigrateCli(cli::Args); impl MigrateCli { pub fn from_cli_args() -> Self { Self(StructOpt::from_args()) } pub fn try_from_cli_args() -> Result<Self, DynError> { Ok(Self(StructOpt::from_args_safe()?)) }
}
pub async fn run(self, plan_builder: PlanBuilder) -> Result<(), Error> { let (cli::PlanArgGroup { no_commit, no_run }, plan) = match self.0 { cli::Args::Up(cmd) => { let plan = plan_builder .build(&MigrationsSelection::Up { inclusive_bound: cmd.inclusive_bound.as_deref(), }) .await .map_err(ErrorKind::PlanBuild)?; (cmd.plan, plan) } cli::Args::Down(cmd) => { let plan = plan_builder .build(&MigrationsSelection::Down { inclusive_bound: &cmd.inclusive_bound, }) .await .map_err(ErrorKind::PlanBuild)?; (cmd.plan, plan) } cli::Args::List => { tracing::info!( "Listing registered migrations in order:\n{}", plan_builder.display().build() ); return Ok(()); } }; let run_mode = match (no_commit, no_run) { (false, false) => MigrationRunMode::Commit, (true, false) => MigrationRunMode::NoCommit, (false, true) => { let plan = plan.display(); let plan = plan.build(); tracing::info!("The following migration plan is generated:\n{}", plan); return Ok(()); } (true, true) => unreachable!( "BUG: `structopt` should have `conflicts_with` clause that \ prevents this invalid arguments state" ), }; plan.exec(run_mode).await.map_err(ErrorKind::PlanExec)?; Ok(()) }
function_block-full_function
[]
Rust
wasm/lib/src/browser_module.rs
alexpantyukhin/RustPython
b0ee1947c775145db055413dce217dca1613712d
use crate::{convert, vm_class::AccessibleVM, wasm_builtins::window}; use futures::{future, Future}; use js_sys::Promise; use num_traits::cast::ToPrimitive; use rustpython_vm::obj::{objint, objstr}; use rustpython_vm::pyobject::{PyContext, PyFuncArgs, PyObjectRef, PyResult, TypeProtocol}; use rustpython_vm::VirtualMachine; use wasm_bindgen::{prelude::*, JsCast}; use wasm_bindgen_futures::{future_to_promise, JsFuture}; enum FetchResponseFormat { Json, Text, ArrayBuffer, } impl FetchResponseFormat { fn from_str(vm: &mut VirtualMachine, s: &str) -> Result<Self, PyObjectRef> { match s { "json" => Ok(FetchResponseFormat::Json), "text" => Ok(FetchResponseFormat::Text), "array_buffer" => Ok(FetchResponseFormat::ArrayBuffer), _ => Err(vm.new_type_error("Unkown fetch response_format".into())), } } fn get_response(&self, response: &web_sys::Response) -> Result<Promise, JsValue> { match self { FetchResponseFormat::Json => response.json(), FetchResponseFormat::Text => response.text(), FetchResponseFormat::ArrayBuffer => response.array_buffer(), } } } fn browser_fetch(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult { arg_check!( vm, args, required = [ (url, Some(vm.ctx.str_type())), (handler, Some(vm.ctx.function_type())) ], optional = [(reject_handler, Some(vm.ctx.function_type()))] ); let response_format = args.get_optional_kwarg_with_type("response_format", vm.ctx.str_type(), vm)?; let method = args.get_optional_kwarg_with_type("method", vm.ctx.str_type(), vm)?; let headers = args.get_optional_kwarg_with_type("headers", vm.ctx.dict_type(), vm)?; let body = args.get_optional_kwarg("body"); let content_type = args.get_optional_kwarg_with_type("content_type", vm.ctx.str_type(), vm)?; let response_format = match response_format { Some(s) => FetchResponseFormat::from_str(vm, &objstr::get_value(&s))?, None => FetchResponseFormat::Text, }; let mut opts = web_sys::RequestInit::new(); match method { Some(s) => opts.method(&objstr::get_value(&s)), None => opts.method("GET"), }; if let Some(body) = body { opts.body(Some(&convert::py_to_js(vm, body))); } let request = web_sys::Request::new_with_str_and_init(&objstr::get_value(url), &opts) .map_err(|err| convert::js_py_typeerror(vm, err))?; if let Some(headers) = headers { let h = request.headers(); for (key, value) in rustpython_vm::obj::objdict::get_key_value_pairs(&headers) { let key = objstr::get_value(&vm.to_str(&key)?); let value = objstr::get_value(&vm.to_str(&value)?); h.set(&key, &value) .map_err(|err| convert::js_py_typeerror(vm, err))?; } } if let Some(content_type) = content_type { request .headers() .set("Content-Type", &objstr::get_value(&content_type)) .map_err(|err| convert::js_py_typeerror(vm, err))?; } let window = window(); let request_prom = window.fetch_with_request(&request); let handler = handler.clone(); let reject_handler = reject_handler.cloned(); let acc_vm = AccessibleVM::from_vm(vm); let future = JsFuture::from(request_prom) .and_then(move |val| { let response = val .dyn_into::<web_sys::Response>() .expect("val to be of type Response"); response_format.get_response(&response) }) .and_then(JsFuture::from) .then(move |val| { let vm = &mut acc_vm .upgrade() .expect("that the VM *not* be destroyed while promise is being resolved"); match val { Ok(val) => { let val = convert::js_to_py(vm, val); let args = PyFuncArgs::new(vec![val], vec![]); let _ = vm.invoke(handler, args); } Err(val) => { if let Some(reject_handler) = reject_handler { let val = convert::js_to_py(vm, val); let args = PyFuncArgs::new(vec![val], vec![]); let _ = vm.invoke(reject_handler, args); } } } future::ok(JsValue::UNDEFINED) }); future_to_promise(future); Ok(vm.get_none()) } fn browser_request_animation_frame(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult { arg_check!(vm, args, required = [(func, Some(vm.ctx.function_type()))]); use std::{cell::RefCell, rc::Rc}; let f = Rc::new(RefCell::new(None)); let g = f.clone(); let func = func.clone(); let acc_vm = AccessibleVM::from_vm(vm); *g.borrow_mut() = Some(Closure::wrap(Box::new(move |time: f64| { let vm = &mut acc_vm .upgrade() .expect("that the vm is valid from inside of request_animation_frame"); let func = func.clone(); let args = PyFuncArgs { args: vec![vm.ctx.new_float(time)], kwargs: vec![], }; let _ = vm.invoke(func, args); let closure = f.borrow_mut().take(); drop(closure); }) as Box<Fn(f64)>)); let id = window() .request_animation_frame(&js_sys::Function::from( g.borrow().as_ref().unwrap().as_ref().clone(), )) .map_err(|err| convert::js_py_typeerror(vm, err))?; Ok(vm.ctx.new_int(id)) } fn browser_cancel_animation_frame(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult { arg_check!(vm, args, required = [(id, Some(vm.ctx.int_type()))]); let id = objint::get_value(id).to_i32().ok_or_else(|| { vm.new_exception( vm.ctx.exceptions.value_error.clone(), "Integer too large to convert to i32 for animationFrame id".into(), ) })?; window() .cancel_animation_frame(id) .map_err(|err| convert::js_py_typeerror(vm, err))?; Ok(vm.get_none()) } fn browser_alert(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult { arg_check!(vm, args, required = [(message, Some(vm.ctx.str_type()))]); window() .alert_with_message(&objstr::get_value(message)) .expect("alert() not to fail"); Ok(vm.get_none()) } fn browser_confirm(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult { arg_check!(vm, args, required = [(message, Some(vm.ctx.str_type()))]); let result = window() .confirm_with_message(&objstr::get_value(message)) .expect("confirm() not to fail"); Ok(vm.new_bool(result)) } fn browser_prompt(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult { arg_check!( vm, args, required = [(message, Some(vm.ctx.str_type()))], optional = [(default, Some(vm.ctx.str_type()))] ); let result = if let Some(default) = default { window().prompt_with_message_and_default( &objstr::get_value(message), &objstr::get_value(default), ) } else { window().prompt_with_message(&objstr::get_value(message)) }; let result = match result.expect("prompt() not to fail") { Some(result) => vm.new_str(result), None => vm.get_none(), }; Ok(result) } const BROWSER_NAME: &str = "browser"; pub fn mk_module(ctx: &PyContext) -> PyObjectRef { py_module!(ctx, BROWSER_NAME, { "fetch" => ctx.new_rustfunc(browser_fetch), "request_animation_frame" => ctx.new_rustfunc(browser_request_animation_frame), "cancel_animation_frame" => ctx.new_rustfunc(browser_cancel_animation_frame), "alert" => ctx.new_rustfunc(browser_alert), "confirm" => ctx.new_rustfunc(browser_confirm), "prompt" => ctx.new_rustfunc(browser_prompt), }) } pub fn setup_browser_module(vm: &mut VirtualMachine) { vm.stdlib_inits .insert(BROWSER_NAME.to_string(), Box::new(mk_module)); }
use crate::{convert, vm_class::AccessibleVM, wasm_builtins::window}; use futures::{future, Future}; use js_sys::Promise; use num_traits::cast::ToPrimitive; use rustpython_vm::obj::{objint, objstr}; use rustpython_vm::pyobject::{PyContext, PyFuncArgs, PyObjectRef, PyResult, TypeProtocol}; use rustpython_vm::VirtualMachine; use wasm_bindgen::{prelude::*, JsCast}; use wasm_bindgen_futures::{future_to_promise, JsFuture}; enum FetchResponseFormat { Json, Text, ArrayBuffer, } impl FetchResponseFormat { fn from_str(vm: &mut VirtualMachine, s: &str) -> Result<Self, PyObjectRef> { match s { "json" => Ok(FetchResponseFormat::Json), "text" => Ok(FetchResponseFormat::Text), "array_buffer" => Ok(FetchResponseFormat::ArrayBuffer), _ => Err(vm.new_type_error("Unkown fetch response_format".into())), } } fn get_response(&self, response: &web_sys::Response) -> Result<Promise, JsValue> { match self { FetchResponseFormat::Json => response.json(), FetchResponseFormat::Text => response.text(), FetchResponseFormat::ArrayBuffer => response.array_buffer(), } } } fn browser_fetch(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult { arg_check!( vm, args, required = [ (url, Some(vm.ctx.str_type())), (handler, Some(vm.ctx.function_type())) ], optional = [(reject_handler, Some(vm.ctx.function_type()))] ); let response_format = args.get_optional_kwarg_with_type("response_format", vm.ctx.str_type(), vm)?; let method = args.get_optional_kwarg_with_type("method", vm.ctx.str_type(), vm)?; let headers = args.get_optional_kwarg_with_type("headers", vm.ctx.dict_type(), vm)?; let body = args.get_optional_kwarg("body"); let content_type = args.get_optional_kwarg_with_type("content_type", vm.ctx.str_type(), vm)?; let response_format = match response_format { Some(s) => FetchResponseFormat::from_str(vm, &objstr::get_value(&s))?, None => FetchResponseFormat::Text, }; let mut opts = web_sys::RequestInit::new(); match method { Some(s) => opts.method(&objstr::get_value(&s)), None => opts.method("GET"), }; if let Some(body) = body { opts.body(Some(&convert::py_to_js(vm, body))); } let request = web_sys::Request::new_with_str_and_init(&objstr::get_value(url), &opts) .map_err(|err| convert::js_py_typeerror(vm, err))?; if let Some(headers) = headers { let h = request.headers(); for (key, value) in rustpython_vm::obj::objdict::get_key_value_pairs(&headers) { let key = objstr::get_value(&vm.to_str(&key)?); let value = objstr::get_value(&vm.to_str(&value)?); h.set(&key, &value) .map_err(|err| convert::js_py_typeerror(vm, err))?; } } if let Some(content_type) = content_type { request .headers() .set("Content-Type", &objstr::get_value(&content_type)) .map_err(|err| convert::js_py_typeerror(vm, err))?; } let window = window(); let request_prom = window.fetch_with_request(&request); let handler = handler.clone(); let reject_handler = reject_handler.cloned(); let acc_vm = AccessibleVM::from_vm(vm); let future = JsFuture::from(request_prom) .and_then(move |val| { let response = val .dyn_into::<web_sys::Response>() .expect("val to be of type Response"); response_format.get_response(&response) }) .and_then(JsFuture::from) .then(move |val| { let vm = &mut acc_vm .upgrade() .expect("that the VM *not* be destroyed while promise is being resolved"); match val { Ok(val) => { let val = convert::js_to_py(vm, val); let args = PyFuncArgs::new(vec![val], vec![]); let _ = vm.invoke(handler, args); } Err(val) => { if let Some(reject_handler) = reject_handler { let val = convert::js_to_py(vm, val); let args = PyFuncArgs::new(vec![val], vec![]); let _ = vm.invoke(reject_handler, args); } } } future::ok(JsValue::UNDEFINED) }); future_to_promise(future); Ok(vm.get_none()) } fn browser_request_animation_frame(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult { arg_check!(vm, args, required = [(func, Some(vm.ctx.function_type()))]); use std::{cell::RefCell, rc::Rc}; let f = Rc::new(RefCell::new(None)); let g = f.clone(); let func = func.clone(); let acc_vm = AccessibleVM::from_vm(vm); *g.borrow_mut() = Some(Closure::wrap(Box::new(move |time: f64| { let vm = &mut acc_vm .upgrade() .expect("that the vm is valid from inside of request_animation_frame"); let func = func.clone(); let args = PyFuncArgs { args: vec![vm.ctx.new_float(time)], kwargs: vec![], }; let _ = vm.invoke(func, args); let closure = f.borrow_mut().take(); drop(closure); }) as Box<Fn(f64)>)); let id = window() .request_animation_frame(&js_sys::Function::from( g.borrow().as_ref().unwrap().as_ref().clone(), )) .map_err(|err| convert::js_py_typeerror(vm, err))?; Ok(vm.ctx.new_int(id)) } fn browser_cancel_animation_frame(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult { arg_check!(vm, args, required = [(id, Some(vm.ctx.int_type()))]); let id = objint::get_value(id).to_i32().ok_or_else(|| { vm.new_exception( vm.ctx.exceptions.value_error.clone(), "Integer too large to convert to i32 for animationFrame id".into(), ) })?; window() .cancel_animation_frame(id) .map_err(|err| convert::js_py_typeerror(vm, err))?; Ok(vm.get_none()) } fn browser_alert(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult { arg_check!(vm, args, required = [(message, Some(vm.ctx.str_type()))]); window() .alert_with_message(&objstr::get_value(message)) .expect("alert() not to fail"); Ok(vm.get_none()) } fn browser_confirm(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult { arg_check!(vm, args, required = [(message, Some(vm.ctx.str_type()))]); let result = window() .confirm_with_message(&objstr::get_value(message)) .expect("confirm() not to fail"); Ok(vm.new_bool(result)) } fn browser_prompt(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult { arg_check!( vm, args, required = [(message, Some(vm.ctx.str_type()))], optional = [(default, Some(vm.ctx.str_type()))] ); let result =
; let result = match result.expect("prompt() not to fail") { Some(result) => vm.new_str(result), None => vm.get_none(), }; Ok(result) } const BROWSER_NAME: &str = "browser"; pub fn mk_module(ctx: &PyContext) -> PyObjectRef { py_module!(ctx, BROWSER_NAME, { "fetch" => ctx.new_rustfunc(browser_fetch), "request_animation_frame" => ctx.new_rustfunc(browser_request_animation_frame), "cancel_animation_frame" => ctx.new_rustfunc(browser_cancel_animation_frame), "alert" => ctx.new_rustfunc(browser_alert), "confirm" => ctx.new_rustfunc(browser_confirm), "prompt" => ctx.new_rustfunc(browser_prompt), }) } pub fn setup_browser_module(vm: &mut VirtualMachine) { vm.stdlib_inits .insert(BROWSER_NAME.to_string(), Box::new(mk_module)); }
if let Some(default) = default { window().prompt_with_message_and_default( &objstr::get_value(message), &objstr::get_value(default), ) } else { window().prompt_with_message(&objstr::get_value(message)) }
if_condition
[ { "content": "fn str_str(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n Ok(s.clone())\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 0, "score": 466286.87047549686 }, { "content": "pub fn subscript(vm: &mut VirtualMachine, value: &str, b: PyObjectRef) -> PyResult {\n\n if objtype::isinstance(&b, &vm.ctx.int_type()) {\n\n match objint::get_value(&b).to_i32() {\n\n Some(pos) => {\n\n let graphemes = to_graphemes(value);\n\n if let Some(idx) = graphemes.get_pos(pos) {\n\n Ok(vm.new_str(graphemes[idx].to_string()))\n\n } else {\n\n Err(vm.new_index_error(\"string index out of range\".to_string()))\n\n }\n\n }\n\n None => {\n\n Err(vm.new_index_error(\"cannot fit 'int' into an index-sized integer\".to_string()))\n\n }\n\n }\n\n } else {\n\n match b.payload {\n\n PyObjectPayload::Slice { .. } => {\n\n let string = value.to_string().get_slice_items(vm, &b)?;\n\n Ok(vm.new_str(string))\n\n }\n\n _ => panic!(\n\n \"TypeError: indexing type {:?} with index {:?} is not supported (yet?)\",\n\n value, b\n\n ),\n\n }\n\n }\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 1, "score": 459052.5043323308 }, { "content": "// TODO: should with following format\n\n// class str(object='')\n\n// class str(object=b'', encoding='utf-8', errors='strict')\n\nfn str_new(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n if args.args.len() == 1 {\n\n return Ok(vm.new_str(\"\".to_string()));\n\n }\n\n\n\n if args.args.len() > 2 {\n\n panic!(\"str expects exactly one parameter\");\n\n };\n\n\n\n vm.to_str(&args.args[1])\n\n}\n\n\n\nimpl PySliceableSequence for String {\n\n fn do_slice(&self, range: Range<usize>) -> Self {\n\n to_graphemes(self)\n\n .get(range)\n\n .map_or(String::default(), |c| c.join(\"\"))\n\n }\n\n\n\n fn do_slice_reverse(&self, range: Range<usize>) -> Self {\n", "file_path": "vm/src/obj/objstr.rs", "rank": 2, "score": 453923.53827772744 }, { "content": "// cpython's isspace ignores whitespace, including \\t and \\n, etc, unless the whole string is empty\n\n// which is why isspace is using is_ascii_whitespace. Same for isupper & islower\n\nfn str_isspace(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n let value = get_value(&s);\n\n Ok(vm\n\n .ctx\n\n .new_bool(!value.is_empty() && value.chars().all(|c| c.is_ascii_whitespace())))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 3, "score": 453922.4983344757 }, { "content": "fn str_repr(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n let value = get_value(s);\n\n let quote_char = if count_char(&value, '\\'') > count_char(&value, '\"') {\n\n '\"'\n\n } else {\n\n '\\''\n\n };\n\n let mut formatted = String::new();\n\n formatted.push(quote_char);\n\n for c in value.chars() {\n\n if c == quote_char || c == '\\\\' {\n\n formatted.push('\\\\');\n\n formatted.push(c);\n\n } else if c == '\\n' {\n\n formatted.push('\\\\');\n\n formatted.push('n');\n\n } else if c == '\\t' {\n\n formatted.push('\\\\');\n\n formatted.push('t');\n", "file_path": "vm/src/obj/objstr.rs", "rank": 4, "score": 453918.4022009565 }, { "content": "fn str_gt(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(i, Some(vm.ctx.str_type())), (i2, None)]\n\n );\n\n\n\n let v1 = get_value(i);\n\n if objtype::isinstance(i2, &vm.ctx.str_type()) {\n\n Ok(vm.ctx.new_bool(v1 > get_value(i2)))\n\n } else {\n\n Err(vm.new_type_error(format!(\"Cannot compare {} and {}\", i, i2)))\n\n }\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 5, "score": 453918.4022009566 }, { "content": "fn str_rindex(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(s, Some(vm.ctx.str_type())), (sub, Some(vm.ctx.str_type()))],\n\n optional = [\n\n (start, Some(vm.ctx.int_type())),\n\n (end, Some(vm.ctx.int_type()))\n\n ]\n\n );\n\n let value = get_value(&s);\n\n let sub = get_value(&sub);\n\n let (start, end) = match get_slice(start, end, value.len()) {\n\n Ok((start, end)) => (start, end),\n\n Err(e) => return Err(vm.new_index_error(e)),\n\n };\n\n let ind: i64 = match value[start..=end].rfind(&sub) {\n\n Some(num) => num as i64,\n\n None => {\n\n return Err(vm.new_value_error(\"substring not found\".to_string()));\n\n }\n\n };\n\n Ok(vm.ctx.new_int(ind))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 6, "score": 453918.4022009566 }, { "content": "fn str_istitle(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n let value = get_value(&s);\n\n\n\n let is_titled = if value.is_empty() {\n\n false\n\n } else {\n\n value.split(' ').all(|word| word == make_title(word))\n\n };\n\n\n\n Ok(vm.ctx.new_bool(is_titled))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 7, "score": 453918.4022009566 }, { "content": "fn str_isnumeric(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n let value = get_value(&s);\n\n Ok(vm\n\n .ctx\n\n .new_bool(!value.is_empty() && value.chars().all(|c| c.is_numeric())))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 8, "score": 453918.4022009566 }, { "content": "fn str_isidentifier(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n let value = get_value(&s);\n\n let mut is_identifier: bool = true;\n\n // a string is not an identifier if it has whitespace or starts with a number\n\n if !value.chars().any(|c| c.is_ascii_whitespace())\n\n && !value.chars().nth(0).unwrap().is_digit(10)\n\n {\n\n for c in value.chars() {\n\n if c != \"_\".chars().nth(0).unwrap() && !c.is_digit(10) && !c.is_alphabetic() {\n\n is_identifier = false;\n\n }\n\n }\n\n } else {\n\n is_identifier = false;\n\n }\n\n Ok(vm.ctx.new_bool(is_identifier))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 9, "score": 453918.4022009565 }, { "content": "fn str_expandtabs(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(s, Some(vm.ctx.str_type()))],\n\n optional = [(size, Some(vm.ctx.int_type()))]\n\n );\n\n let value = get_value(&s);\n\n let tab_stop = match size {\n\n Some(num) => objint::get_value(&num).to_usize().unwrap(),\n\n None => 8 as usize,\n\n };\n\n let mut expanded_str = String::new();\n\n let mut tab_size = tab_stop;\n\n let mut col_count = 0 as usize;\n\n for ch in value.chars() {\n\n // 0x0009 is tab\n\n if ch == 0x0009 as char {\n\n let num_spaces = tab_size - col_count;\n\n col_count += num_spaces;\n", "file_path": "vm/src/obj/objstr.rs", "rank": 10, "score": 453918.4022009566 }, { "content": "fn str_zfill(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(s, Some(vm.ctx.str_type())), (len, Some(vm.ctx.int_type()))]\n\n );\n\n let value = get_value(&s);\n\n let len = objint::get_value(&len).to_usize().unwrap();\n\n let new_str = if len <= value.len() {\n\n value\n\n } else {\n\n format!(\"{}{}\", \"0\".repeat(len - value.len()), value)\n\n };\n\n Ok(vm.ctx.new_str(new_str))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 11, "score": 453918.4022009565 }, { "content": "fn str_rpartition(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(s, Some(vm.ctx.str_type())), (sub, Some(vm.ctx.str_type()))]\n\n );\n\n let value = get_value(&s);\n\n let sub = get_value(&sub);\n\n let mut new_tup = Vec::new();\n\n if value.contains(&sub) {\n\n new_tup = value\n\n .rsplitn(2, &sub)\n\n .map(|s| vm.ctx.new_str(s.to_string()))\n\n .collect();\n\n new_tup.swap(0, 1); // so it's in the right order\n\n new_tup.insert(1, vm.ctx.new_str(sub));\n\n } else {\n\n new_tup.push(vm.ctx.new_str(value));\n\n new_tup.push(vm.ctx.new_str(\"\".to_string()));\n\n new_tup.push(vm.ctx.new_str(\"\".to_string()));\n\n }\n\n Ok(vm.ctx.new_tuple(new_tup))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 12, "score": 453918.4022009566 }, { "content": "fn str_len(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n let sv = get_value(s);\n\n Ok(vm.ctx.new_int(sv.chars().count()))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 13, "score": 453918.4022009566 }, { "content": "fn str_title(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n Ok(vm.ctx.new_str(make_title(&get_value(&s))))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 14, "score": 453918.4022009566 }, { "content": "fn str_lstrip(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n let value = get_value(&s).trim_start().to_string();\n\n Ok(vm.ctx.new_str(value))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 15, "score": 453918.4022009566 }, { "content": "fn str_isdecimal(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n\n\n let value = get_value(&s);\n\n\n\n let is_decimal = if !value.is_empty() {\n\n value.chars().all(|c| c.is_ascii_digit())\n\n } else {\n\n false\n\n };\n\n\n\n Ok(vm.ctx.new_bool(is_decimal))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 16, "score": 453918.4022009566 }, { "content": "fn str_rjust(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(s, Some(vm.ctx.str_type())), (num, Some(vm.ctx.int_type()))],\n\n optional = [(rep, Some(vm.ctx.str_type()))]\n\n );\n\n let value = get_value(&s);\n\n let num = objint::get_value(&num).to_usize().unwrap();\n\n let rep = match rep {\n\n Some(st) => {\n\n let rep_str = get_value(&st);\n\n if rep_str.len() == 1 {\n\n rep_str\n\n } else {\n\n return Err(vm.new_type_error(\n\n \"The fill character must be exactly one character long\".to_string(),\n\n ));\n\n }\n\n }\n\n None => \" \".to_string(),\n\n };\n\n let new_str = format!(\"{}{}\", rep.repeat(num), value);\n\n Ok(vm.ctx.new_str(new_str))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 17, "score": 453918.4022009566 }, { "content": "fn str_rsplit(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(s, Some(vm.ctx.str_type()))],\n\n optional = [\n\n (pat, Some(vm.ctx.str_type())),\n\n (num, Some(vm.ctx.int_type()))\n\n ]\n\n );\n\n let value = get_value(&s);\n\n let pat = match pat {\n\n Some(s) => get_value(&s),\n\n None => \" \".to_string(),\n\n };\n\n let num_splits = match num {\n\n Some(n) => objint::get_value(&n).to_usize().unwrap(),\n\n None => value.split(&pat).count(),\n\n };\n\n let elements = value\n\n .rsplitn(num_splits + 1, &pat)\n\n .map(|o| vm.ctx.new_str(o.to_string()))\n\n .collect();\n\n Ok(vm.ctx.new_list(elements))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 18, "score": 453918.4022009566 }, { "content": "fn str_getitem(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(s, Some(vm.ctx.str_type())), (needle, None)]\n\n );\n\n let value = get_value(&s);\n\n subscript(vm, &value, needle.clone())\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 19, "score": 453918.4022009566 }, { "content": "fn str_isupper(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n let value = get_value(&s);\n\n Ok(vm.ctx.new_bool(\n\n !value.is_empty()\n\n && value\n\n .chars()\n\n .filter(|x| !x.is_ascii_whitespace())\n\n .all(|c| c.is_uppercase()),\n\n ))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 20, "score": 453918.4022009566 }, { "content": "fn str_lt(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(i, Some(vm.ctx.str_type())), (i2, None)]\n\n );\n\n\n\n let v1 = get_value(i);\n\n if objtype::isinstance(i2, &vm.ctx.str_type()) {\n\n Ok(vm.ctx.new_bool(v1 < get_value(i2)))\n\n } else {\n\n Err(vm.new_type_error(format!(\"Cannot compare {} and {}\", i, i2)))\n\n }\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 21, "score": 453918.4022009566 }, { "content": "fn str_isalpha(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n let value = get_value(&s);\n\n Ok(vm\n\n .ctx\n\n .new_bool(!value.is_empty() && value.chars().all(|c| c.is_alphanumeric())))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 22, "score": 453918.4022009566 }, { "content": "fn str_lower(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n let value = get_value(&s).to_lowercase();\n\n Ok(vm.ctx.new_str(value))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 23, "score": 453918.4022009566 }, { "content": "fn str_split(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(s, Some(vm.ctx.str_type()))],\n\n optional = [\n\n (pat, Some(vm.ctx.str_type())),\n\n (num, Some(vm.ctx.int_type()))\n\n ]\n\n );\n\n let value = get_value(&s);\n\n let pat = match pat {\n\n Some(s) => get_value(&s),\n\n None => \" \".to_string(),\n\n };\n\n let num_splits = match num {\n\n Some(n) => objint::get_value(&n).to_usize().unwrap(),\n\n None => value.split(&pat).count(),\n\n };\n\n let elements = value\n\n .splitn(num_splits + 1, &pat)\n\n .map(|o| vm.ctx.new_str(o.to_string()))\n\n .collect();\n\n Ok(vm.ctx.new_list(elements))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 24, "score": 453918.4022009565 }, { "content": "fn str_index(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(s, Some(vm.ctx.str_type())), (sub, Some(vm.ctx.str_type()))],\n\n optional = [\n\n (start, Some(vm.ctx.int_type())),\n\n (end, Some(vm.ctx.int_type()))\n\n ]\n\n );\n\n let value = get_value(&s);\n\n let sub = get_value(&sub);\n\n let (start, end) = match get_slice(start, end, value.len()) {\n\n Ok((start, end)) => (start, end),\n\n Err(e) => return Err(vm.new_index_error(e)),\n\n };\n\n let ind: usize = match value[start..=end].find(&sub) {\n\n Some(num) => num,\n\n None => {\n\n return Err(vm.new_value_error(\"substring not found\".to_string()));\n\n }\n\n };\n\n Ok(vm.ctx.new_int(ind))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 25, "score": 453918.4022009566 }, { "content": "fn str_swapcase(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n let value = get_value(&s);\n\n let mut swapped_str = String::with_capacity(value.len());\n\n for c in value.chars() {\n\n // to_uppercase returns an iterator, to_ascii_uppercase returns the char\n\n if c.is_lowercase() {\n\n swapped_str.push(c.to_ascii_uppercase());\n\n } else if c.is_uppercase() {\n\n swapped_str.push(c.to_ascii_lowercase());\n\n } else {\n\n swapped_str.push(c);\n\n }\n\n }\n\n Ok(vm.ctx.new_str(swapped_str))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 26, "score": 453918.4022009566 }, { "content": "fn str_replace(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [\n\n (s, Some(vm.ctx.str_type())),\n\n (old, Some(vm.ctx.str_type())),\n\n (rep, Some(vm.ctx.str_type()))\n\n ],\n\n optional = [(n, Some(vm.ctx.int_type()))]\n\n );\n\n let s = get_value(&s);\n\n let old_str = get_value(&old);\n\n let rep_str = get_value(&rep);\n\n let num_rep: usize = match n {\n\n Some(num) => objint::get_value(&num).to_usize().unwrap(),\n\n None => 1,\n\n };\n\n let new_str = s.replacen(&old_str, &rep_str, num_rep);\n\n Ok(vm.ctx.new_str(new_str))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 27, "score": 453918.4022009565 }, { "content": "fn str_isdigit(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n let value = get_value(&s);\n\n // python's isdigit also checks if exponents are digits, these are the unicodes for exponents\n\n let valid_unicodes: [u16; 10] = [\n\n 0x2070, 0x00B9, 0x00B2, 0x00B3, 0x2074, 0x2075, 0x2076, 0x2077, 0x2078, 0x2079,\n\n ];\n\n\n\n let is_digit = if value.is_empty() {\n\n false\n\n } else {\n\n value\n\n .chars()\n\n .filter(|c| !c.is_digit(10))\n\n .all(|c| valid_unicodes.contains(&(c as u16)))\n\n };\n\n\n\n Ok(vm.ctx.new_bool(is_digit))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 28, "score": 453918.4022009566 }, { "content": "fn str_eq(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(a, Some(vm.ctx.str_type())), (b, None)]\n\n );\n\n\n\n let result = if objtype::isinstance(b, &vm.ctx.str_type()) {\n\n get_value(a) == get_value(b)\n\n } else {\n\n false\n\n };\n\n Ok(vm.ctx.new_bool(result))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 29, "score": 453918.4022009565 }, { "content": "fn str_ljust(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(s, Some(vm.ctx.str_type())), (num, Some(vm.ctx.int_type()))],\n\n optional = [(rep, Some(vm.ctx.str_type()))]\n\n );\n\n let value = get_value(&s);\n\n let num = objint::get_value(&num).to_usize().unwrap();\n\n let rep = match rep {\n\n Some(st) => {\n\n let rep_str = get_value(&st);\n\n if rep_str.len() == 1 {\n\n rep_str\n\n } else {\n\n return Err(vm.new_type_error(\n\n \"The fill character must be exactly one character long\".to_string(),\n\n ));\n\n }\n\n }\n\n None => \" \".to_string(),\n\n };\n\n let new_str = format!(\"{}{}\", value, rep.repeat(num));\n\n Ok(vm.ctx.new_str(new_str))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 30, "score": 453918.4022009566 }, { "content": "fn str_hash(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(zelf, Some(vm.ctx.str_type()))]);\n\n let value = get_value(zelf);\n\n let mut hasher = std::collections::hash_map::DefaultHasher::new();\n\n value.hash(&mut hasher);\n\n let hash = hasher.finish();\n\n Ok(vm.ctx.new_int(hash))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 31, "score": 453918.4022009565 }, { "content": "fn str_partition(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(s, Some(vm.ctx.str_type())), (sub, Some(vm.ctx.str_type()))]\n\n );\n\n let value = get_value(&s);\n\n let sub = get_value(&sub);\n\n let mut new_tup = Vec::new();\n\n if value.contains(&sub) {\n\n new_tup = value\n\n .splitn(2, &sub)\n\n .map(|s| vm.ctx.new_str(s.to_string()))\n\n .collect();\n\n new_tup.insert(1, vm.ctx.new_str(sub));\n\n } else {\n\n new_tup.push(vm.ctx.new_str(value));\n\n new_tup.push(vm.ctx.new_str(\"\".to_string()));\n\n new_tup.push(vm.ctx.new_str(\"\".to_string()));\n\n }\n\n Ok(vm.ctx.new_tuple(new_tup))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 32, "score": 453918.4022009566 }, { "content": "fn str_rstrip(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n let value = get_value(&s).trim_end().to_string();\n\n Ok(vm.ctx.new_str(value))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 33, "score": 453918.4022009566 }, { "content": "fn str_contains(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [\n\n (s, Some(vm.ctx.str_type())),\n\n (needle, Some(vm.ctx.str_type()))\n\n ]\n\n );\n\n let value = get_value(&s);\n\n let needle = get_value(&needle);\n\n Ok(vm.ctx.new_bool(value.contains(needle.as_str())))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 34, "score": 453918.4022009566 }, { "content": "fn str_isalnum(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n let value = get_value(&s);\n\n Ok(vm\n\n .ctx\n\n .new_bool(!value.is_empty() && value.chars().all(|c| c.is_alphanumeric())))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 35, "score": 453918.4022009566 }, { "content": "fn str_join(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(s, Some(vm.ctx.str_type())), (iterable, None)]\n\n );\n\n let value = get_value(&s);\n\n let elements: Vec<String> = vm\n\n .extract_elements(iterable)?\n\n .iter()\n\n .map(|w| get_value(&w))\n\n .collect();\n\n let joined = elements.join(&value);\n\n Ok(vm.ctx.new_str(joined))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 36, "score": 453918.4022009566 }, { "content": "// doesn't implement keep new line delimiter just yet\n\nfn str_splitlines(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n let elements = get_value(&s)\n\n .split('\\n')\n\n .map(|e| vm.ctx.new_str(e.to_string()))\n\n .collect();\n\n Ok(vm.ctx.new_list(elements))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 37, "score": 453918.4022009566 }, { "content": "fn str_add(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(s, Some(vm.ctx.str_type())), (s2, None)]\n\n );\n\n if objtype::isinstance(s2, &vm.ctx.str_type()) {\n\n Ok(vm\n\n .ctx\n\n .new_str(format!(\"{}{}\", get_value(&s), get_value(&s2))))\n\n } else {\n\n Err(vm.new_type_error(format!(\"Cannot add {} and {}\", s, s2)))\n\n }\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 38, "score": 453918.4022009566 }, { "content": "fn str_islower(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n let value = get_value(&s);\n\n Ok(vm.ctx.new_bool(\n\n !value.is_empty()\n\n && value\n\n .chars()\n\n .filter(|x| !x.is_ascii_whitespace())\n\n .all(|c| c.is_lowercase()),\n\n ))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 39, "score": 453918.4022009566 }, { "content": "fn str_find(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(s, Some(vm.ctx.str_type())), (sub, Some(vm.ctx.str_type()))],\n\n optional = [\n\n (start, Some(vm.ctx.int_type())),\n\n (end, Some(vm.ctx.int_type()))\n\n ]\n\n );\n\n let value = get_value(&s);\n\n let sub = get_value(&sub);\n\n let (start, end) = match get_slice(start, end, value.len()) {\n\n Ok((start, end)) => (start, end),\n\n Err(e) => return Err(vm.new_index_error(e)),\n\n };\n\n let ind: i128 = match value[start..=end].find(&sub) {\n\n Some(num) => num as i128,\n\n None => -1 as i128,\n\n };\n\n Ok(vm.ctx.new_int(ind))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 40, "score": 453918.4022009566 }, { "content": "fn str_isascii(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n let value = get_value(&s);\n\n Ok(vm\n\n .ctx\n\n .new_bool(!value.is_empty() && value.chars().all(|c| c.is_ascii())))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 41, "score": 453918.4022009566 }, { "content": "fn str_le(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(i, Some(vm.ctx.str_type())), (i2, None)]\n\n );\n\n\n\n let v1 = get_value(i);\n\n if objtype::isinstance(i2, &vm.ctx.str_type()) {\n\n Ok(vm.ctx.new_bool(v1 <= get_value(i2)))\n\n } else {\n\n Err(vm.new_type_error(format!(\"Cannot compare {} and {}\", i, i2)))\n\n }\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 42, "score": 453918.4022009566 }, { "content": "fn str_count(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(s, Some(vm.ctx.str_type())), (sub, Some(vm.ctx.str_type()))],\n\n optional = [\n\n (start, Some(vm.ctx.int_type())),\n\n (end, Some(vm.ctx.int_type()))\n\n ]\n\n );\n\n let value = get_value(&s);\n\n let sub = get_value(&sub);\n\n let (start, end) = match get_slice(start, end, value.len()) {\n\n Ok((start, end)) => (start, end),\n\n Err(e) => return Err(vm.new_index_error(e)),\n\n };\n\n let num_occur: usize = value[start..end].matches(&sub).count();\n\n Ok(vm.ctx.new_int(num_occur))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 43, "score": 453918.4022009565 }, { "content": "fn str_upper(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n let value = get_value(&s).to_uppercase();\n\n Ok(vm.ctx.new_str(value))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 44, "score": 453918.4022009566 }, { "content": "// casefold is much more aggressive than lower\n\nfn str_casefold(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n let value = get_value(&s);\n\n let folded_str: String = caseless::default_case_fold_str(&value);\n\n Ok(vm.ctx.new_str(folded_str))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 45, "score": 453918.4022009566 }, { "content": "fn str_center(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(s, Some(vm.ctx.str_type())), (len, Some(vm.ctx.int_type()))],\n\n optional = [(chars, Some(vm.ctx.str_type()))]\n\n );\n\n let value = get_value(&s);\n\n let len = objint::get_value(&len).to_usize().unwrap();\n\n let rep_char = match chars {\n\n Some(c) => get_value(&c),\n\n None => \" \".to_string(),\n\n };\n\n let left_buff: usize = (len - value.len()) / 2;\n\n let right_buff = len - value.len() - left_buff;\n\n let new_str = format!(\n\n \"{}{}{}\",\n\n rep_char.repeat(left_buff),\n\n value,\n\n rep_char.repeat(right_buff)\n\n );\n\n Ok(vm.ctx.new_str(new_str))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 46, "score": 453918.4022009566 }, { "content": "fn str_strip(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n let value = get_value(&s).trim().to_string();\n\n Ok(vm.ctx.new_str(value))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 47, "score": 453918.4022009566 }, { "content": "fn str_ge(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(i, Some(vm.ctx.str_type())), (i2, None)]\n\n );\n\n\n\n let v1 = get_value(i);\n\n if objtype::isinstance(i2, &vm.ctx.str_type()) {\n\n Ok(vm.ctx.new_bool(v1 >= get_value(i2)))\n\n } else {\n\n Err(vm.new_type_error(format!(\"Cannot compare {} and {}\", i, i2)))\n\n }\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 48, "score": 453918.4022009566 }, { "content": "fn str_capitalize(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(s, Some(vm.ctx.str_type()))]);\n\n let value = get_value(&s);\n\n let (first_part, lower_str) = value.split_at(1);\n\n let capitalized = format!(\"{}{}\", first_part.to_uppercase(), lower_str);\n\n Ok(vm.ctx.new_str(capitalized))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 49, "score": 453918.4022009566 }, { "content": "fn str_rfind(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(s, Some(vm.ctx.str_type())), (sub, Some(vm.ctx.str_type()))],\n\n optional = [\n\n (start, Some(vm.ctx.int_type())),\n\n (end, Some(vm.ctx.int_type()))\n\n ]\n\n );\n\n let value = get_value(&s);\n\n let sub = get_value(&sub);\n\n let (start, end) = match get_slice(start, end, value.len()) {\n\n Ok((start, end)) => (start, end),\n\n Err(e) => return Err(vm.new_index_error(e)),\n\n };\n\n let ind = match value[start..=end].rfind(&sub) {\n\n Some(num) => num as i128,\n\n None => -1 as i128,\n\n };\n\n Ok(vm.ctx.new_int(ind))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 50, "score": 453918.4022009566 }, { "content": "fn str_startswith(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(s, Some(vm.ctx.str_type())), (pat, Some(vm.ctx.str_type()))]\n\n );\n\n let value = get_value(&s);\n\n let pat = get_value(&pat);\n\n Ok(vm.ctx.new_bool(value.starts_with(pat.as_str())))\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 51, "score": 453918.4022009566 }, { "content": "fn str_mul(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(s, Some(vm.ctx.str_type())), (s2, None)]\n\n );\n\n if objtype::isinstance(s2, &vm.ctx.int_type()) {\n\n let value1 = get_value(&s);\n\n let value2 = objint::get_value(s2).to_i32().unwrap();\n\n let mut result = String::new();\n\n for _x in 0..value2 {\n\n result.push_str(value1.as_str());\n\n }\n\n Ok(vm.ctx.new_str(result))\n\n } else {\n\n Err(vm.new_type_error(format!(\"Cannot multiply {} and {}\", s, s2)))\n\n }\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 52, "score": 453918.4022009566 }, { "content": "fn str_format(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n if args.args.is_empty() {\n\n return Err(\n\n vm.new_type_error(\"descriptor 'format' of 'str' object needs an argument\".to_string())\n\n );\n\n }\n\n\n\n let zelf = &args.args[0];\n\n if !objtype::isinstance(&zelf, &vm.ctx.str_type()) {\n\n let zelf_typ = zelf.typ();\n\n let actual_type = vm.to_pystr(&zelf_typ)?;\n\n return Err(vm.new_type_error(format!(\n\n \"descriptor 'format' requires a 'str' object but received a '{}'\",\n\n actual_type\n\n )));\n\n }\n\n let format_string_text = get_value(zelf);\n\n match FormatString::from_str(format_string_text.as_str()) {\n\n Ok(format_string) => perform_format(vm, &format_string, &args),\n\n Err(err) => match err {\n\n FormatParseError::UnmatchedBracket => {\n\n Err(vm.new_value_error(\"expected '}' before end of string\".to_string()))\n\n }\n\n _ => Err(vm.new_value_error(\"Unexpected error parsing format string\".to_string())),\n\n },\n\n }\n\n}\n\n\n", "file_path": "vm/src/obj/objstr.rs", "rank": 53, "score": 453918.4022009566 }, { "content": "pub fn pyresult_to_jsresult(vm: &mut VirtualMachine, result: PyResult) -> Result<JsValue, JsValue> {\n\n result\n\n .map(|value| py_to_js(vm, value))\n\n .map_err(|err| py_str_err(vm, &err).into())\n\n}\n\n\n", "file_path": "wasm/lib/src/convert.rs", "rank": 54, "score": 447495.015150854 }, { "content": "fn time_time(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args);\n\n let x = match SystemTime::now().duration_since(UNIX_EPOCH) {\n\n Ok(v) => duration_to_f64(v),\n\n Err(err) => panic!(\"Error: {:?}\", err),\n\n };\n\n let value = vm.ctx.new_float(x);\n\n Ok(value)\n\n}\n\n\n", "file_path": "vm/src/stdlib/time_module.rs", "rank": 55, "score": 430661.9212999416 }, { "content": "/// Implement json.dumps\n\nfn json_dumps(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n // TODO: Implement non-trivial serialisation case\n\n arg_check!(vm, args, required = [(obj, None)]);\n\n let res = {\n\n let serializer = PyObjectSerializer { pyobject: obj, vm };\n\n serde_json::to_string(&serializer)\n\n };\n\n let string = res.map_err(|err| vm.new_type_error(format!(\"{}\", err)))?;\n\n Ok(vm.context().new_str(string))\n\n}\n\n\n", "file_path": "vm/src/stdlib/json.rs", "rank": 56, "score": 428818.33762163157 }, { "content": "/// Implement json.loads\n\nfn json_loads(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n // TODO: Implement non-trivial deserialization case\n\n arg_check!(vm, args, required = [(string, Some(vm.ctx.str_type()))]);\n\n let res = {\n\n let de = PyObjectDeserializer { vm };\n\n // TODO: Support deserializing string sub-classes\n\n de.deserialize(&mut serde_json::Deserializer::from_str(&objstr::get_value(\n\n &string,\n\n )))\n\n };\n\n\n\n res.map_err(|err| {\n\n let json_decode_error = vm\n\n .sys_module\n\n .get_item(\"modules\")\n\n .unwrap()\n\n .get_item(\"json\")\n\n .unwrap()\n\n .get_item(\"JSONDecodeError\")\n\n .unwrap();\n\n let exc = vm.new_exception(json_decode_error, format!(\"{}\", err));\n\n vm.ctx.set_attr(&exc, \"lineno\", vm.ctx.new_int(err.line()));\n\n vm.ctx.set_attr(&exc, \"colno\", vm.ctx.new_int(err.column()));\n\n exc\n\n })\n\n}\n\n\n", "file_path": "vm/src/stdlib/json.rs", "rank": 57, "score": 428818.33762163157 }, { "content": "fn parse_format_type(text: &str) -> (Option<FormatType>, &str) {\n\n let mut chars = text.chars();\n\n match chars.next() {\n\n Some('b') => (Some(FormatType::Binary), chars.as_str()),\n\n Some('c') => (Some(FormatType::Character), chars.as_str()),\n\n Some('d') => (Some(FormatType::Decimal), chars.as_str()),\n\n Some('o') => (Some(FormatType::Octal), chars.as_str()),\n\n Some('x') => (Some(FormatType::HexLower), chars.as_str()),\n\n Some('X') => (Some(FormatType::HexUpper), chars.as_str()),\n\n Some('e') => (Some(FormatType::ExponentLower), chars.as_str()),\n\n Some('E') => (Some(FormatType::ExponentUpper), chars.as_str()),\n\n Some('f') => (Some(FormatType::FixedPointLower), chars.as_str()),\n\n Some('F') => (Some(FormatType::FixedPointUpper), chars.as_str()),\n\n Some('g') => (Some(FormatType::GeneralFormatLower), chars.as_str()),\n\n Some('G') => (Some(FormatType::GeneralFormatUpper), chars.as_str()),\n\n Some('n') => (Some(FormatType::Number), chars.as_str()),\n\n _ => (None, text),\n\n }\n\n}\n\n\n", "file_path": "vm/src/format.rs", "rank": 58, "score": 428242.15808355703 }, { "content": "fn contains(vm: &mut VirtualMachine, args: PyFuncArgs, iter_type: PyObjectRef) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(iter, Some(iter_type)), (needle, None)]\n\n );\n\n loop {\n\n if let Some(element) = get_next_object(vm, iter)? {\n\n let equal = vm._eq(needle.clone(), element.clone())?;\n\n if objbool::get_value(&equal) {\n\n return Ok(vm.new_bool(true));\n\n } else {\n\n continue;\n\n }\n\n } else {\n\n return Ok(vm.new_bool(false));\n\n }\n\n }\n\n}\n\n\n", "file_path": "vm/src/obj/objiter.rs", "rank": 59, "score": 426024.00553042133 }, { "content": "fn time_sleep(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(seconds, Some(vm.ctx.float_type()))]);\n\n let seconds = objfloat::get_value(seconds);\n\n let secs: u64 = seconds.trunc() as u64;\n\n let nanos: u32 = (seconds.fract() * 1e9) as u32;\n\n let duration = Duration::new(secs, nanos);\n\n thread::sleep(duration);\n\n Ok(vm.get_none())\n\n}\n\n\n", "file_path": "vm/src/stdlib/time_module.rs", "rank": 60, "score": 424374.5510269846 }, { "content": "fn types_new_class(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(name, Some(vm.ctx.str_type()))],\n\n optional = [(bases, None), (_kwds, None), (_exec_body, None)]\n\n );\n\n\n\n let name = objstr::get_value(name);\n\n\n\n let bases = match bases {\n\n Some(b) => {\n\n if objtype::isinstance(b, &vm.ctx.tuple_type()) {\n\n objsequence::get_elements(b).to_vec()\n\n } else {\n\n return Err(vm.new_type_error(\"Bases must be a tuple\".to_string()));\n\n }\n\n }\n\n None => vec![vm.ctx.object()],\n\n };\n\n\n\n objtype::new(vm.ctx.type_type(), &name, bases, PyAttributes::new())\n\n}\n\n\n", "file_path": "vm/src/stdlib/types.rs", "rank": 61, "score": 424271.02978553646 }, { "content": "fn builtin_id(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(obj, None)]);\n\n\n\n Ok(vm.context().new_int(obj.get_id()))\n\n}\n\n\n\n// builtin_input\n\n\n", "file_path": "vm/src/builtins.rs", "rank": 62, "score": 421124.16189497337 }, { "content": "pub fn type_call(vm: &mut VirtualMachine, mut args: PyFuncArgs) -> PyResult {\n\n debug!(\"type_call: {:?}\", args);\n\n let cls = args.shift();\n\n let new = cls.get_attr(\"__new__\").unwrap();\n\n let new_wrapped = vm.call_get_descriptor(new, cls)?;\n\n let obj = vm.invoke(new_wrapped, args.clone())?;\n\n\n\n if let Ok(init) = vm.get_method(obj.clone(), \"__init__\") {\n\n let res = vm.invoke(init, args)?;\n\n if !res.is(&vm.get_none()) {\n\n return Err(vm.new_type_error(\"__init__ must return None\".to_string()));\n\n }\n\n }\n\n Ok(obj)\n\n}\n\n\n", "file_path": "vm/src/obj/objtype.rs", "rank": 63, "score": 420918.67362137605 }, { "content": "fn exception_str(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(exc, Some(vm.ctx.exceptions.exception_type.clone()))]\n\n );\n\n let type_name = objtype::get_type_name(&exc.typ());\n\n let msg = if let Some(m) = exc.get_attr(\"msg\") {\n\n match vm.to_pystr(&m) {\n\n Ok(msg) => msg,\n\n _ => \"<exception str() failed>\".to_string(),\n\n }\n\n } else {\n\n panic!(\"Error message must be set\");\n\n };\n\n let s = format!(\"{}: {}\", type_name, msg);\n\n Ok(vm.new_str(s))\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "vm/src/exceptions.rs", "rank": 64, "score": 420852.5521199148 }, { "content": "fn float_is_integer(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(i, Some(vm.ctx.float_type()))]);\n\n let v = get_value(i);\n\n let result = v == v.round();\n\n Ok(vm.ctx.new_bool(result))\n\n}\n\n\n", "file_path": "vm/src/obj/objfloat.rs", "rank": 65, "score": 416409.592795121 }, { "content": "fn bind_method(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(function, None), (obj, None), (cls, None)]\n\n );\n\n\n\n if obj.is(&vm.get_none()) && !cls.is(&obj.typ()) {\n\n Ok(function.clone())\n\n } else {\n\n Ok(vm.ctx.new_bound_method(function.clone(), obj.clone()))\n\n }\n\n}\n\n\n", "file_path": "vm/src/obj/objfunction.rs", "rank": 66, "score": 416409.5927951209 }, { "content": "fn re_match(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n // TODO:\n\n error!(\"TODO: implement match\");\n\n re_search(vm, args)\n\n}\n\n\n", "file_path": "vm/src/stdlib/re.rs", "rank": 67, "score": 416406.4373419866 }, { "content": "fn none_new(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(_zelf, Some(vm.ctx.type_type.clone()))]\n\n );\n\n Ok(vm.get_none())\n\n}\n\n\n", "file_path": "vm/src/obj/objnone.rs", "rank": 68, "score": 416397.7708233858 }, { "content": "fn none_repr(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(_zelf, Some(vm.ctx.none().typ()))]);\n\n Ok(vm.ctx.new_str(\"None\".to_string()))\n\n}\n", "file_path": "vm/src/obj/objnone.rs", "rank": 69, "score": 416397.7708233858 }, { "content": "fn type_repr(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(obj, Some(vm.ctx.type_type()))]);\n\n let type_name = get_type_name(&obj);\n\n Ok(vm.new_str(format!(\"<class '{}'>\", type_name)))\n\n}\n\n\n", "file_path": "vm/src/obj/objtype.rs", "rank": 70, "score": 416318.83017171756 }, { "content": "fn type_prepare(vm: &mut VirtualMachine, _args: PyFuncArgs) -> PyResult {\n\n Ok(vm.new_dict())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{linearise_mro, new};\n\n use super::{HashMap, IdProtocol, PyContext, PyObjectRef};\n\n\n\n fn map_ids(obj: Option<Vec<PyObjectRef>>) -> Option<Vec<usize>> {\n\n match obj {\n\n Some(vec) => Some(vec.into_iter().map(|x| x.get_id()).collect()),\n\n None => None,\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_linearise() {\n\n let context = PyContext::new();\n\n let object = context.object;\n", "file_path": "vm/src/obj/objtype.rs", "rank": 71, "score": 416318.83017171756 }, { "content": "fn type_mro(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [\n\n (cls, Some(vm.ctx.type_type())),\n\n (_typ, Some(vm.ctx.type_type()))\n\n ]\n\n );\n\n match _mro(cls.clone()) {\n\n Some(mro) => Ok(vm.context().new_tuple(mro)),\n\n None => Err(vm.new_type_error(\"Only classes have an MRO.\".to_string())),\n\n }\n\n}\n\n\n", "file_path": "vm/src/obj/objtype.rs", "rank": 72, "score": 416318.83017171756 }, { "content": "fn object_str(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(zelf, Some(vm.ctx.object()))]);\n\n vm.call_method(zelf, \"__repr__\", vec![])\n\n}\n\n\n", "file_path": "vm/src/obj/objobject.rs", "rank": 73, "score": 416139.98907505046 }, { "content": "/// Retrieve a key from dict contents:\n\npub fn content_get_key_str(elements: &DictContentType, key: &str) -> Option<PyObjectRef> {\n\n // TODO: let hash: usize = key;\n\n match elements.get(key) {\n\n Some(v) => Some(v.1.clone()),\n\n None => None,\n\n }\n\n}\n\n\n", "file_path": "vm/src/obj/objdict.rs", "rank": 74, "score": 415959.6216477321 }, { "content": "fn int_pass_value(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(i, Some(vm.ctx.int_type()))]);\n\n Ok(vm.ctx.new_int(get_value(i)))\n\n}\n\n\n", "file_path": "vm/src/obj/objint.rs", "rank": 75, "score": 411767.0169058291 }, { "content": "fn get_property_value(vm: &mut VirtualMachine, value: &Option<BigInt>) -> PyResult {\n\n if let Some(value) = value {\n\n Ok(vm.ctx.new_int(value.clone()))\n\n } else {\n\n Ok(vm.get_none())\n\n }\n\n}\n\n\n", "file_path": "vm/src/obj/objslice.rs", "rank": 76, "score": 410641.32730937074 }, { "content": "fn text_io_wrapper_init(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(text_io_wrapper, None), (buffer, None)]\n\n );\n\n\n\n vm.ctx.set_attr(&text_io_wrapper, \"buffer\", buffer.clone());\n\n Ok(vm.get_none())\n\n}\n\n\n", "file_path": "vm/src/stdlib/io.rs", "rank": 77, "score": 407414.37988065265 }, { "content": "fn text_io_base_read(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(text_io_base, None)]);\n\n\n\n let raw = vm.ctx.get_attr(&text_io_base, \"buffer\").unwrap();\n\n let read = vm.get_method(raw.clone(), &\"read\".to_string());\n\n\n\n if let Ok(bytes) = vm.invoke(read.unwrap(), PyFuncArgs::default()) {\n\n let value = objbytes::get_value(&bytes).to_vec();\n\n\n\n //format bytes into string\n\n let rust_string = String::from_utf8(value).unwrap();\n\n Ok(vm.ctx.new_str(rust_string))\n\n } else {\n\n Err(vm.new_value_error(\"Error unpacking Bytes\".to_string()))\n\n }\n\n}\n\n\n", "file_path": "vm/src/stdlib/io.rs", "rank": 78, "score": 407414.37988065265 }, { "content": "pub fn type_new(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n debug!(\"type.__new__ {:?}\", args);\n\n if args.args.len() == 2 {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [(_typ, Some(vm.ctx.type_type())), (obj, None)]\n\n );\n\n Ok(obj.typ())\n\n } else if args.args.len() == 4 {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [\n\n (typ, Some(vm.ctx.type_type())),\n\n (name, Some(vm.ctx.str_type())),\n\n (bases, None),\n\n (dict, Some(vm.ctx.dict_type()))\n\n ]\n\n );\n", "file_path": "vm/src/obj/objtype.rs", "rank": 79, "score": 407081.5618800345 }, { "content": "pub fn type_getattribute(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [\n\n (cls, Some(vm.ctx.object())),\n\n (name_str, Some(vm.ctx.str_type()))\n\n ]\n\n );\n\n let name = objstr::get_value(&name_str);\n\n trace!(\"type.__getattribute__({:?}, {:?})\", cls, name);\n\n let mcl = cls.typ();\n\n\n\n if let Some(attr) = mcl.get_attr(&name) {\n\n let attr_class = attr.typ();\n\n if attr_class.has_attr(\"__set__\") {\n\n if let Some(descriptor) = attr_class.get_attr(\"__get__\") {\n\n return vm.invoke(\n\n descriptor,\n\n PyFuncArgs {\n", "file_path": "vm/src/obj/objtype.rs", "rank": 80, "score": 407081.5618800345 }, { "content": "pub fn builtin_print_html(vm: &mut VirtualMachine, args: PyFuncArgs, selector: &str) -> PyResult {\n\n let output = format_print_args(vm, args)?;\n\n print_to_html(&output, selector).map_err(|err| convert::js_to_py(vm, err))?;\n\n Ok(vm.get_none())\n\n}\n\n\n", "file_path": "wasm/lib/src/wasm_builtins.rs", "rank": 81, "score": 406103.01296760666 }, { "content": "fn calc_hash(vm: &mut VirtualMachine, key: &PyObjectRef) -> Result<usize, PyObjectRef> {\n\n let hash = vm.call_method(key, \"__hash__\", vec![])?;\n\n Ok(objint::get_value(&hash).to_usize().unwrap())\n\n}\n\n\n", "file_path": "vm/src/dictdatatype.rs", "rank": 82, "score": 402382.9135258538 }, { "content": "fn bool_repr(vm: &mut VirtualMachine, args: PyFuncArgs) -> Result<PyObjectRef, PyObjectRef> {\n\n arg_check!(vm, args, required = [(obj, Some(vm.ctx.bool_type()))]);\n\n let v = get_value(obj);\n\n let s = if v {\n\n \"True\".to_string()\n\n } else {\n\n \"False\".to_string()\n\n };\n\n Ok(vm.new_str(s))\n\n}\n\n\n", "file_path": "vm/src/obj/objbool.rs", "rank": 83, "score": 401434.9219122359 }, { "content": "fn builtin_sorted(vm: &mut VirtualMachine, mut args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(iterable, None)]);\n\n let items = vm.extract_elements(iterable)?;\n\n let lst = vm.ctx.new_list(items);\n\n\n\n args.shift();\n\n vm.call_method_pyargs(&lst, \"sort\", args)?;\n\n Ok(lst)\n\n}\n\n\n", "file_path": "vm/src/builtins.rs", "rank": 85, "score": 396054.67048357823 }, { "content": "fn linearise_mro(mut bases: Vec<Vec<PyObjectRef>>) -> Option<Vec<PyObjectRef>> {\n\n debug!(\"Linearising MRO: {:?}\", bases);\n\n let mut result = vec![];\n\n loop {\n\n if (&bases).iter().all(|x| x.is_empty()) {\n\n break;\n\n }\n\n match take_next_base(bases) {\n\n Some((head, new_bases)) => {\n\n result.push(head);\n\n bases = new_bases;\n\n }\n\n None => return None,\n\n }\n\n }\n\n Some(result)\n\n}\n\n\n", "file_path": "vm/src/obj/objtype.rs", "rank": 86, "score": 395391.5709210001 }, { "content": "fn get_int(vm: &mut VirtualMachine, arg: &PyObjectRef) -> Result<BigInt, PyObjectRef> {\n\n objint::to_int(vm, arg, 10)\n\n}\n\n\n", "file_path": "vm/src/stdlib/pystruct.rs", "rank": 87, "score": 395126.1467932501 }, { "content": "// Special case for the case when requesting a str key from a dict:\n\npub fn get_key_str(dict: &PyObjectRef, key: &str) -> Option<PyObjectRef> {\n\n let elements = get_elements(dict);\n\n content_get_key_str(&elements, key)\n\n}\n\n\n", "file_path": "vm/src/obj/objdict.rs", "rank": 88, "score": 394266.36185585975 }, { "content": "fn frame_idx(vm: &mut VirtualMachine, offset: Option<&PyObjectRef>) -> Result<usize, PyObjectRef> {\n\n if let Some(int) = offset {\n\n if let Some(offset) = objint::get_value(&int).to_usize() {\n\n if offset > vm.frames.len() - 1 {\n\n return Err(vm.new_value_error(\"call stack is not deep enough\".to_string()));\n\n }\n\n return Ok(offset);\n\n }\n\n }\n\n return Ok(0);\n\n}\n\n\n", "file_path": "vm/src/sysmodule.rs", "rank": 89, "score": 392956.56815314514 }, { "content": "fn member_get(vm: &mut VirtualMachine, mut args: PyFuncArgs) -> PyResult {\n\n match args.shift().get_attr(\"function\") {\n\n Some(function) => vm.invoke(function, args),\n\n None => {\n\n let attribute_error = vm.context().exceptions.attribute_error.clone();\n\n Err(vm.new_exception(attribute_error, String::from(\"Attribute Error\")))\n\n }\n\n }\n\n}\n\n\n", "file_path": "vm/src/obj/objfunction.rs", "rank": 91, "score": 392283.4592862773 }, { "content": "pub fn make_float(vm: &mut VirtualMachine, obj: &PyObjectRef) -> Result<f64, PyObjectRef> {\n\n if objtype::isinstance(obj, &vm.ctx.float_type()) {\n\n Ok(get_value(obj))\n\n } else if let Ok(method) = vm.get_method(obj.clone(), \"__float__\") {\n\n let res = vm.invoke(\n\n method,\n\n PyFuncArgs {\n\n args: vec![],\n\n kwargs: vec![],\n\n },\n\n )?;\n\n Ok(get_value(&res))\n\n } else {\n\n Err(vm.new_type_error(format!(\"Cannot cast {} to float\", obj)))\n\n }\n\n}\n\n\n", "file_path": "vm/src/obj/objfloat.rs", "rank": 92, "score": 390109.1879912873 }, { "content": "fn parse_grouping_option(text: &str) -> (Option<FormatGrouping>, &str) {\n\n let mut chars = text.chars();\n\n match chars.next() {\n\n Some('_') => (Some(FormatGrouping::Underscore), chars.as_str()),\n\n Some(',') => (Some(FormatGrouping::Comma), chars.as_str()),\n\n _ => (None, text),\n\n }\n\n}\n\n\n", "file_path": "vm/src/format.rs", "rank": 93, "score": 387242.8830281856 }, { "content": "fn builtin_all(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(iterable, None)]);\n\n let items = vm.extract_elements(iterable)?;\n\n for item in items {\n\n let result = objbool::boolval(vm, item)?;\n\n if !result {\n\n return Ok(vm.new_bool(false));\n\n }\n\n }\n\n Ok(vm.new_bool(true))\n\n}\n\n\n", "file_path": "vm/src/builtins.rs", "rank": 94, "score": 386528.1075137666 }, { "content": "fn builtin_any(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(vm, args, required = [(iterable, None)]);\n\n let items = vm.extract_elements(iterable)?;\n\n for item in items {\n\n let result = objbool::boolval(vm, item)?;\n\n if result {\n\n return Ok(vm.new_bool(true));\n\n }\n\n }\n\n Ok(vm.new_bool(false))\n\n}\n\n\n\n// builtin_ascii\n\n\n", "file_path": "vm/src/builtins.rs", "rank": 95, "score": 386528.1075137666 }, { "content": "fn getframe(vm: &mut VirtualMachine, args: PyFuncArgs) -> PyResult {\n\n arg_check!(\n\n vm,\n\n args,\n\n required = [],\n\n optional = [(offset, Some(vm.ctx.int_type()))]\n\n );\n\n\n\n let idx = frame_idx(vm, offset)?;\n\n let idx = vm.frames.len() - idx - 1;\n\n let frame = &vm.frames[idx];\n\n Ok(frame.clone())\n\n}\n\n\n", "file_path": "vm/src/sysmodule.rs", "rank": 96, "score": 386528.1075137666 }, { "content": "pub fn builtin_build_class_(vm: &mut VirtualMachine, mut args: PyFuncArgs) -> PyResult {\n\n let function = args.shift();\n\n let name_arg = args.shift();\n\n let bases = args.args.clone();\n\n let mut metaclass = args.get_kwarg(\"metaclass\", vm.get_type());\n\n\n\n for base in bases.clone() {\n\n if objtype::issubclass(&base.typ(), &metaclass) {\n\n metaclass = base.typ();\n\n } else if !objtype::issubclass(&metaclass, &base.typ()) {\n\n return Err(vm.new_type_error(\"metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases\".to_string()));\n\n }\n\n }\n\n\n\n let bases = vm.context().new_tuple(bases);\n\n\n\n // Prepare uses full __getattribute__ resolution chain.\n\n let prepare_name = vm.new_str(\"__prepare__\".to_string());\n\n let prepare = vm.get_attribute(metaclass.clone(), prepare_name)?;\n\n let namespace = vm.invoke(\n", "file_path": "vm/src/builtins.rs", "rank": 97, "score": 384410.267199525 }, { "content": "pub fn new_instance(vm: &mut VirtualMachine, mut args: PyFuncArgs) -> PyResult {\n\n // more or less __new__ operator\n\n let type_ref = args.shift();\n\n let obj = vm.ctx.new_instance(type_ref.clone(), None);\n\n Ok(obj)\n\n}\n\n\n", "file_path": "vm/src/obj/objobject.rs", "rank": 98, "score": 384410.267199525 }, { "content": "fn parse_number(text: &str) -> (Option<usize>, &str) {\n\n let num_digits: usize = get_num_digits(text);\n\n if num_digits == 0 {\n\n return (None, text);\n\n }\n\n // This should never fail\n\n (\n\n Some(text[..num_digits].parse::<usize>().unwrap()),\n\n &text[num_digits..],\n\n )\n\n}\n\n\n", "file_path": "vm/src/format.rs", "rank": 99, "score": 383363.28553117375 } ]
Rust
src/serializer.rs
aep/korhal-image
6aa0cce631efe530d5f94a1b893c0efdc7712fec
use blockstore::{Block, BlockStore, BlockShard}; use chunker::*; use index::*; use pbr::ProgressBar; use readchain::{Take,Chain}; use serde::{Serialize, Deserialize}; use std::ffi::OsString; use std::io::{Stdout, Seek, SeekFrom, BufReader}; use std::path::Path; use std::fs::File; use elfkit; use sha2::{Sha256, Digest}; use std::io::Read; macro_rules! kb_fmt { ($n: ident) => {{ let kb = 1024f64; match $n as f64{ $n if $n >= kb.powf(4_f64) => format!("{:.*} TB", 2, $n / kb.powf(4_f64)), $n if $n >= kb.powf(3_f64) => format!("{:.*} GB", 2, $n / kb.powf(3_f64)), $n if $n >= kb.powf(2_f64) => format!("{:.*} MB", 2, $n / kb.powf(2_f64)), $n if $n >= kb => format!("{:.*} KB", 2, $n / kb), _ => format!("{:.*} B", 0, $n) } }} } impl Index { pub fn store_inodes(&mut self, blockstore: &mut BlockStore) { let total_bytes = self.i.iter().fold(0, |acc, ref x| acc + x.size); let mut bar = ProgressBar::new(total_bytes); bar.set_units(::pbr::Units::Bytes); let mut new_bytes = 0; let mut new_blocks = 0; let mut total_blocks = 0; let mut inodes = self.i.to_vec(); for i in &mut inodes { if i.kind != 2 { continue; } let mut host_file = File::open(&i.host_path).unwrap(); let cuts = match elfkit::Elf::from_reader(&mut host_file) { Err(_) => None, Ok(mut elf) => { let mut r = None; for sec in elf.sections.drain(..) { if sec.header.shtype == elfkit::types::SectionType(0x6fffff01) { let mut rr = Vec::new(); let mut io = &sec.content.into_raw().unwrap()[..]; while let Ok(o) = elf_read_u32!(&elf.header, io) { rr.push(o as usize); } r = Some(rr); } } r } }; match cuts { None => {}, Some(mut cuts) => { i.kind = 3; let mut at = 0; cuts.push(host_file.metadata().unwrap().len() as usize); cuts.sort_unstable(); host_file.seek(SeekFrom::Start(0)); for cut in cuts { let mut buf = vec![0;cut - at]; host_file.read_exact(&mut buf).unwrap(); let hash = Sha256::digest(&buf).as_slice().to_vec(); if blockstore.insert(hash.clone(), Block { shards: vec![BlockShard{ file: i.host_path.clone(), offset: at, size: buf.len(), }], size: buf.len(), }) { new_blocks +=1; new_bytes += buf.len(); } total_blocks += 1; bar.add(buf.len() as u64); if let None = self.i[i.inode as usize].content { self.i[i.inode as usize].content = Some(Vec::new()); } self.i[i.inode as usize].content.as_mut().unwrap().push(ContentBlockEntry{ h: hash, o: 0, l: buf.len() as u64, }); at = cut; } let mut buf = Vec::new(); assert!(host_file.read_to_end(&mut buf).unwrap() == 0); } } } let it = inodes.iter().filter(|i|i.kind == 2).map(|i| { (BufReader::new(File::open(&i.host_path).unwrap()), i.inode) }); let mut ci = Chunker::new(Box::new(it), ::rollsum::Bup::new(), 9); while let Some(c) = ci.next() { bar.add((c.len) as u64); let mut block_shards = Vec::new(); for ibr in c.parts { block_shards.push(BlockShard{ file: self.i[ibr.i as usize].host_path.clone(), offset: ibr.file_start, size: ibr.file_end - ibr.file_start, }); if let None = self.i[ibr.i as usize].content { self.i[ibr.i as usize].content = Some(Vec::new()); } self.i[ibr.i as usize].content.as_mut().unwrap().push(ContentBlockEntry{ h: c.hash.clone(), o: ibr.block_start as u64, l: (ibr.file_end - ibr.file_start) as u64, }); print_progress_bar(&mut bar, &self.i[ibr.i as usize].host_path); } if blockstore.insert(c.hash, Block{ shards: block_shards, size: c.len, }) { new_blocks +=1; new_bytes += c.len; } total_blocks += 1; } bar.finish(); println!("done indexing {} inodes to {} blocks", self.i.len(), total_blocks); println!(" + {} blocks {}", new_blocks, kb_fmt!(new_bytes)); } pub fn store_index(&mut self, blockstore: &mut BlockStore) -> Index { let mut tmpindex = ::tempfile::NamedTempFile::new_in(".").unwrap(); self.serialize(&mut ::rmps::Serializer::new(&mut tmpindex)).unwrap(); tmpindex.seek(SeekFrom::Start(0)).unwrap(); let path = OsString::from(tmpindex.path().to_str().unwrap()); let tv= vec![tmpindex]; let it = tv.iter().map(|i|(i,0)); let mut ci = Chunker::new(Box::new(it), ::rollsum::Bup::new(), 12); let mut total_blocks = 0; let mut new_blocks = 0; let mut cbrs = Vec::new(); while let Some(c) = ci.next() { let mut block_shards = Vec::new(); for ibr in c.parts { block_shards.push(BlockShard{ file: path.clone(), offset: ibr.file_start, size: ibr.file_end - ibr.file_start, }); cbrs.push(ContentBlockEntry{ h: c.hash.clone(), o: ibr.block_start as u64, l: (ibr.file_end - ibr.file_start) as u64, }); } if blockstore.insert(c.hash, Block{ shards: block_shards, size: c.len, }) { new_blocks += 1; } total_blocks += 1; } println!("done serializing index to {} blocks ({} new)", total_blocks, new_blocks); Index{ v: 1, i: Vec::new(), c: Some(cbrs), } } pub fn load_index(&self, blockstore: &BlockStore) -> Index { let it = self.c.as_ref().unwrap().iter().map(|c| { let block = blockstore.get(&c.h).expect("block not found"); let mut re = block.chain(); re.seek(SeekFrom::Current(c.o as i64)).unwrap(); Take::limit(re, c.l as usize) }); let mut f = Chain::new(Box::new(it)); Index::deserialize(&mut ::rmps::Deserializer::new(&mut f)).unwrap() } pub fn save_to_file(&mut self, path: &Path) { let mut f = File::create(path).unwrap(); self.serialize(&mut ::rmps::Serializer::new(&mut f)).unwrap(); } pub fn load_from_file(path: &Path) -> Index { let mut f = File::open(path).unwrap(); Index::deserialize(&mut ::rmps::Deserializer::new(&mut f)).unwrap() } } fn print_progress_bar(bar: &mut ProgressBar<Stdout>, path: &OsString){ let s = path.to_str().unwrap(); if s.len() > 50 { bar.message(&format!("indexing ..{:48} ", &s[s.len()-48..])); } else { bar.message(&format!("indexing {:50} ", &s)); } }
use blockstore::{Block, BlockStore, BlockShard}; use chunker::*; use index::*; use pbr::ProgressBar; use readchain::{Take,Chain}; use serde::{Serialize, Deserialize}; use std::ffi::OsString; use std::io::{Stdout, Seek, SeekFrom, BufReader}; use std::path::Path; use std::fs::File; use elfkit; use sha2::{Sha256, Digest}; use std::io::Read; macro_rules! kb_fmt { ($n: ident) => {{ let kb = 1024f64; match $n as f64{ $n if $n >= kb.powf(4_f64) => format!("{:.*} TB", 2, $n / kb.powf(4_f64)), $n if $n >= kb.powf(3_f64) => format!("{:.*} GB", 2, $n / kb.powf(3_f64)), $n if $n >= kb.powf(2_f64) => format!("{:.*} MB", 2, $n / kb.powf(2_f64)), $n if $n >= kb => format!("{:.*} KB", 2, $n / kb), _ => format!("{:.*} B", 0, $n) } }} } impl Index { pub fn store_inodes(&mut self, blockstore: &mut BlockStore) { let total_bytes = self.i.iter().fold(0, |acc, ref x| acc + x.size); let mut bar = ProgressBar::new(total_bytes); bar.set_units(::pbr::Units::Bytes); let mut new_bytes = 0; let mut new_blocks = 0; let mut total_blocks = 0; let mut inodes = self.i.to_vec(); for i in &mut inodes { if i.kind != 2 { continue; } let mut host_file = File::open(&i.host_path).unwrap(); let cuts = match elfkit::Elf::from_reader(&mut host_file) { Err(_) => None, Ok(mut elf) => { let mut r = None; for sec in elf.sections.drain(..) { if sec.header.shtype == elfkit::types::SectionType(0x6fffff01) { let mut rr = Vec::new(); let mut io = &sec.content.into_raw().unwrap()[..]; while let Ok(o) = elf_read_u32!(&elf.header, io) { rr.push(o as usize); } r = Some(rr); } } r } }; match cuts { None => {}, Some(mut cuts) => { i.kind = 3; let mut at = 0; cuts.push(host_file.metadata().unwrap().len() as usize); cuts.sort_unstable(); host_file.seek(SeekFrom::Start(0)); for cut in cuts { let mut buf = vec![0;cut - at]; host_file.read_exact(&mut buf).unwrap(); let hash = Sha256::digest(&buf).as_slice().to_vec(); if blockstore.insert(hash.clone(), Block { shards: vec![BlockShard{ file: i.host_path.clone(), offset: at, size: buf.len(), }], size: buf.len(), }) { new_blocks +=1; new_bytes += buf.len(); } total_blocks += 1; bar.add(buf.len() as u64); if let None = self.i[i.inode as usize].content { self.i[i.inode as usize].content = Some(Vec::new()); } self.i[i.inode as usize].content.as_mut().unwrap().push(ContentBlockEntry{ h: hash, o: 0, l: buf.len() as u64, }); at = cut; } let mut buf = Vec::new(); assert!(host_file.read_to_end(&mut buf).unwrap() == 0); } } } let it = inodes.iter().filter(|i|i.kind == 2).map(|i| { (BufReader::new(File::open(&i.host_path).unwrap()), i.inode) }); let mut ci = Chunker::new(Box::new(it), ::rollsum::Bup::new(), 9); while let Some(c) = ci.next() { bar.add((c.len) as u64); let mut block_shards = Vec::new(); for ibr in c.parts { block_shards.push(BlockShard{ file: self.i[ibr.i as usize].host_path.clone(), offset: ibr.file_start, size: ibr.file_end - ibr.file_start, }); if let None = self.i[ibr.i as usize].content { self.i[ibr.i as usize].content = Some(Vec::new()); } self.i[ibr.i as usize].content.as_mut().unwrap().push(ContentBlockEntry{ h: c.hash.clone(), o: ibr.block_start as u64, l: (ibr.file_end - ibr.file_start) as u64, }); print_progress_bar(&mut bar, &self.i[ibr.i as usize].host_path); } if blockstore.insert(c.hash, Block{ shards: block_shards, size: c.len, }) { new_blocks +=1; new_bytes += c.len; } total_blocks += 1; } bar.finish(); println!("done indexing {} inodes to {} blocks", self.i.len(), total_blocks); println!(" + {} blocks {}", new_blocks, kb_fmt!(new_bytes)); } pub fn store_index(&mut self, blockstore: &mut BlockStore) -> Index { let mut tmpindex = ::tempfile::NamedTempFile::new_in(".").unwrap(); self.serialize(&mut ::rmps::Serializer::new(&mut tmpindex)).unwrap(); tmpindex.seek(SeekFrom::Start(0)).unwrap(); let path = OsString::from(tmpindex.path().to_str().unwrap()); let tv= vec![tmpindex]; let it = tv.iter().map(|i|(i,0)); let mut ci = Chunker::new(Box::new(it), ::rollsum::Bup::new(), 12); let mut total_blocks = 0; let mut new_blocks = 0; let mut cbrs = Vec::new(); while let Some(c) = ci.next() { let mut block_shards = Vec::new(); for ibr in c.parts { block_shards.push(BlockShard{ file: path.clone(), offset: ibr.file_start, size: ibr.file_end - ibr.file_start, }); cbrs.push(ContentBlockEntry{ h: c.hash.clone(), o: ibr.block_start as u64, l: (ibr.file_end - ibr.file_start) as u64, }); } if blockstore.insert(c.hash, Block{ shards: block_shards, size: c.len, }) { new_blocks += 1; } total_blocks += 1; } println!("done serializing index to {} blocks ({} new)", total_blocks, new_blocks); Index{ v: 1, i: Vec::new(), c: Some(cbrs), } } pub fn load_index(&self, blockstore: &BlockStore) -> Index { let it = self.c.as_ref().unwrap().iter().map(|c| { let block = blockstore.get(&c.h).expect("block not found"); let mut re = block.chain(); re.seek(SeekFrom::Current(c.o as i64)).unwrap(); Take::limit(re, c.l as usize) }); let mut f = Chain::new(Box::new(it)); Index::deserialize(&mut ::rmps::Deserializer::new(&mut f)).unwrap() } pub fn save_to_file(&mut self, path: &Path) { let mut f = File::create(path).unwrap(); self.serialize(&mut ::rmps::Serializer::new(&mut f)).unwrap(); } pub fn load_from_file(path: &Path) -> Index { let mut f = File::open(path).unwrap(); Index::deserialize(&mut ::rmps::Deserializer::new(&mut f)).unwrap() } }
fn print_progress_bar(bar: &mut ProgressBar<Stdout>, path: &OsString){ let s = path.to_str().unwrap(); if s.len() > 50 { bar.message(&format!("indexing ..{:48} ", &s[s.len()-48..])); } else { bar.message(&format!("indexing {:50} ", &s)); } }
function_block-full_function
[ { "content": "pub fn new(path: String) -> BlockStore {\n\n let mut bs = BlockStore{\n\n path: path,\n\n blocks: HashMap::new(),\n\n };\n\n bs.load();\n\n bs\n\n}\n\n\n\n\n\nimpl BlockStore {\n\n pub fn get<'a>(&'a self, hash: &Vec<u8>) -> Option<&'a Block> {\n\n self.blocks.get(hash)\n\n }\n\n pub fn insert(&mut self, hash: Vec<u8>, block: Block) -> bool {\n\n //sanity check on hash\n\n #[cfg(debug_assertions)]\n\n {\n\n let mut br = BufReader::new(block.chain());\n\n let hs = Sha256::digest_reader(&mut br).unwrap().as_slice().to_vec();\n", "file_path": "src/blockstore.rs", "rank": 0, "score": 123156.47503877198 }, { "content": "pub fn from_host(host: ::std::ffi::OsString) -> Index{\n\n let mut index = Index{\n\n v: 1,\n\n i: Vec::new(),\n\n c: None,\n\n };\n\n\n\n\n\n index.i.push(Inode{\n\n inode: 0,\n\n parent: 0,\n\n size: 0,\n\n kind: 1,\n\n access: 0o775,\n\n\n\n dir: None,\n\n hash: None,\n\n content: None,\n\n\n\n host_path: host.clone(),\n", "file_path": "src/index.rs", "rank": 2, "score": 68112.79497851085 }, { "content": "#[test]\n\nfn block() {\n\n let bl = Block{\n\n shards: vec![\n\n BlockShard{file: String::from(\"test/readchain/a\"), offset: 0, size:4},\n\n BlockShard{file: String::from(\"test/readchain/b\"), offset: 0, size:4},\n\n ]\n\n };\n\n let mut content = String::new();\n\n bl.chain().read_to_string(&mut content).unwrap();\n\n assert_eq!(content, \"yayacool\");\n\n}\n", "file_path": "src/readchain.rs", "rank": 3, "score": 61602.902946133414 }, { "content": "fn collect_dir(path: ::std::ffi::OsString) -> ::std::io::Result<Vec<::std::fs::DirEntry>> {\n\n let entry_set = try!(::std::fs::read_dir(path));\n\n let mut entries = try!(entry_set.collect::<Result<Vec<_>, _>>());\n\n entries.sort_by(|a, b| a.path().cmp(&b.path()));\n\n Ok(entries)\n\n}\n\n\n\nimpl Index {\n\n fn add_from_dir_entry(&mut self, parent_inode: u64, path: ::std::fs::DirEntry) -> (String, ContentDirEntry) {\n\n let meta = path.metadata().unwrap();\n\n let i = (self.i.len()) as u64;\n\n\n\n let kind = match meta.is_dir() {\n\n true => 1,\n\n false => 2,\n\n };\n\n\n\n let entry = Inode{\n\n inode: i,\n\n parent: parent_inode,\n", "file_path": "src/index.rs", "rank": 4, "score": 55098.833385782666 }, { "content": "fn entry_to_file_attr(entry: &Inode) -> FileAttr{\n\n FileAttr {\n\n ino: entry.inode + 1,\n\n size: entry.size,\n\n blocks: entry.size * 512,\n\n atime: CREATE_TIME,\n\n mtime: CREATE_TIME,\n\n ctime: CREATE_TIME,\n\n crtime: CREATE_TIME,\n\n kind: match entry.kind {\n\n 1 => FileType::Directory,\n\n _ => FileType::RegularFile,\n\n },\n\n perm: entry.access,\n\n nlink: match entry.dir {\n\n Some(ref d) => d.len() + 1,\n\n _ => 1,\n\n } as u32,\n\n uid: 1000,\n\n gid: 1000,\n", "file_path": "src/fs.rs", "rank": 5, "score": 52745.80405200887 }, { "content": "#[test]\n\nfn some_files() {\n\n\n\n let files = vec![\n\n (\"test/readchain/a\", 0, 4),\n\n (\"test/readchain/b\", 0, 4),\n\n ].into_iter().map(|(f,o,l)| {\n\n let mut f = File::open(f).unwrap();\n\n f.seek(SeekFrom::Start(o)).unwrap();\n\n f.take(l as u64)\n\n });\n\n\n\n let mut content = String::new();\n\n Chain::new(Box::new(files)).read_to_string(&mut content).unwrap();\n\n assert_eq!(content, \"yayacool\");\n\n}\n\n\n", "file_path": "src/readchain.rs", "rank": 6, "score": 46129.84157959629 }, { "content": "fn ordered_map<S>(value: &Option<HashMap<String, ContentDirEntry>>, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere S: Serializer\n\n{\n\n match *value {\n\n Some(ref val) => {\n\n let ordered: BTreeMap<_, _> = val.iter().collect();\n\n ordered.serialize(serializer)\n\n },\n\n None => {\n\n let fake : Option<HashMap<String, ContentDirEntry>> = None;\n\n fake.serialize(serializer)\n\n }\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone)]\n\npub struct ContentBlockEntry {\n\n pub h: Vec<u8>, //block hash\n\n pub o: u64, //offset into block\n\n pub l: u64, //length into block\n", "file_path": "src/index.rs", "rank": 7, "score": 36498.346631786 }, { "content": "#[cfg(test)]\n\nstruct Block {\n\n shards: Vec<BlockShard>,\n\n}\n\n\n\n#[cfg(test)]\n\nimpl<'a> Block {\n\n fn chain(&'a self) -> Chain<'a, Take<File>> {\n\n let it = self.shards.iter().map(|shard| {\n\n let mut f = File::open(&shard.file).unwrap();\n\n f.seek(SeekFrom::Current(shard.offset as i64)).unwrap();\n\n Take::limit(f, shard.size)\n\n });\n\n Chain::new(Box::new(it))\n\n }\n\n}\n\n\n", "file_path": "src/readchain.rs", "rank": 8, "score": 35551.378262431805 }, { "content": "#[test]\n\nfn nested() {\n\n\n\n let cl = |(f,o,l)| {\n\n let mut f = File::open(f).unwrap();\n\n f.seek(SeekFrom::Start(o)).unwrap();\n\n Take::limit(f, l)\n\n };\n\n\n\n let fa = vec![\n\n (\"test/readchain/a\", 0, 4),\n\n (\"test/readchain/a\", 0, 4),\n\n ].into_iter().map(&cl);\n\n\n\n let fb = vec![\n\n (\"test/readchain/b\", 0, 10),\n\n (\"test/readchain/b\", 0, 10),\n\n ].into_iter().map(&cl);\n\n\n\n let files = vec![\n\n (fa, 0, 4),\n", "file_path": "src/readchain.rs", "rank": 9, "score": 26051.524683701613 }, { "content": "fn main() {\n\n\n\n let matches = App::new(\"korhal-image\")\n\n .setting(AppSettings::ArgRequiredElseHelp)\n\n .setting(AppSettings::UnifiedHelpMessage)\n\n .setting(AppSettings::DisableHelpSubcommand)\n\n .version(\"1.0\")\n\n .about(\"content addressable image indexer\")\n\n .subcommand(\n\n SubCommand::with_name(\"rm\")\n\n .about(\"remove index from store\")\n\n .arg(Arg::with_name(\"name\")\n\n .required(true)\n\n .help(\"name of index\")\n\n .takes_value(true)\n\n .index(1)\n\n )\n\n )\n\n .subcommand(\n\n SubCommand::with_name(\"store\")\n", "file_path": "src/main.rs", "rank": 10, "score": 26051.524683701613 }, { "content": "#[test]\n\nfn overshoot() {\n\n\n\n let files = vec![\n\n (\"test/readchain/a\", 0, 4123123213),\n\n ].into_iter().map(|(f,o,l)| {\n\n let mut f = File::open(f).unwrap();\n\n f.seek(SeekFrom::Start(o)).unwrap();\n\n f.take(l)\n\n });\n\n\n\n let mut content = String::new();\n\n let mut rr = Chain::new(Box::new(files));\n\n let mut void = [0;2];\n\n rr.read(&mut void).unwrap();\n\n rr.read_to_string(&mut content).unwrap();\n\n assert_eq!(content, \"ya\");\n\n}\n\n\n", "file_path": "src/readchain.rs", "rank": 11, "score": 26051.524683701613 }, { "content": "#[test]\n\nfn overslimit() {\n\n\n\n let files = vec![\n\n (\"test/readchain/a\", 0, 3),\n\n ].into_iter().map(|(f,o,l)| {\n\n let mut f = File::open(f).unwrap();\n\n f.seek(SeekFrom::Start(o)).unwrap();\n\n f.take(l)\n\n });\n\n\n\n let mut content = String::new();\n\n let mut rr = Chain::new(Box::new(files));\n\n let mut void = [0;2];\n\n rr.read(&mut void).unwrap();\n\n rr.read_to_string(&mut content).unwrap();\n\n assert_eq!(content, \"y\");\n\n}\n\n\n", "file_path": "src/readchain.rs", "rank": 12, "score": 26051.524683701613 }, { "content": "use hex::{ToHex, FromHex};\n\nuse readchain::{Take,Chain};\n\nuse sha2::{Sha256, Digest};\n\nuse std::collections::HashMap;\n\nuse std::ffi::OsString;\n\nuse std::fs::{File, create_dir_all};\n\nuse std::io::{Read, Seek, BufReader, SeekFrom};\n\nuse std::path::Path;\n\n\n\npub struct BlockStore {\n\n pub path: String,\n\n pub blocks: HashMap<Vec<u8>, Block>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Block {\n\n pub shards: Vec<BlockShard>,\n\n pub size: usize,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct BlockShard {\n\n pub file: OsString,\n\n pub offset: usize,\n\n pub size: usize,\n\n}\n\n\n", "file_path": "src/blockstore.rs", "rank": 13, "score": 21410.219373990774 }, { "content": " let entry2 = entry2.unwrap();\n\n let hash = entry.file_name().to_string_lossy().into_owned() + &entry2.file_name().to_string_lossy().into_owned();\n\n let hash = Vec::<u8>::from_hex(hash).unwrap();\n\n let size = entry2.metadata().unwrap().len() as usize;\n\n\n\n self.insert(hash, Block {\n\n shards: vec![BlockShard{\n\n file: entry2.path().into_os_string(),\n\n offset: 0,\n\n size: size,\n\n }],\n\n size: size,\n\n });\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Block {\n\n pub fn chain<'a>(&'a self) -> Chain<'a, Take<File>> {\n\n let it = self.shards.iter().map(|shard| {\n\n let mut f = File::open(&shard.file).unwrap();\n\n f.seek(SeekFrom::Current(shard.offset as i64)).unwrap();\n\n Take::limit(f, shard.size)\n\n });\n\n Chain::new(Box::new(it))\n\n }\n\n}\n", "file_path": "src/blockstore.rs", "rank": 14, "score": 21407.017659803936 }, { "content": " if hs != hash {\n\n\n\n let mut br = BufReader::new(block.chain());\n\n let mut content = Vec::new();\n\n let rs = br.read_to_end(&mut content).unwrap();\n\n\n\n if rs != block.size {\n\n panic!(format!(\"BUG: block should be {} bytes but did read {}\", block.size, content.len()));\n\n }\n\n\n\n\n\n let hs2 = Sha256::digest(&content).as_slice().to_vec();\n\n if hs2 != hs2 {\n\n panic!(\"BUG: in chainreader: hash from read_to_end doesn't match digest_reader\");\n\n }\n\n\n\n panic!(format!(\"BUG: inserted block hash id doesn't match its content. expected {} got {}\", hash.to_hex(), hs.to_hex()));\n\n }\n\n }\n\n\n", "file_path": "src/blockstore.rs", "rank": 15, "score": 21400.66733467161 }, { "content": " //collision check\n\n if self.blocks.contains_key(&hash) {\n\n let mut ra = BufReader::new(block.chain());\n\n let mut rb = BufReader::new(self.blocks[&hash].chain());\n\n loop {\n\n let mut a: [u8;4096] = [0; 4096];\n\n let mut b: [u8;4096] = [0; 4096];\n\n ra.read(&mut a).unwrap();\n\n let rs = rb.read(&mut b).unwrap();\n\n\n\n if a[..] != b[..] {\n\n println!(\"!!!!!! HASH COLLISION !!!!!!!!!!!!!!!!!!!!!\");\n\n println!(\"this is extremly unlikely and might be a bug, save your block store for research.\");\n\n println!(\"{}\", hash.to_hex());\n\n panic!(\"hash collision\");\n\n }\n\n\n\n if rs < 1 {\n\n break;\n\n }\n", "file_path": "src/blockstore.rs", "rank": 16, "score": 21399.512348833374 }, { "content": " size: block.size,\n\n shards: vec![\n\n BlockShard {\n\n file: OsString::from(p.to_str().unwrap()),\n\n offset: 0,\n\n size: block.size,\n\n }\n\n ]\n\n });\n\n\n\n return true;\n\n }\n\n\n\n fn load(&mut self) {\n\n println!(\"loading content from {}\", self.path);\n\n let entry_set = ::std::fs::read_dir(&self.path).unwrap();\n\n for entry in entry_set {\n\n let entry = entry.unwrap();\n\n let entry_set2 = ::std::fs::read_dir(entry.path()).unwrap();\n\n for entry2 in entry_set2 {\n", "file_path": "src/blockstore.rs", "rank": 17, "score": 21399.14146094866 }, { "content": " }\n\n return false;\n\n }\n\n\n\n //TODO sometimes we want to store the original block rather than saving it to disk\n\n //the current interface will be weird later\n\n\n\n let hs = hash.to_hex();\n\n let mut p = Path::new(&self.path).join(&hs[0..2]);\n\n create_dir_all(&p).unwrap();\n\n p = p.join(&hs[2..]);\n\n if p.exists() {\n\n //TODO collision check?\n\n } else {\n\n //TODO: write to tempfile then move to avoid half written entries\n\n let mut f = File::create(&p).unwrap();\n\n ::std::io::copy(&mut block.chain(), &mut f).unwrap();\n\n }\n\n\n\n self.blocks.insert(hash, Block{\n", "file_path": "src/blockstore.rs", "rank": 18, "score": 21397.828105718792 }, { "content": "use std::io::{Read};\n\nuse sha2::{Sha256, Digest};\n\n\n\n/// takes an iterator over tuple (Read, I)\n\n/// and provides an iterator over Chunk{hash, parts<I>}\n\n///\n\npub struct Chunker<'a, R, C, I> where R : Read, C: ::rollsum::Engine {\n\n it: Box<Iterator<Item=(R, I)> + 'a>,\n\n current_read: Option<(R,I)>,\n\n current_parts: Vec<ChunkPart<I>>,\n\n current_block_len: usize,\n\n current_file_pos: usize,\n\n\n\n chunker: C,\n\n bits: u32,\n\n\n\n hasher: Sha256,\n\n\n\n buf: [u8;4096],\n\n buflen : usize,\n", "file_path": "src/chunker.rs", "rank": 19, "score": 21328.71244292324 }, { "content": " bufpos : usize,\n\n bufsincelastblock: usize,\n\n}\n\n\n\npub struct Chunk<I> {\n\n pub len: usize,\n\n pub hash: Vec<u8>,\n\n pub parts: Vec<ChunkPart<I>>\n\n}\n\n\n\npub struct ChunkPart<I> {\n\n pub i: I,\n\n pub file_start: usize, //where the file was when the block started\n\n pub file_end: usize, //where the file completed inside the block\n\n pub block_start: usize, //where the block was when the file started\n\n}\n\n\n\nimpl<'a, R, C, I> Chunker<'a, R, C, I> where I: Copy, R: Read, C: ::rollsum::Engine {\n\n pub fn new(it: Box<Iterator<Item=(R, I)> + 'a>, c: C, bits: u32) -> Chunker<'a, R, C, I>{\n\n Chunker{\n", "file_path": "src/chunker.rs", "rank": 20, "score": 21326.425190336402 }, { "content": " return self.fill();\n\n } else {\n\n self.buflen = some;\n\n return true;\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\n\n\nimpl<'a, R, C, I> Iterator for Chunker<'a, R, C, I> where I: Copy, R: Read, C: ::rollsum::Engine<Digest = u32> {\n\n type Item = Chunk<I>;\n\n fn next(&mut self) -> Option<Self::Item> {\n\n let chunk_mask = (1 << self.bits) - 1;\n\n loop {\n\n if self.bufpos >= self.buflen {\n\n\n\n self.current_block_len += self.bufpos-self.bufsincelastblock;\n\n self.current_file_pos += self.bufpos-self.bufsincelastblock;\n", "file_path": "src/chunker.rs", "rank": 21, "score": 21325.44305658034 }, { "content": " }\n\n }\n\n debug_assert!(self.current_parts.len() > 0, format!(\n\n \"continuing to iterate when current_parts is empty. bufpos: {}, buflen: {}\", self.bufpos, self.buflen));\n\n\n\n self.chunker.roll_byte(self.buf[self.bufpos]);\n\n self.bufpos += 1;\n\n\n\n if self.chunker.digest() & chunk_mask == chunk_mask {\n\n\n\n self.current_block_len += self.bufpos-self.bufsincelastblock;\n\n self.current_file_pos += self.bufpos-self.bufsincelastblock;\n\n self.hasher.input(&self.buf[self.bufsincelastblock..self.bufpos]);\n\n\n\n let hash = self.hasher.result().as_slice().to_vec();\n\n self.hasher = Sha256::default();\n\n\n\n self.current_parts.last_mut().as_mut().unwrap().file_end = self.current_file_pos;\n\n let rr = Chunk{\n\n len: self.current_block_len,\n", "file_path": "src/chunker.rs", "rank": 22, "score": 21322.42067519501 }, { "content": " it: it,\n\n current_read: None,\n\n current_parts: Vec::new(),\n\n current_block_len: 0,\n\n current_file_pos: 0,\n\n\n\n chunker: c,\n\n bits: bits,\n\n\n\n hasher: Sha256::default(),\n\n\n\n buf: [0;4096],\n\n buflen: 0,\n\n bufpos: 0,\n\n bufsincelastblock: 0,\n\n }\n\n }\n\n\n\n fn fill(&mut self) -> bool {\n\n if let None = self.current_read {\n", "file_path": "src/chunker.rs", "rank": 23, "score": 21321.690094993977 }, { "content": " hash: hash,\n\n parts: ::std::mem::replace(&mut self.current_parts, Vec::new()),\n\n };\n\n self.current_parts.push(ChunkPart{\n\n i: self.current_read.as_ref().unwrap().1,\n\n file_start: self.current_file_pos,\n\n file_end: 0,\n\n block_start: 0,\n\n });\n\n\n\n self.current_block_len = 0;\n\n self.bufsincelastblock = self.bufpos;\n\n\n\n return Some(rr);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/chunker.rs", "rank": 24, "score": 21319.559016718136 }, { "content": " self.hasher.input(&self.buf[self.bufsincelastblock..self.bufpos]);\n\n\n\n self.bufsincelastblock = 0;\n\n self.bufpos = 0;\n\n self.buflen = 0;\n\n\n\n if !self.fill() {\n\n //rest\n\n if self.current_parts.len() > 0 {\n\n let hash = self.hasher.result().as_slice().to_vec();\n\n self.current_parts.last_mut().as_mut().unwrap().file_end = self.current_file_pos;\n\n return Some(Chunk{\n\n len: self.current_block_len,\n\n hash: hash,\n\n parts: ::std::mem::replace(&mut self.current_parts, Vec::new()),\n\n });\n\n } else {\n\n debug_assert!(self.bufsincelastblock == 0 && self.bufpos == 0, \"end of iterator with leftover bytes\");\n\n return None;\n\n }\n", "file_path": "src/chunker.rs", "rank": 25, "score": 21319.40277530815 }, { "content": " match self.it.next() {\n\n None => return false,\n\n Some(r) => {\n\n self.current_parts.push(ChunkPart{\n\n i: r.1,\n\n file_start: 0,\n\n file_end: 0,\n\n block_start: self.current_block_len,\n\n });\n\n self.current_file_pos = 0;\n\n self.current_read = Some(r);\n\n }\n\n }\n\n }\n\n match self.current_read.as_mut().unwrap().0.read(&mut self.buf) {\n\n Err(e) => panic!(e),\n\n Ok(some) => {\n\n if some < 1 {\n\n self.current_parts.last_mut().as_mut().unwrap().file_end = self.current_file_pos;\n\n self.current_read = None;\n", "file_path": "src/chunker.rs", "rank": 26, "score": 21319.068055998683 }, { "content": "use serde::{Serialize, Serializer};\n\nuse std::collections::{HashMap, BTreeMap};\n\nuse std::path::Path;\n\nuse std::fs::metadata;\n\n\n\n#[derive(Serialize, Deserialize, Clone)]\n\npub struct Inode {\n\n pub inode: u64,\n\n pub parent: u64,\n\n pub size: u64,\n\n pub kind: u16,\n\n pub access: u16,\n\n\n\n #[serde(serialize_with = \"ordered_map\")]\n\n pub dir: Option<HashMap<String, ContentDirEntry>>, //directory\n\n pub hash: Option<String>, //file hash\n\n pub content: Option<Vec<ContentBlockEntry>>, //content blocks\n\n\n\n #[serde(skip)]\n\n pub host_path: ::std::ffi::OsString, // full path. will not be stored\n\n}\n\n\n", "file_path": "src/index.rs", "rank": 27, "score": 21167.321446960064 }, { "content": "}\n\n\n\n#[derive(Serialize, Deserialize, Clone)]\n\npub struct ContentDirEntry {\n\n pub i: u64, //inode\n\n pub k: u16, //kind\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct Index {\n\n pub v: u16, //version\n\n pub i: Vec<Inode>, //inodes. i or c cannot exist at the same time\n\n pub c: Option<Vec<ContentBlockEntry>>, //content blocks that compose another index\n\n}\n\n\n", "file_path": "src/index.rs", "rank": 28, "score": 21163.337813872382 }, { "content": " });\n\n\n\n let meta = metadata(host.clone()).unwrap();\n\n if meta.is_file() {\n\n let mut contentdirmap : HashMap<String, ContentDirEntry> = HashMap::new();\n\n contentdirmap.insert(Path::new(host.as_os_str()).file_name().unwrap().to_string_lossy().into_owned(),\n\n ContentDirEntry {\n\n i: 1,\n\n k: 2,\n\n });\n\n index.i[0].dir = Some(contentdirmap);\n\n index.i.push(Inode{\n\n inode: 1,\n\n parent: 0,\n\n size: meta.len(),\n\n kind: 2,\n\n access: 0o775,\n\n\n\n dir: None,\n\n hash: None,\n", "file_path": "src/index.rs", "rank": 29, "score": 21162.157927549393 }, { "content": " k: kind,\n\n },\n\n )\n\n }\n\n\n\n fn descend(&mut self, parent_inode: u64, path: ::std::ffi::OsString) {\n\n\n\n let dirs = collect_dir(path).unwrap();\n\n\n\n let inode_start = self.i.len() as u64;\n\n let inode_len = dirs.len() as u64;\n\n\n\n // 1 iteration to create all the inodes\n\n let mut contentdirmap : HashMap<String, ContentDirEntry> = HashMap::new();\n\n for path in dirs {\n\n let (name, cde) = self.add_from_dir_entry(parent_inode, path);\n\n contentdirmap.insert(name, cde);\n\n }\n\n\n\n // insert the dirmap into the current parent node\n", "file_path": "src/index.rs", "rank": 30, "score": 21159.245308129866 }, { "content": " self.i[parent_inode as usize].dir = Some(contentdirmap);\n\n\n\n // 2. iteration to descend into the subdirs\n\n for x in inode_start..(inode_start+inode_len) {\n\n let (kind, inode, path) = {\n\n let ref e = self.i[x as usize];\n\n (e.kind, e.inode, e.host_path.clone())\n\n };\n\n if kind == 1 {\n\n self.descend(inode, path);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/index.rs", "rank": 31, "score": 21157.43407575644 }, { "content": " size: meta.len(),\n\n kind: kind,\n\n access: 0o775,\n\n\n\n dir: None,\n\n hash: None,\n\n content: Some(Vec::new()),\n\n\n\n host_path: path.path().into_os_string(),\n\n };\n\n\n\n\n\n\n\n\n\n self.i.push(entry);\n\n\n\n (\n\n path.file_name().to_string_lossy().into_owned(),\n\n ContentDirEntry {\n\n i: i,\n", "file_path": "src/index.rs", "rank": 32, "score": 21157.31637062948 }, { "content": " content: None,\n\n\n\n host_path: host.clone(),\n\n });\n\n } else {\n\n index.descend(0, host);\n\n }\n\n index\n\n}\n\n\n", "file_path": "src/index.rs", "rank": 33, "score": 21154.849174572475 }, { "content": " rdev: 0,\n\n flags: 0,\n\n }\n\n}\n\n\n\n\n\npub struct Fuse<'a> {\n\n index: &'a Index,\n\n blockstore: &'a BlockStore,\n\n open_files: HashMap<u64, Box<Read + 'a>>,\n\n}\n\n\n\nimpl<'a> Fuse<'a> {\n\n pub fn new(index: &'a Index, blockstore: &'a BlockStore) -> Fuse<'a> {\n\n Fuse{\n\n index: index,\n\n blockstore: blockstore,\n\n open_files: HashMap::new(),\n\n }\n\n }\n", "file_path": "src/fs.rs", "rank": 46, "score": 19.92726421338009 }, { "content": " reply.data(&buf[..r]);\n\n }\n\n\n\n fn readdir (&mut self, _req: &Request, ino: u64, _fh: u64, offset: u64, mut reply: ReplyDirectory) {\n\n println!(\"readdir {:?}\", ino);\n\n if offset != 0 {\n\n reply.error(ENOENT);\n\n return;\n\n }\n\n match self.index.i.get((ino - 1) as usize) {\n\n None => reply.error(ENOENT),\n\n Some(entry) => {\n\n reply.add(1, 0, FileType::Directory, \".\"); //FIXME\n\n reply.add(1, 1, FileType::Directory, \"..\");\n\n\n\n let mut offset = 2;\n\n\n\n match entry.dir {\n\n None => reply.ok(),\n\n Some(ref dir) => {\n", "file_path": "src/fs.rs", "rank": 47, "score": 18.8306863170516 }, { "content": " fn read(&mut self, buf: &mut [u8]) -> Result<usize> {\n\n // Don't call into inner reader at all at EOF because it may still block\n\n if self.limit == 0 {\n\n return Ok(0);\n\n }\n\n\n\n let max = cmp::min(buf.len(), self.limit) as usize;\n\n let n = self.inner.read(&mut buf[..max])?;\n\n self.limit -= n;\n\n Ok(n)\n\n }\n\n}\n\n\n\nimpl<R> Seek for Take<R> where R: Read+Seek {\n\n fn seek(&mut self, pos: SeekFrom) -> Result<u64> {\n\n match pos {\n\n SeekFrom::End(_) | SeekFrom::Start(_) => {\n\n return Err(Error::new(ErrorKind::NotFound, \"cannot seek end/start on Take\"));\n\n },\n\n SeekFrom::Current(seek) => {\n", "file_path": "src/readchain.rs", "rank": 48, "score": 18.571627777291884 }, { "content": " for (s,d) in dir {\n\n reply.add(d.i, offset, match d.k {\n\n 1 => FileType::Directory,\n\n _ => FileType::RegularFile,\n\n }, s);\n\n offset += 1;\n\n }\n\n reply.ok();\n\n }\n\n };\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Inode {\n\n pub fn chain<'a>(&'a self, blockstore: &'a BlockStore) -> Chain<'a, Take<Chain<'a, Take<File>>>> {\n\n let c = self.content.as_ref().unwrap();\n\n let it = c.iter().map(move |c| {\n\n println!(\"reading from block {:?} offset {} limit {}\", c.h, c.o, c.l);\n", "file_path": "src/fs.rs", "rank": 49, "score": 18.356693581793223 }, { "content": " if didread >= buf.len(){\n\n return Ok(didread);\n\n }\n\n self.cur = None;\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, R> Seek for Chain<'a, R> where R : Read + Seek {\n\n fn seek(&mut self, pos: SeekFrom) -> Result<u64> {\n\n match pos {\n\n SeekFrom::End(_) | SeekFrom::Start(_) => {\n\n return Err(Error::new(ErrorKind::NotFound, \"cannot seek on iterator\"));\n\n },\n\n SeekFrom::Current(start) => {\n\n if start < 0 {\n\n return Err(Error::new(ErrorKind::NotFound, \"cannot seek backwards on iterator\"));\n\n }\n", "file_path": "src/readchain.rs", "rank": 50, "score": 17.968575766273094 }, { "content": " self.open_files.insert(fh, Box::new(entry.chain(self.blockstore)));\n\n reply.opened(fh, 0);\n\n },\n\n };\n\n }\n\n fn release(&mut self, _req: &Request, ino: u64, fh: u64, _flags: u32, \n\n _lock_owner: u64, _flush: bool, reply: ReplyEmpty) {\n\n println!(\"close {:?}\", ino);\n\n self.open_files.remove(&fh);\n\n reply.ok();\n\n }\n\n\n\n fn read (&mut self, _req: &Request, ino: u64, fh: u64, offset: u64, size: u32, reply: ReplyData) {\n\n //TODO: i dont know if offset can be different than the last returned read size\n\n println!(\"read {:?} {} {}\", ino, offset, size);\n\n\n\n let file = self.open_files.get_mut(&fh).unwrap();\n\n\n\n let mut buf = vec![0; size as usize];\n\n let r = file.read(&mut buf).unwrap();\n", "file_path": "src/fs.rs", "rank": 51, "score": 16.702829833004618 }, { "content": "use blockstore::{BlockStore};\n\nuse fuse::*;\n\nuse index::{Index, Inode};\n\nuse libc::ENOENT;\n\nuse readchain::{Take,Chain};\n\nuse std::collections::HashMap;\n\nuse std::ffi::OsStr;\n\nuse std::fs::File;\n\nuse std::io::{Read, Seek, SeekFrom};\n\nuse time::Timespec;\n\nuse std::boxed::Box;\n\n\n\nconst TTL: Timespec = Timespec { sec: 1, nsec: 0 }; // 1 second\n\n\n\nconst CREATE_TIME: Timespec = Timespec { sec: 1381237736, nsec: 0 }; // 2013-10-08 08:56\n\n\n", "file_path": "src/fs.rs", "rank": 52, "score": 16.652966169775873 }, { "content": " (fb, 4, 6),\n\n ].into_iter().map(|(f,o,l)| {\n\n let mut f = Chain::new(Box::new(f));\n\n f.seek(SeekFrom::Current(o)).unwrap();\n\n f.take(l)\n\n });\n\n let mut content = String::new();\n\n Chain::new(Box::new(files)).read_to_string(&mut content).unwrap();\n\n assert_eq!(content, \"yaya stuff\");\n\n}\n\n\n\n\n\n#[cfg(test)]\n\npub struct BlockShard {\n\n file: String,\n\n offset: usize,\n\n size: usize,\n\n}\n\n\n", "file_path": "src/readchain.rs", "rank": 53, "score": 16.37272989519991 }, { "content": "use std::cmp;\n\nuse std::io::{Result, Read, Seek, SeekFrom, Error, ErrorKind};\n\nuse std::iter::Iterator;\n\n\n\n/// like std::io::Take but with Seek\n\npub struct Take<R> where R: Read {\n\n inner: R,\n\n limit: usize,\n\n}\n\n\n\nimpl<R> Take<R> where R: Read{\n\n pub fn limit(r: R, limit: usize) -> Take<R> {\n\n Take{\n\n inner: r,\n\n limit: limit,\n\n }\n\n }\n\n}\n\n\n\nimpl<R> Read for Take<R> where R: Read{\n", "file_path": "src/readchain.rs", "rank": 54, "score": 15.970013578244679 }, { "content": " println!(\"getattr {:?}\", ino);\n\n\n\n match self.index.i.get((ino - 1) as usize) {\n\n None => reply.error(ENOENT),\n\n Some(entry) => {\n\n reply.attr(&TTL, &entry_to_file_attr(entry));\n\n }\n\n }\n\n }\n\n\n\n\n\n fn open(&mut self, _req: &Request, ino: u64, _flags: u32, reply: ReplyOpen) {\n\n println!(\"open {:?}\", ino);\n\n match self.index.i.get((ino - 1) as usize) {\n\n None => {reply.error(ENOENT);},\n\n Some(entry) => {\n\n let mut fh = entry.inode;\n\n while self.open_files.contains_key(&fh) {\n\n fh += 1;\n\n }\n", "file_path": "src/fs.rs", "rank": 55, "score": 15.78060330970517 }, { "content": "}\n\n\n\nimpl<'a> Filesystem for Fuse<'a> {\n\n fn lookup (&mut self, _req: &Request, parent: u64, name: &OsStr, reply: ReplyEntry) {\n\n\n\n let mb = self.index.i.get((parent - 1) as usize)\n\n .and_then(|entry| entry.dir.as_ref())\n\n .and_then(|d| d.get(&name.to_string_lossy().into_owned()))\n\n .and_then(|e| self.index.i.get(e.i as usize));\n\n\n\n match mb {\n\n None => reply.error(ENOENT),\n\n Some(entry) => {\n\n let fa = &entry_to_file_attr(entry);\n\n reply.entry(&TTL, fa, 0)\n\n }\n\n }\n\n }\n\n\n\n fn getattr (&mut self, _req: &Request, ino: u64, reply: ReplyAttr) {\n", "file_path": "src/fs.rs", "rank": 56, "score": 15.596219974918952 }, { "content": "\n\n let block = blockstore.get(&c.h).expect(\"block not found\");\n\n let mut re = block.chain();\n\n re.seek(SeekFrom::Current(c.o as i64)).unwrap();\n\n Take::limit(re, c.l as usize)\n\n\n\n });\n\n Chain::new(Box::new(it))\n\n }\n\n}\n", "file_path": "src/fs.rs", "rank": 57, "score": 14.920185639909825 }, { "content": " }\n\n}\n\n\n\nimpl<'a, R> Read for Chain<'a, R> where R : Read {\n\n fn read(&mut self, buf: &mut [u8]) -> Result<usize> {\n\n let mut didread = 0;\n\n loop {\n\n if let None = self.cur {\n\n match self.it.next() {\n\n None => return Ok(didread),\n\n Some(r) => {\n\n self.cur = Some(r);\n\n }\n\n }\n\n }\n\n let n = buf.len() - didread;\n\n match self.cur.as_mut().unwrap().read(&mut buf[didread..(didread+n)]) {\n\n Err(e) => return Err(e),\n\n Ok(rs) => {\n\n didread += rs;\n", "file_path": "src/readchain.rs", "rank": 58, "score": 14.881954239676219 }, { "content": " self.inner.seek(SeekFrom::Current(cmp::min(seek, self.limit as i64)))\n\n }\n\n }\n\n }\n\n}\n\n\n\n\n\n/// like std::io::Chain but on an Iterator which may contain a lambda and with Seek\n\npub struct Chain<'a, R> where R : Read {\n\n it: Box<Iterator<Item=R> + 'a>,\n\n cur: Option<R>,\n\n}\n\n\n\n\n\nimpl<'a, R> Chain<'a, R> where R : Read {\n\n pub fn new(it: Box<Iterator<Item=R> + 'a>) -> Chain<'a, R>{\n\n Chain{\n\n it: it,\n\n cur: None,\n\n }\n", "file_path": "src/readchain.rs", "rank": 59, "score": 13.7183433683534 }, { "content": " hi = hi.store_index(&mut bs);\n\n if hi.c.as_ref().unwrap().len() == 1 {\n\n break;\n\n }\n\n }\n\n\n\n hi.save_to_file(&store_path.join(name));\n\n println!(\"input stored into index {} with name {:?}\",\n\n hi.c.as_ref().unwrap().first().unwrap().h.to_hex(),\n\n name\n\n )\n\n },\n\n (\"mount\", Some(submatches)) =>{\n\n let name = submatches.value_of(\"name\").unwrap();\n\n let target_path = submatches.value_of(\"target\").unwrap();\n\n let store_path = Path::new(&content_store_path);\n\n let bsp = store_path.join(\"content\");\n\n\n\n let mut hi = index::Index::load_from_file(&store_path.join(name));\n\n let bs = blockstore::new(bsp.to_str().unwrap().to_owned());\n", "file_path": "src/main.rs", "rank": 60, "score": 13.600947858908299 }, { "content": " println!(\"{}: {}\", key, e);\n\n ::std::process::exit(1);\n\n },\n\n };\n\n\n\n match matches.subcommand() {\n\n (\"store\", Some(submatches)) =>{\n\n\n\n let root_path = submatches.value_of(\"root\").unwrap();\n\n let name = submatches.value_of(\"name\").unwrap();\n\n let store_path = Path::new(&content_store_path);\n\n let bsp = store_path.join(\"content\");\n\n\n\n create_dir_all(&bsp);\n\n let mut bs = blockstore::new(bsp.to_str().unwrap().to_owned());\n\n\n\n let mut hi = index::from_host(OsString::from(root_path));\n\n hi.store_inodes(&mut bs);\n\n\n\n loop {\n", "file_path": "src/main.rs", "rank": 61, "score": 13.271834057345488 }, { "content": " let mut seeked = 0 as i64;\n\n loop {\n\n if let None = self.cur {\n\n match self.it.next() {\n\n None => return Ok(seeked as u64),\n\n Some(r) => {\n\n self.cur = Some(r);\n\n }\n\n }\n\n }\n\n\n\n let n = start - seeked;\n\n match self.cur.as_mut().unwrap().seek(SeekFrom::Current(n)) {\n\n Err(e) => return Err(e),\n\n Ok(rs) => {\n\n seeked += rs as i64;\n\n if seeked >= start {\n\n return Ok(seeked as u64);\n\n }\n\n self.cur = None;\n", "file_path": "src/readchain.rs", "rank": 62, "score": 12.53189381337307 }, { "content": "power overwhelming\n\n===================\n\n\n\n\n\nA resarch stage project by Korhal to enable content addressable storage of system images and embedded applications.\n\n\n\n\n\nThis is nowhere usable yet, so here's just a quick demo:\n\n\n\n```\n\n$ cargo build --release\n\n$ export ARCHON_STORE=/tmp/store\n\n$ ./target/release/archon store . myspace\n\nloading content from /tmp/store/content\n\ndone serializing 19921 inodes to 123452 blocks (48987 new)\n\ndone serializing index to 2632 blocks (2231 new)\n\ndone serializing index to 35 blocks (35 new)\n\ndone serializing index to 1 blocks (1 new)\n\ninput stored into index .. with name \"myspace\"\n\n$\n\n$ mkdir /tmp/mnt\n\n$ ./target/release/archon mount myspace /tmp/bla\n\nloading content from /tmp/store/content\n\nmounting index \"myspace\" with 19922 inodes to /tmp/bla\n\n\n\n$ ls /tmp/bla\n\n$ Cargo.toml src target test ...\n\n\n\n```\n\n\n\n\n", "file_path": "README.md", "rank": 63, "score": 9.95894530731 }, { "content": "mod fs;\n\nmod index;\n\nmod readchain;\n\nmod serializer;\n\n\n\nuse clap::{Arg, App, SubCommand, AppSettings};\n\nuse hex::ToHex;\n\nuse std::env;\n\nuse std::ffi::OsStr;\n\nuse std::ffi::OsString;\n\nuse std::fs::{create_dir_all};\n\nuse std::path::Path;\n\nuse url::{Url};\n\nuse elfkit::types;\n\n\n", "file_path": "src/main.rs", "rank": 64, "score": 9.67511287133374 }, { "content": " while let Some(_) = hi.c.as_ref() {\n\n hi = hi.load_index(&bs);\n\n }\n\n\n\n println!(\"mounting index {:?} with {} inodes to {}\", name, hi.i.len(), target_path);\n\n\n\n let fs = fs::Fuse::new(&hi, &bs);\n\n let fuse_args: Vec<&OsStr> = vec![&OsStr::new(\"-o\"), &OsStr::new(\"auto_unmount\")];\n\n fuse::mount(fs, &target_path, &fuse_args).unwrap();\n\n }\n\n (\"rm\", Some(submatches)) =>{\n\n let name = submatches.value_of(\"name\").unwrap();\n\n\n\n },\n\n _ => unreachable!()\n\n }\n\n\n\n\n\n\n\n //let j = serde_json::to_string(&hi).unwrap();\n", "file_path": "src/main.rs", "rank": 65, "score": 9.52831981396779 }, { "content": " .takes_value(true)\n\n .index(1)\n\n )\n\n .arg(Arg::with_name(\"target\")\n\n .required(true)\n\n .help(\"path where to mount image\")\n\n .takes_value(true)\n\n .index(2)\n\n )\n\n )\n\n .get_matches();\n\n\n\n\n\n let key = \"ARCHON_STORE\";\n\n let content_store_path = match env::var(key) {\n\n Ok(val) => {\n\n println!(\"{}: {:?}\", key, val);\n\n val\n\n },\n\n Err(e) => {\n", "file_path": "src/main.rs", "rank": 66, "score": 8.25415433702046 }, { "content": "extern crate clap;\n\nextern crate digest;\n\nextern crate fuse;\n\nextern crate generic_array;\n\nextern crate hex;\n\nextern crate libc;\n\nextern crate pbr;\n\nextern crate rmp_serde as rmps;\n\nextern crate rollsum;\n\nextern crate serde;\n\n#[macro_use] extern crate serde_derive;\n\nextern crate sha2;\n\nextern crate tempfile;\n\nextern crate time;\n\nextern crate url;\n\n#[macro_use] extern crate elfkit;\n\nextern crate byteorder;\n\n\n\nmod blockstore;\n\nmod chunker;\n", "file_path": "src/main.rs", "rank": 67, "score": 6.76766131015991 }, { "content": " }\n\n }\n\n }\n\n },\n\n };\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nuse std::fs::File;\n\n\n\n#[test]\n", "file_path": "src/readchain.rs", "rank": 68, "score": 5.050070260093558 }, { "content": " .about(\"write image into content store\")\n\n .arg(Arg::with_name(\"root\")\n\n .required(true)\n\n .help(\"build image from this path\")\n\n .takes_value(true)\n\n .index(1)\n\n )\n\n .arg(Arg::with_name(\"name\")\n\n .required(true)\n\n .help(\"name of index\")\n\n .takes_value(true)\n\n .index(2)\n\n )\n\n )\n\n .subcommand(\n\n SubCommand::with_name(\"mount\")\n\n .about(\"fuse mount image at a given destination\")\n\n .arg(Arg::with_name(\"name\")\n\n .required(true)\n\n .help(\"name of index\")\n", "file_path": "src/main.rs", "rank": 69, "score": 3.8758087261107623 }, { "content": " //println!(\"{}\", j);\n\n\n\n\n\n return;\n\n\n\n\n\n}\n\n\n\n\n", "file_path": "src/main.rs", "rank": 70, "score": 2.952676866204677 } ]
Rust
crates/category/tests/product_bug.rs
Nertsal/categories
3fd0a8b4f5c9a3df78c35126bb4af3a9ed10bae3
use category::{axioms, Bindings}; use category::{prelude::*, Equality}; use std::fmt::Debug; #[test] fn test_bug() { let rule_product = axioms::rule_product::<&str>().unwrap(); let mut category = Category::new(); let object_a = category.new_object(Object { tags: vec![], inner: (), }); let result = category.apply_rule(&rule_product, Bindings::new(), |_| (), |_, _| (), |_| ()); assert!(result.1); print_category(&category); assert_eq!(2, category.objects.len()); assert_eq!(6, category.morphisms.len()); assert_eq!(6, category.equalities.len()); for action in result.0 { category.action_do(action); } assert_eq!(1, category.objects.len()); assert_eq!(0, category.morphisms.len()); assert_eq!(0, category.equalities.len()); let object_1 = category.new_object(Object { tags: vec![ObjectTag::Terminal], inner: (), }); let object_ax1 = category.new_object(Object { tags: vec![ObjectTag::Product(object_a, object_1)], inner: (), }); let morphism_id_a = category .new_morphism(Morphism { connection: MorphismConnection::Regular { from: object_a, to: object_a, }, tags: vec![MorphismTag::Identity(object_a)], inner: (), }) .unwrap(); let _morphism_id_ax1 = category .new_morphism(Morphism { connection: MorphismConnection::Regular { from: object_ax1, to: object_ax1, }, tags: vec![MorphismTag::Identity(object_ax1)], inner: (), }) .unwrap(); let morphism_a_1 = category .new_morphism(Morphism { connection: MorphismConnection::Regular { from: object_a, to: object_1, }, tags: vec![MorphismTag::Unique], inner: (), }) .unwrap(); let morphism_ax1_1 = category .new_morphism(Morphism { connection: MorphismConnection::Regular { from: object_ax1, to: object_1, }, tags: vec![MorphismTag::Unique, MorphismTag::ProductP2], inner: (), }) .unwrap(); let morphism_a_ax1 = category .new_morphism(Morphism { connection: MorphismConnection::Regular { from: object_a, to: object_ax1, }, tags: vec![], inner: (), }) .unwrap(); let morphism_ax1_a = category .new_morphism(Morphism { connection: MorphismConnection::Regular { from: object_ax1, to: object_a, }, tags: vec![MorphismTag::ProductP1], inner: (), }) .unwrap(); let morphism_ax1_ax1 = category .new_morphism(Morphism { connection: MorphismConnection::Regular { from: object_ax1, to: object_ax1, }, tags: vec![MorphismTag::Composition { first: morphism_ax1_a, second: morphism_a_ax1, }], inner: (), }) .unwrap(); let _morphism_ax1_ax1_1 = category .new_morphism(Morphism { connection: MorphismConnection::Regular { from: object_ax1, to: object_1, }, tags: vec![MorphismTag::Composition { first: morphism_ax1_ax1, second: morphism_ax1_1, }], inner: (), }) .unwrap(); category.equalities.new_equality( Equality::new( vec![morphism_ax1_a, morphism_a_ax1, morphism_ax1_1], vec![morphism_ax1_1], ) .unwrap(), (), ); category.equalities.new_equality( Equality::new( vec![morphism_ax1_a], vec![morphism_ax1_a, morphism_a_ax1, morphism_ax1_a], ) .unwrap(), (), ); category.equalities.new_equality( Equality::new(vec![morphism_id_a], vec![morphism_a_ax1, morphism_ax1_a]).unwrap(), (), ); category.equalities.new_equality( Equality::new(vec![morphism_a_1], vec![morphism_a_ax1, morphism_ax1_1]).unwrap(), (), ); print_category(&category); assert_eq!(3, category.objects.len()); assert_eq!(8, category.morphisms.len()); assert_eq!(4, category.equalities.len()); let bindings = Bindings::from_objects([("A", object_a), ("B", object_1)]); let result = category.apply_rule(&rule_product, bindings, |_| (), |_, _| (), |_| ()); assert!(result.1); print_category(&category); assert_eq!(3, category.objects.len()); assert_eq!(8, category.morphisms.len()); assert_eq!(5, category.equalities.len()); } fn print_category<O: Debug, M: Debug, E: Debug>(category: &Category<O, M, E>) { println!("\n----- Category -----"); println!("Objects:"); for (id, object) in category.objects.iter() { println!("{:4} - {:?}", id.raw(), object) } println!("Morphisms:"); for (id, morphism) in category.morphisms.iter() { println!("{:4} - {:?}", id.raw(), morphism) } println!("Equalities:"); for (equality, inner) in category.equalities.iter() { println!( " {:?} = {:?}: {inner:?}", equality.left(), equality.right() ); } println!(""); }
use category::{axioms, Bindings}; use category::{prelude::*, Equality}; use std::fmt::Debug; #[test] fn test_bug() { let rule_product = axioms::rule_product::<&str>().unwrap(); let mut category = Category::new(); let object_a = category.new_object(Object { tags: vec![], inner: (), }); let result = category.apply_rule(&rule_product, Bindings::new(), |_| (), |_, _| (), |_| ()); assert!(result.1); print_category(&category); assert_eq!(2, category.objects.len()); assert_eq!(6, category.morphisms.len()); assert_eq!(6, category.equalities.len()); for action in result.0 { category.action_do(action); } assert_eq!(1, category.objects.len()); assert_eq!(0, category.morphisms.len()); assert_eq!(0, category.equalities.len()); let object_1 = category.new_object(Object { tags: vec![ObjectTag::Terminal], inner: (), }); let object_ax1 = category.new_object(Object { tags: vec![ObjectTag::Product(object_a, object_1)], inner: (), }); let morphism_id_a = category .new_morphism(Morphism { connection: MorphismConnection::Regular { from: object_a, to: object_a, }, tags: vec![MorphismTag::Identity(object_a)], inner: (), }) .unwrap(); let _morphism_id_ax1 = category .new_morphism(Morphism { connection: MorphismConnection::Regular { from: object_ax1, to: object_ax1, }, tags: vec![MorphismTag::Identity(object_ax1)], inner: (), }) .unwrap(); let morphism_a_1 = category .new_morphism(Morphism { connection: MorphismConnection::Regular { from: object_a, to: object_1, }, tags: vec![MorphismTag::Unique], inner: (), }) .unwrap(); let morphism_ax1_1 = category .new_morphism(Morphism { connection: MorphismConnection::Regular { from: object_ax1, to: object_1, }, tags: vec![MorphismTag::Unique, MorphismTag::ProductP2], inner: (), }) .unwrap(); let morphism_a_ax1 = category .new_morphism(Morphism { connection: MorphismConnection::Regular { from: object_a, to: object_ax1, }, tags: vec![], inner: (), }) .unwrap(); let morphism_ax1_a = category .new_morphism(Morphism { connection: MorphismConnection::Regular { from: object_ax1, to: object_a, }, tags: vec![MorphismTag::ProductP1], inner: (), }) .unwrap(); let morphism_ax1_ax1 = category .new_morphism(Morphism { connection: MorphismConnection::Regular { from: object_ax1, to: object_ax1, }, tags: vec![MorphismTag::Composition { first: morphism_ax1_a, second: morphism_a_ax1, }], inner: (), }) .unwrap(); let _morphism_ax1_ax1_1 = category .new_morphism(Morphism { connection: MorphismConnection::Regular { from: object_ax1, to: object_1, }, tags: vec![MorphismTag::Composition { first: morphism_ax1_ax1, second: morphism_ax1_1, }], inner: (), }) .unwrap(); category.equalities.new_equality( Equality::new( vec![morphism_ax1_a, morphism_a_ax1, morphism_ax1_1], vec![morphism_ax1_1], ) .unwrap(), (), ); category.equalities.new_equality( Equality::new( vec![morphism_ax1_a], vec![morphism_ax1_a, morphism_a_ax1, morphism_ax1_a], ) .unwrap(), (), ); category.equalities.new_equality( Equality::new(vec![morphism_id_a], vec![morphism_a_ax1, morphism_ax1_a]).unwrap(), (), ); category.equalities.new_equality( Equality::new(vec![morphism_a_1], vec![morphism_a_ax1, morphism_ax1_1]).unwrap(), (), ); print_category(&category); assert_eq!(3, category.objects.len()); assert_eq!(8, category.morphisms.len()); assert_eq!(4, category.equalities.len()); let bindings = Bindings::from_objects([("A", object_a), ("B", object_1)]); let result = category.apply_rule(&rule_product, bindings, |_| (), |_, _| (), |_| ()); assert!(result.1); print_category(&category); assert_eq!(3, category.objects.len()); assert_eq!(8, category.morphisms.len()); assert_eq!(5, category.equalities.len()); } fn print_category<O: Debug, M: Debug, E: Debug>(category: &Category<O, M, E>) { println!("\n----- Category -----"); println!("Objects:"); for (id, object) in category.objects.iter() { println!("{:4} - {:?}", id.raw(), object) } println!("Morphisms:");
for (id, morphism) in category.morphisms.iter() { println!("{:4} - {:?}", id.raw(), morphism) } println!("Equalities:"); for (equality, inner) in category.equalities.iter() { println!( " {:?} = {:?}: {inner:?}", equality.left(), equality.right() ); } println!(""); }
function_block-function_prefix_line
[ { "content": "fn print_category<O: Debug, M: Debug, E: Debug>(category: &Category<O, M, E>) {\n\n println!(\"\\n----- Category -----\");\n\n println!(\"Objects:\");\n\n for (id, object) in category.objects.iter() {\n\n println!(\"{:4} - {:?}\", id.raw(), object)\n\n }\n\n println!(\"Morphisms:\");\n\n for (id, morphism) in category.morphisms.iter() {\n\n println!(\"{:4} - {:?}\", id.raw(), morphism)\n\n }\n\n println!(\"Equalities:\");\n\n for (equality, inner) in category.equalities.iter() {\n\n println!(\n\n \" {:?} = {:?}: {inner:?}\",\n\n equality.left(),\n\n equality.right()\n\n );\n\n }\n\n println!(\"\");\n\n}\n", "file_path": "crates/category/tests/product.rs", "rank": 0, "score": 230603.960146163 }, { "content": "fn print_category<O: Debug, M: Debug, E: Debug>(category: &Category<O, M, E>) {\n\n println!(\"\\n----- Category -----\");\n\n println!(\"Objects:\");\n\n for (id, object) in category.objects.iter() {\n\n println!(\"{:4} - {:?}\", id.raw(), object)\n\n }\n\n println!(\"Morphisms:\");\n\n for (id, morphism) in category.morphisms.iter() {\n\n println!(\"{:4} - {:?}\", id.raw(), morphism)\n\n }\n\n println!(\"Equalities:\");\n\n for (equality, inner) in category.equalities.iter() {\n\n println!(\n\n \" {:?} = {:?}: {inner:?}\",\n\n equality.left(),\n\n equality.right()\n\n );\n\n }\n\n println!(\"\");\n\n}\n", "file_path": "crates/category/tests/substitution.rs", "rank": 1, "score": 230603.960146163 }, { "content": "fn print_category<O: Debug, M: Debug, E: Debug>(category: &Category<O, M, E>) {\n\n println!(\"\\n----- Category -----\");\n\n println!(\"Objects:\");\n\n for (id, object) in category.objects.iter() {\n\n println!(\"{:4} - {:?}\", id.raw(), object)\n\n }\n\n println!(\"Morphisms:\");\n\n for (id, morphism) in category.morphisms.iter() {\n\n println!(\"{:4} - {:?}\", id.raw(), morphism)\n\n }\n\n println!(\"Equalities:\");\n\n for (equality, inner) in category.equalities.iter() {\n\n println!(\n\n \" {:?} = {:?}: {inner:?}\",\n\n equality.left(),\n\n equality.right()\n\n );\n\n }\n\n println!(\"\");\n\n}\n", "file_path": "crates/category/tests/substitution_bug.rs", "rank": 3, "score": 228087.66758552144 }, { "content": "fn print_category<O: Debug, M: Debug, E: Debug>(category: &Category<O, M, E>) {\n\n println!(\"\\n----- Category -----\");\n\n println!(\"Objects:\");\n\n for (id, object) in category.objects.iter() {\n\n println!(\"{:4} - {:?}\", id.raw(), object)\n\n }\n\n println!(\"Morphisms:\");\n\n for (id, morphism) in category.morphisms.iter() {\n\n println!(\"{:4} - {:?}\", id.raw(), morphism)\n\n }\n\n println!(\"Equalities:\");\n\n for (equality, inner) in category.equalities.iter() {\n\n println!(\n\n \" {:?} = {:?}: {inner:?}\",\n\n equality.left(),\n\n equality.right()\n\n );\n\n }\n\n println!(\"\");\n\n}\n", "file_path": "crates/category/examples/simple.rs", "rank": 4, "score": 204900.39134912647 }, { "content": "fn remove_ids<O, M, E>(\n\n morphisms: Vec<MorphismId>,\n\n category: &Category<O, M, E>,\n\n) -> Vec<MorphismId> {\n\n let len = morphisms.len();\n\n let mut morphisms = morphisms.into_iter();\n\n let mut result = (0..len - 1)\n\n .map(|_| morphisms.next().unwrap())\n\n .filter(|morphism| check_identity(morphism, category).is_none())\n\n .collect::<Vec<_>>();\n\n let morphism = morphisms.next().unwrap();\n\n if result.is_empty() || check_identity(&morphism, category).is_none() {\n\n result.push(morphism);\n\n }\n\n result\n\n}\n\n\n", "file_path": "crates/category/src/rule/constraint/equality.rs", "rank": 5, "score": 184228.06608308622 }, { "content": "fn is_isomorphism<O, M, E>(morphism: &MorphismId, category: &Category<O, M, E>) -> bool {\n\n match &category.morphisms.get(morphism).unwrap().connection {\n\n MorphismConnection::Isomorphism(_, _) => true,\n\n MorphismConnection::Regular { .. } => false,\n\n }\n\n}\n\n\n", "file_path": "crates/category/src/rule/constraint/equality.rs", "rank": 6, "score": 177807.43460912735 }, { "content": "fn solve_equality<O, M, E>(\n\n mut left: Vec<MorphismId>,\n\n mut right: Vec<MorphismId>,\n\n category: &Category<O, M, E>,\n\n) -> bool {\n\n if left == right {\n\n return true;\n\n }\n\n if left.len() < right.len() {\n\n std::mem::swap(&mut left, &mut right);\n\n }\n\n\n\n for equality in category.equalities.iter_equalities() {\n\n let mut left_eq = equality.left();\n\n let mut right_eq = equality.right();\n\n if left_eq.len() < right_eq.len() {\n\n std::mem::swap(&mut left_eq, &mut right_eq);\n\n }\n\n\n\n if let Some(new_left) = apply_equality(&left, left_eq, right_eq) {\n", "file_path": "crates/category/src/rule/constraint/equality.rs", "rank": 7, "score": 163860.32534688056 }, { "content": "fn check_equality<O, M, E, L: Label>(\n\n left: impl IntoIterator<Item = (L, MorphismId)>,\n\n right: impl IntoIterator<Item = (L, MorphismId)>,\n\n category: &Category<O, M, E>,\n\n) -> Option<Bindings<L>> {\n\n let mut bindings = Bindings::new();\n\n let left = left.into_iter().fold(Vec::new(), |mut acc, (label, id)| {\n\n bindings.bind_morphism(label, id);\n\n acc.push(id);\n\n acc\n\n });\n\n let right = right.into_iter().fold(Vec::new(), |mut acc, (label, id)| {\n\n bindings.bind_morphism(label, id);\n\n acc.push(id);\n\n acc\n\n });\n\n\n\n if left\n\n .iter()\n\n .chain(right.iter())\n", "file_path": "crates/category/src/rule/constraint/equality.rs", "rank": 8, "score": 154429.9862863645 }, { "content": "fn check_identity<O, M, E>(\n\n morphism: &MorphismId,\n\n category: &Category<O, M, E>,\n\n) -> Option<ObjectId> {\n\n category\n\n .morphisms\n\n .get(morphism)\n\n .unwrap()\n\n .tags\n\n .iter()\n\n .find_map(|tag| match tag {\n\n &MorphismTag::Identity(object) => Some(object),\n\n _ => None,\n\n })\n\n}\n\n\n", "file_path": "crates/category/src/rule/constraint/equality.rs", "rank": 9, "score": 154186.7915110076 }, { "content": "fn check_composability<O, M, E>(\n\n morphisms: impl IntoIterator<Item = MorphismId>,\n\n category: &Category<O, M, E>,\n\n) -> bool {\n\n let mut morphisms = morphisms.into_iter();\n\n\n\n let mut from = match morphisms.next() {\n\n None => return true,\n\n Some(id) => match category.morphisms.get(&id) {\n\n None => return false,\n\n Some(morphism) => *morphism.connection.end_points()[1],\n\n },\n\n };\n\n\n\n for morphism in morphisms {\n\n match category.morphisms.get(&morphism) {\n\n None => return false,\n\n Some(morphism) => {\n\n if from != *morphism.connection.end_points()[0] {\n\n return false;\n\n } else {\n\n from = *morphism.connection.end_points()[1];\n\n }\n\n }\n\n }\n\n }\n\n\n\n true\n\n}\n\n\n", "file_path": "crates/category/src/rule/constraint/equality.rs", "rank": 10, "score": 154186.7915110076 }, { "content": "#[test]\n\nfn test_bug() {\n\n // Get rules\n\n let rule_unique = axioms::rule_unique::<&str>().unwrap();\n\n\n\n // Build the initial category\n\n let mut category = CategoryBuilder::<_, _, _, &str>::new()\n\n .object(\"A\", vec![], ())\n\n .object(\"B\", vec![], ())\n\n .morphism(\"id\", \"A\", \"A\", vec![MorphismTag::Identity(\"A\")], ())\n\n .morphism(\"f\", \"A\", \"B\", vec![], ())\n\n .morphism(\"g\", \"B\", \"A\", vec![], ())\n\n .equality(vec![\"f\", \"g\"], vec![\"id\"], ())\n\n .morphism(\"m\", \"B\", \"A\", vec![MorphismTag::Unique], ())\n\n .build();\n\n\n\n print_category(&category);\n\n assert_eq!(2, category.objects.len());\n\n assert_eq!(4, category.morphisms.len());\n\n assert_eq!(1, category.equalities.len());\n\n\n", "file_path": "crates/category/tests/substitution_bug.rs", "rank": 11, "score": 152470.13853529366 }, { "content": "pub fn constraint_object<'a, O, M, E, L: Label>(\n\n label: &'a L,\n\n tags: &'a [ObjectTag<L>],\n\n bindings: &'a Bindings<L>,\n\n category: &'a Category<O, M, E>,\n\n) -> Box<dyn Iterator<Item = Bindings<L>> + 'a> {\n\n match bindings.get_object(label) {\n\n Some(object) => {\n\n let object = category\n\n .objects\n\n .get(&object)\n\n .expect(\"Invalid bindings: unknown object id\"); // TODO: return an error\n\n object_matches(tags, object, bindings).map_or(Box::new(vec![].into_iter()), |binds| {\n\n Box::new(std::iter::once(binds))\n\n })\n\n }\n\n None => Box::new(category.objects.iter().filter_map(|(&id, object)| {\n\n object_matches(tags, object, bindings).map(|mut binds| {\n\n binds.bind_object(label.clone(), id);\n\n binds\n\n })\n\n })),\n\n }\n\n}\n\n\n", "file_path": "crates/category/src/rule/constraint/object.rs", "rank": 13, "score": 148828.17869615863 }, { "content": "pub fn constraint_equality<'a, O, M, E, L: Label>(\n\n equality: &'a Equality<L>,\n\n bindings: &'a Bindings<L>,\n\n category: &'a Category<O, M, E>,\n\n) -> Box<dyn Iterator<Item = Bindings<L>> + 'a> {\n\n let [left_constraints, right_constraints] =\n\n [equality.left(), equality.right()].map(|eq_side| {\n\n eq_side\n\n .iter()\n\n .map(|label| (label.clone(), bindings.get_morphism(label)))\n\n });\n\n\n\n let right_constraints = right_constraints.collect::<Vec<_>>();\n\n\n\n Box::new(\n\n find_possibilities(left_constraints, category).flat_map(move |left| {\n\n find_possibilities(right_constraints.clone().into_iter(), category)\n\n .filter_map(move |right| check_equality(left.clone(), right, category))\n\n }),\n\n )\n\n}\n\n\n", "file_path": "crates/category/src/rule/constraint/equality.rs", "rank": 14, "score": 148626.40246777202 }, { "content": "fn get_axioms<'a, T: Label + From<&'a str>>() -> Result<Vec<Rule<T>>, RuleConstructionError> {\n\n Ok(vec![\n\n rule_identity()?,\n\n rule_composition()?,\n\n rule_terminal()?,\n\n // rule_initial()?,\n\n rule_product()?,\n\n rule_unique()?,\n\n rule_isomorphism()?,\n\n ])\n\n}\n", "file_path": "crates/category/src/rule/axioms/mod.rs", "rank": 15, "score": 142862.66951226562 }, { "content": "fn get_object_or_new<O, M, E, L: Label>(\n\n label: &L,\n\n category: &mut Category<O, M, E>,\n\n bindings: &mut Bindings<L>,\n\n actions_handler: impl FnMut(Vec<Action<O, M, E>>),\n\n object_constructor: impl Fn(Vec<ObjectTag<&Object<O>>>) -> O,\n\n) -> ObjectId {\n\n bindings.get_object(label).unwrap_or_else(|| {\n\n create_vertices(\n\n category,\n\n bindings,\n\n actions_handler,\n\n vec![Object {\n\n tags: vec![],\n\n inner: object_constructor(vec![]),\n\n }],\n\n vec![label.clone()],\n\n )[0]\n\n })\n\n}\n", "file_path": "crates/category/src/rule/constraint/apply.rs", "rank": 16, "score": 142137.72689290374 }, { "content": "fn find_possibilities<'a, O, M, E, L: 'a + Clone>(\n\n mut constraints: impl Iterator<Item = (L, Option<MorphismId>)> + 'a,\n\n category: &'a Category<O, M, E>,\n\n) -> Box<dyn Iterator<Item = VecDeque<(L, MorphismId)>> + 'a> {\n\n match constraints.next() {\n\n None => Box::new(std::iter::once(VecDeque::new())),\n\n Some((label, Some(id))) => Box::new(find_possibilities(constraints, category).map(\n\n move |mut tail| {\n\n tail.push_front((label.clone(), id));\n\n tail\n\n },\n\n )),\n\n Some((label, None)) => {\n\n let head = category\n\n .morphisms\n\n .iter()\n\n .map(|(&id, _)| (label.clone(), id))\n\n .collect::<Vec<_>>();\n\n Box::new(\n\n find_possibilities(constraints, category).flat_map(move |tail| {\n", "file_path": "crates/category/src/rule/constraint/equality.rs", "rank": 17, "score": 141345.86182286608 }, { "content": "pub fn object_name_from_tag_label(tag: &ObjectTag<&str>) -> Option<String> {\n\n match &tag {\n\n ObjectTag::Product(a, b) => label_operation(a, b, \"x\"),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "src/game_state/graph_types.rs", "rank": 18, "score": 139710.2072557409 }, { "content": "fn connections(category: &Category, hidden_morphisms: &HashSet<MorphismId>) -> Connections {\n\n let mut connections = Connections::new();\n\n\n\n for (&id, _) in category\n\n .objects\n\n .iter()\n\n .filter(|(_, object)| !object.inner.is_anchor)\n\n {\n\n let neighbours = category\n\n .neighbours(id)\n\n .map(|id| BodyId::Object { id })\n\n .collect();\n\n connections.insert(BodyId::Object { id }, neighbours);\n\n }\n\n\n\n for (&id, morphism) in category\n\n .morphisms\n\n .iter()\n\n .filter(|(id, _)| !hidden_morphisms.contains(id))\n\n {\n", "file_path": "src/game_state/update.rs", "rank": 19, "score": 138517.53302204196 }, { "content": "fn tag_matches<L: Label>(\n\n constraint: &ObjectTag<L>,\n\n tags: &[ObjectTag],\n\n bindings: &Bindings<L>,\n\n) -> Option<Bindings<L>> {\n\n match constraint {\n\n ObjectTag::Initial => tags.iter().find_map(|tag| {\n\n if let ObjectTag::Initial = tag {\n\n Some(Bindings::new())\n\n } else {\n\n None\n\n }\n\n }),\n\n ObjectTag::Terminal => tags.iter().find_map(|tag| {\n\n if let ObjectTag::Terminal = tag {\n\n Some(Bindings::new())\n\n } else {\n\n None\n\n }\n\n }),\n", "file_path": "crates/category/src/rule/constraint/object.rs", "rank": 20, "score": 134452.5764164021 }, { "content": "pub fn morphism_name_from_tag(tag: &MorphismTag, category: &Category) -> Option<String> {\n\n match tag {\n\n MorphismTag::Identity(_) => Some(format!(\"id\")),\n\n MorphismTag::Isomorphism(_, _) => Some(format!(\"\")),\n\n MorphismTag::Composition { first, second } => {\n\n let mut decomposed = category::util::decompose_morphism(*first, category);\n\n let second = category::util::decompose_morphism(*second, category);\n\n decomposed.extend(second);\n\n let mut result = String::new();\n\n let to_label = |id| {\n\n category\n\n .morphisms\n\n .get(&id)\n\n .unwrap()\n\n .inner\n\n .label\n\n .clone()\n\n .unwrap_or_else(|| format!(\"{}\", id.raw()))\n\n };\n\n\n", "file_path": "src/game_state/graph_types.rs", "rank": 21, "score": 132528.18271810038 }, { "content": "type CategoryAction = category::Action<Point, Arrow, Equality>;\n", "file_path": "src/game_state/mod.rs", "rank": 22, "score": 125928.1696745967 }, { "content": "#[test]\n\nfn test_product() {\n\n // Build the initial category\n\n let mut category = Category::new();\n\n\n\n let object_a = category.new_object(Object {\n\n tags: vec![],\n\n inner: (),\n\n });\n\n let object_b = category.new_object(Object {\n\n tags: vec![],\n\n inner: (),\n\n });\n\n let bindings = Bindings::from_objects(vec![(\"A\", object_a), (\"B\", object_b)]);\n\n\n\n print_category(&category);\n\n\n\n // Make sure the build is correct\n\n assert_eq!(2, category.objects.len());\n\n assert_eq!(0, category.morphisms.len());\n\n assert_eq!(0, category.equalities.len());\n", "file_path": "crates/category/tests/product.rs", "rank": 23, "score": 124824.45381954113 }, { "content": "#[test]\n\nfn test_substitution() {\n\n // Build the initial category\n\n let mut category = CategoryBuilder::<_, _, _, &str>::new()\n\n .object(\"A\", vec![], ())\n\n .object(\"B\", vec![], ())\n\n .morphism(\"f\", \"A\", \"B\", vec![], ())\n\n .morphism(\"id\", \"A\", \"A\", vec![], ())\n\n .morphism(\n\n \"f.id\",\n\n \"A\",\n\n \"B\",\n\n vec![MorphismTag::Composition {\n\n first: \"id\",\n\n second: \"f\",\n\n }],\n\n (),\n\n )\n\n .morphism(\"m\", \"A\", \"B\", vec![MorphismTag::Unique], ())\n\n .build();\n\n\n", "file_path": "crates/category/tests/substitution.rs", "rank": 24, "score": 124824.45381954113 }, { "content": "#[test]\n\nfn test_find() {\n\n let category = CategoryBuilder::<(), (), (), &str>::new()\n\n .object(\"A\", vec![], ())\n\n .object(\"B\", vec![], ())\n\n .object(\"AxB\", vec![ObjectTag::Product(\"A\", \"B\")], ())\n\n .morphism(\"p1\", \"AxB\", \"A\", vec![], ())\n\n .morphism(\"p2\", \"AxB\", \"B\", vec![], ())\n\n .morphism(\"id\", \"AxB\", \"AxB\", vec![MorphismTag::Identity(\"AxB\")], ())\n\n .build();\n\n\n\n let constraints = ConstraintsBuilder::<&str>::new()\n\n .object(\"A\", vec![])\n\n .object(\"B\", vec![])\n\n .object(\"AxB\", vec![ObjectTag::Product(\"A\", \"B\")])\n\n .morphism(\"p1\", \"AxB\", \"A\", vec![])\n\n .morphism(\"p2\", \"AxB\", \"B\", vec![])\n\n .object(\"C\", vec![])\n\n .morphism(\"f\", \"C\", \"A\", vec![])\n\n .morphism(\"g\", \"C\", \"B\", vec![])\n\n .morphism(\"m\", \"C\", \"AxB\", vec![])\n", "file_path": "crates/category/tests/find.rs", "rank": 25, "score": 124824.45381954113 }, { "content": "fn update_category(category: &mut Category, hide_morphisms: bool, delta_time: f32) {\n\n let hidden_morphisms = if hide_morphisms {\n\n get_hidden_morphisms(&category.equalities)\n\n } else {\n\n Default::default()\n\n };\n\n let connections = connections(&category, &hidden_morphisms);\n\n let mut bodies = bodies_collection(category, &hidden_morphisms);\n\n force_graph::apply_forces(&default(), delta_time, &mut bodies, &connections)\n\n}\n", "file_path": "src/game_state/update.rs", "rank": 26, "score": 123442.78290272556 }, { "content": "type Connections = HashMap<BodyId, Vec<BodyId>>;\n\n\n", "file_path": "src/game_state/update.rs", "rank": 27, "score": 121582.68506716614 }, { "content": "pub fn decompose_morphism<O, M, E>(\n\n morphism_id: MorphismId,\n\n category: &Category<O, M, E>,\n\n) -> Vec<MorphismId> {\n\n fn decompose<O, M, E>(\n\n morphism_id: MorphismId,\n\n morphism: &Morphism<M>,\n\n category: &Category<O, M, E>,\n\n ) -> Vec<MorphismId> {\n\n morphism\n\n .tags\n\n .iter()\n\n .find_map(|tag| match tag {\n\n &MorphismTag::Composition { first, second } => {\n\n category.morphisms.get(&first).and_then(|morphism_f| {\n\n category.morphisms.get(&second).map(|morphism_g| {\n\n let mut composition = decompose(first, morphism_f, category);\n\n composition.extend(decompose(second, morphism_g, category));\n\n composition\n\n })\n", "file_path": "crates/category/src/rule/constraint/util.rs", "rank": 28, "score": 120190.80222832024 }, { "content": "fn apply_equality(\n\n data: &[MorphismId],\n\n left_eq: &Vec<MorphismId>,\n\n right_eq: &Vec<MorphismId>,\n\n) -> Option<Vec<MorphismId>> {\n\n let mut split = 0;\n\n let mut eq_index = 0;\n\n let mut current_i = 0;\n\n loop {\n\n if current_i == data.len() {\n\n // Failed to find a match\n\n return None;\n\n }\n\n let morphism = data[current_i];\n\n if morphism == left_eq[eq_index] {\n\n // Look for a match\n\n eq_index += 1;\n\n current_i += 1;\n\n if eq_index == left_eq.len() {\n\n break;\n", "file_path": "crates/category/src/rule/constraint/equality.rs", "rank": 29, "score": 119847.70923642436 }, { "content": "fn process<'a, O, M, E, L: Label>(\n\n new_binds: impl Iterator<Item = Bindings<L>> + 'a,\n\n constraints: &'a [Constraint<L>],\n\n bindings: &'a Bindings<L>,\n\n category: &'a Category<O, M, E>,\n\n) -> impl Iterator<Item = Bindings<L>> + 'a {\n\n new_binds.flat_map(|binds| {\n\n let mut old_binds = binds.clone();\n\n old_binds.extend(bindings.clone());\n\n let binds = match category.find_candidates(constraints, &old_binds) {\n\n Some(new_binds) => new_binds\n\n .map(|mut next_binds| {\n\n next_binds.extend(binds.clone());\n\n next_binds\n\n })\n\n .collect::<Vec<_>>(),\n\n None => vec![binds],\n\n };\n\n binds\n\n })\n\n}\n", "file_path": "crates/category/src/rule/find.rs", "rank": 30, "score": 118739.13631991445 }, { "content": "pub fn rule_product<'a, T: Label + From<&'a str>>() -> Result<Rule<T>, RuleConstructionError> {\n\n RuleBuilder::new()\n\n .forall(\n\n ConstraintsBuilder::new()\n\n .object(\"A\", vec![])\n\n .object(\"B\", vec![]),\n\n )\n\n .exists(ConstraintsBuilder::new().object(\"AxB\", vec![ObjectTag::Product(\"A\", \"B\")]))\n\n .exists(ConstraintsBuilder::new().morphism(\n\n \"id\",\n\n \"AxB\",\n\n \"AxB\",\n\n vec![MorphismTag::Identity(\"AxB\")],\n\n ))\n\n .exists(\n\n ConstraintsBuilder::new()\n\n .morphism(\"p1\", \"AxB\", \"A\", vec![MorphismTag::ProductP1])\n\n .morphism(\"p2\", \"AxB\", \"B\", vec![MorphismTag::ProductP2]),\n\n )\n\n .forall(\n", "file_path": "crates/category/src/rule/axioms/product.rs", "rank": 31, "score": 117597.80460657741 }, { "content": "fn create_vertices<O, M, E, L: Label>(\n\n category: &mut Category<O, M, E>,\n\n bindings: &mut Bindings<L>,\n\n mut actions_handler: impl FnMut(Vec<Action<O, M, E>>),\n\n new_vertices: Vec<Object<O>>,\n\n new_vertices_names: Vec<L>,\n\n) -> Vec<ObjectId> {\n\n let new_vertices = new_vertices\n\n .into_iter()\n\n .map(|object| (None, object))\n\n .collect();\n\n let actions = category.action_do(Action::NewObjects(new_vertices));\n\n assert_eq!(actions.len(), 1);\n\n // Bind new vertices\n\n let new_vertices = match &actions[0] {\n\n Action::RemoveObjects(vertices) => {\n\n assert_eq!(vertices.len(), new_vertices_names.len());\n\n for (label, id) in new_vertices_names.into_iter().zip(vertices.iter().copied()) {\n\n bindings.bind_object(label, id);\n\n }\n\n vertices.clone()\n\n }\n\n _ => unreachable!(),\n\n };\n\n actions_handler(actions);\n\n new_vertices\n\n}\n\n\n", "file_path": "crates/category/src/rule/constraint/apply.rs", "rank": 32, "score": 116286.7950478885 }, { "content": "fn add_constraints<'a, O, M, E, L: 'a + Label>(\n\n rule_part: RulePart,\n\n constraints: impl IntoIterator<Item = &'a Constraint<L>>,\n\n statement: &[RuleConstruction<L>],\n\n bindings: &mut Bindings<L>,\n\n category: &mut Category<O, M, E>,\n\n object_constructor: impl Fn(RulePart, &L, &Vec<ObjectTag<L>>) -> O,\n\n morphism_constructor: impl Fn(RulePart, &L, &Vec<MorphismTag<L, L>>) -> M,\n\n equality_constructor: impl Fn(RulePart, &Equality<L>) -> E,\n\n) -> Vec<RuleInput<L>> {\n\n let object_constructor = &object_constructor;\n\n\n\n fn get_object<O, M, E, L: Label>(\n\n label: &L,\n\n rule_part: RulePart,\n\n objects: &mut HashMap<L, ObjectId>,\n\n category: &mut Category<O, M, E>,\n\n statement: &[RuleConstruction<L>],\n\n object_constructor: &impl Fn(RulePart, &L, &Vec<ObjectTag<L>>) -> O,\n\n ) -> ObjectId {\n", "file_path": "crates/category/src/rule/init.rs", "rank": 33, "score": 115059.21578732666 }, { "content": "fn draw_object(\n\n geng: &Geng,\n\n font: &Rc<geng::Font>,\n\n framebuffer: &mut ugli::Framebuffer,\n\n camera: &Camera2d,\n\n object: &Object,\n\n background_color: Color<f32>,\n\n is_selected: bool,\n\n) {\n\n // Selection\n\n if is_selected {\n\n draw_2d::Ellipse::circle(\n\n object.inner.position,\n\n object.inner.radius + SELECTED_RADIUS,\n\n SELECTED_COLOR,\n\n )\n\n .draw_2d(geng, framebuffer, camera);\n\n }\n\n\n\n // Outline\n", "file_path": "src/game_state/draw/category.rs", "rank": 34, "score": 113364.18879238272 }, { "content": "pub fn constraint_morphism<'a, O, M, E, L: Label>(\n\n label: &'a L,\n\n connection: &'a MorphismConnection<L>,\n\n tags: &'a [MorphismTag<L, L>],\n\n bindings: &'a Bindings<L>,\n\n category: &'a Category<O, M, E>,\n\n) -> Box<dyn Iterator<Item = Bindings<L>> + 'a> {\n\n match bindings.get_morphism(label) {\n\n Some(morphism) => {\n\n let morphism = category\n\n .morphisms\n\n .get(&morphism)\n\n .expect(\"Invalid bindings: unknown morphism id\"); // TODO: return an error\n\n morphism_matches(connection, tags, morphism, bindings)\n\n .map_or(Box::new(vec![].into_iter()), |binds| {\n\n Box::new(std::iter::once(binds))\n\n })\n\n }\n\n None => Box::new(category.morphisms.iter().filter_map(|(&id, morphism)| {\n\n morphism_matches(connection, tags, morphism, bindings).map(|mut binds| {\n\n binds.bind_morphism(label.clone(), id);\n\n binds\n\n })\n\n })),\n\n }\n\n}\n\n\n", "file_path": "crates/category/src/rule/constraint/morphism.rs", "rank": 35, "score": 111106.92384846613 }, { "content": "#[test]\n\nfn test_unordered() {\n\n let result: Vec<_> = constraint_unordered(vec![(\"A\", Some(0)), (\"B\", Some(0))], vec![0, 0])\n\n .unwrap()\n\n .collect();\n\n assert!(result.is_empty());\n\n}\n", "file_path": "crates/category/src/rule/constraint/util.rs", "rank": 36, "score": 110466.80020795047 }, { "content": "fn object_matches<O, L: Label>(\n\n tags: &[ObjectTag<L>],\n\n object: &Object<O>,\n\n bindings: &Bindings<L>,\n\n) -> Option<Bindings<L>> {\n\n let mut new_bindings = Bindings::new();\n\n\n\n for tag_check in tags\n\n .iter()\n\n .map(|constraint| tag_matches(constraint, &object.tags, bindings))\n\n {\n\n let binds = match tag_check {\n\n Some(binds) => binds,\n\n None => return None,\n\n };\n\n new_bindings.extend(binds);\n\n }\n\n\n\n Some(new_bindings)\n\n}\n\n\n", "file_path": "crates/category/src/rule/constraint/object.rs", "rank": 37, "score": 110156.02257042045 }, { "content": "pub fn get_hidden_morphisms(equalities: &Equalities) -> HashSet<MorphismId> {\n\n let mut hidden = HashSet::new();\n\n\n\n for (equality, _) in equalities.iter() {\n\n match (&equality.left()[..], &equality.right()[..]) {\n\n (&[f], &[g]) => {\n\n // Hide `f`\n\n if !hidden.insert(f) {\n\n // `f` has already been hidden -> hide `g`\n\n hidden.insert(g);\n\n }\n\n }\n\n _ => (),\n\n }\n\n }\n\n\n\n hidden\n\n}\n\n\n", "file_path": "src/game_state/update.rs", "rank": 38, "score": 106830.31977785306 }, { "content": "pub fn rule_axioms<'a, T: Label + From<&'a str>>() -> Vec<Rule<T>> {\n\n get_axioms().expect(\"Axioms are expected to be valid\")\n\n}\n\n\n", "file_path": "crates/category/src/rule/axioms/mod.rs", "rank": 39, "score": 106299.43127675922 }, { "content": "fn invert_statement<L: Label>(statement: &RuleStatement<L>) -> Vec<RuleStatement<L>> {\n\n let mut prelude_forall = Vec::new();\n\n let mut prelude_exists = Vec::new();\n\n let mut statements = Vec::new();\n\n\n\n let mut last_forall = None;\n\n\n\n for construction in statement {\n\n match construction {\n\n RuleConstruction::Forall(constraints) => {\n\n if let Some(forall) = last_forall.replace(constraints.clone()) {\n\n prelude_exists.extend(forall);\n\n }\n\n }\n\n RuleConstruction::Exists(constraints) => {\n\n if let Some(forall) = last_forall.take() {\n\n // Construct an inverse rule\n\n let inv_forall = invert_constraints(constraints);\n\n let mut exist = prelude_exists.clone();\n\n exist.extend(forall.clone());\n", "file_path": "crates/category/src/rule/inverse.rs", "rank": 40, "score": 105426.62835395185 }, { "content": "fn tags_matches<L: Label>(\n\n constraint: &MorphismTag<L, L>,\n\n tags: &[MorphismTag],\n\n bindings: &Bindings<L>,\n\n) -> Option<Bindings<L>> {\n\n match constraint {\n\n MorphismTag::Unique => tags.iter().find_map(|tag| match tag {\n\n MorphismTag::Unique => Some(Bindings::new()),\n\n _ => None,\n\n }),\n\n MorphismTag::ProductP1 => tags.iter().find_map(|tag| match tag {\n\n MorphismTag::ProductP1 => Some(Bindings::new()),\n\n _ => None,\n\n }),\n\n MorphismTag::ProductP2 => tags.iter().find_map(|tag| match tag {\n\n MorphismTag::ProductP2 => Some(Bindings::new()),\n\n _ => None,\n\n }),\n\n MorphismTag::Identity(constraint) => tags.iter().find_map(|tag| {\n\n if let &MorphismTag::Identity(object) = tag {\n", "file_path": "crates/category/src/rule/constraint/morphism.rs", "rank": 41, "score": 103713.01692555466 }, { "content": "pub fn default_rules() -> Vec<Rule> {\n\n ::category::axioms::rule_axioms()\n\n}\n", "file_path": "src/game_state/init/rules.rs", "rank": 42, "score": 101865.512686415 }, { "content": "pub fn rule_composition<'a, T: Label + From<&'a str>>() -> Result<Rule<T>, RuleConstructionError> {\n\n RuleBuilder::new()\n\n .forall(\n\n ConstraintsBuilder::new()\n\n .morphism(\"f\", \"A\", \"B\", vec![])\n\n .morphism(\"g\", \"B\", \"C\", vec![]),\n\n )\n\n .exists(ConstraintsBuilder::new().morphism(\n\n \"g o f\",\n\n \"A\",\n\n \"C\",\n\n vec![MorphismTag::Composition {\n\n first: \"f\",\n\n second: \"g\",\n\n }],\n\n ))\n\n .build()\n\n}\n", "file_path": "crates/category/src/rule/axioms/composition.rs", "rank": 43, "score": 99955.16849765388 }, { "content": "pub fn rule_isomorphism<'a, T: Label + From<&'a str>>() -> Result<Rule<T>, RuleConstructionError> {\n\n RuleBuilder::new()\n\n .forall(\n\n ConstraintsBuilder::new()\n\n .morphism(\"f\", \"A\", \"B\", vec![])\n\n .morphism(\"g\", \"B\", \"A\", vec![]),\n\n )\n\n .forall(\n\n ConstraintsBuilder::new()\n\n .morphism(\"id_a\", \"A\", \"A\", vec![MorphismTag::Identity(\"A\")])\n\n .morphism(\"id_b\", \"B\", \"B\", vec![MorphismTag::Identity(\"B\")])\n\n .equality(vec![\"f\", \"g\"], vec![\"id_a\"])\n\n .equality(vec![\"g\", \"f\"], vec![\"id_b\"]),\n\n )\n\n .exists(ConstraintsBuilder::new().isomorphism(\n\n \"\",\n\n \"A\",\n\n \"B\",\n\n vec![MorphismTag::Isomorphism(\"f\", \"g\")],\n\n ))\n\n .build()\n\n}\n", "file_path": "crates/category/src/rule/axioms/isomorphism.rs", "rank": 44, "score": 99955.16849765388 }, { "content": "pub fn rule_initial<'a, T: Label + From<&'a str>>() -> Result<Rule<T>, RuleConstructionError> {\n\n RuleBuilder::new()\n\n .forall(ConstraintsBuilder::new().object(\"A\", vec![]))\n\n .exists(ConstraintsBuilder::new().object(\"0\", vec![ObjectTag::Initial]))\n\n .exists(ConstraintsBuilder::new().morphism(\"\", \"0\", \"A\", vec![MorphismTag::Unique]))\n\n .build()\n\n}\n", "file_path": "crates/category/src/rule/axioms/initial.rs", "rank": 45, "score": 99955.16849765388 }, { "content": "pub fn rule_terminal<'a, T: Label + From<&'a str>>() -> Result<Rule<T>, RuleConstructionError> {\n\n RuleBuilder::new()\n\n .forall(ConstraintsBuilder::new().object(\"A\", vec![]))\n\n .exists(ConstraintsBuilder::new().object(\"1\", vec![ObjectTag::Terminal]))\n\n .exists(ConstraintsBuilder::new().morphism(\"\", \"A\", \"1\", vec![MorphismTag::Unique]))\n\n .build()\n\n}\n", "file_path": "crates/category/src/rule/axioms/terminal.rs", "rank": 46, "score": 99955.16849765388 }, { "content": "pub fn rule_unique<'a, T: Label + From<&'a str>>() -> Result<Rule<T>, RuleConstructionError> {\n\n RuleBuilder::new()\n\n .forall(ConstraintsBuilder::new().morphism(\"f\", \"A\", \"B\", vec![]))\n\n .forall(ConstraintsBuilder::new().morphism(\"m\", \"A\", \"B\", vec![MorphismTag::Unique]))\n\n .exists(ConstraintsBuilder::new().equality(vec![\"f\"], vec![\"m\"]))\n\n .build()\n\n}\n", "file_path": "crates/category/src/rule/axioms/unique.rs", "rank": 47, "score": 99955.16849765388 }, { "content": "pub fn rule_identity<'a, T: Label + From<&'a str>>() -> Result<Rule<T>, RuleConstructionError> {\n\n RuleBuilder::new()\n\n .forall(ConstraintsBuilder::new().object(\"A\", vec![]))\n\n .exists(ConstraintsBuilder::new().morphism(\n\n \"id\",\n\n \"A\",\n\n \"A\",\n\n vec![MorphismTag::Identity(\"A\")],\n\n ))\n\n .build()\n\n}\n", "file_path": "crates/category/src/rule/axioms/identity.rs", "rank": 48, "score": 99955.16849765388 }, { "content": "type Object = category::types::Object<Point>;\n", "file_path": "src/game_state/mod.rs", "rank": 49, "score": 99260.7577936897 }, { "content": "pub fn fact_category() -> Category {\n\n CategoryBuilder::<_, _, _, Label>::new()\n\n .object(\"A\", vec![], point(\"A\"))\n\n .object(\"1\", vec![ObjectTag::Terminal], point(\"1\"))\n\n .build()\n\n}\n\n\n", "file_path": "src/game_state/init/category.rs", "rank": 50, "score": 86322.28626984982 }, { "content": "pub fn goal_category() -> Category {\n\n CategoryBuilder::<_, _, _, Label>::new()\n\n .object(\"A\", vec![], point(\"A\"))\n\n .object(\"1\", vec![ObjectTag::Terminal], point(\"1\"))\n\n .object(\"Ax1\", vec![ObjectTag::Product(\"A\", \"1\")], point(\"Ax1\"))\n\n .isomorphism(\"\", \"A\", \"Ax1\", vec![], isomorphism(\"\"))\n\n .build()\n\n}\n", "file_path": "src/game_state/init/category.rs", "rank": 51, "score": 86322.28626984982 }, { "content": "pub fn draw_category(\n\n geng: &Geng,\n\n assets: &Rc<Assets>,\n\n font: &Rc<geng::Font>,\n\n framebuffer: &mut ugli::Framebuffer,\n\n camera: &Camera2d,\n\n category: &Category,\n\n background_color: Color<f32>,\n\n selection: Option<&Vec<RuleInput<Label>>>,\n\n hide_morphisms: bool,\n\n) {\n\n // Selection\n\n let mut selected_vertices = HashSet::new();\n\n let mut selected_edges = HashSet::new();\n\n if let Some(selection) = selection {\n\n for selection in selection {\n\n match selection {\n\n RuleInput::Object { id, .. } => {\n\n selected_vertices.insert(id);\n\n }\n", "file_path": "src/game_state/draw/category.rs", "rank": 52, "score": 84984.71720821972 }, { "content": "fn main() {\n\n let mut category = CategoryBuilder::<_, _, _, &str>::new()\n\n .object(\"A\", vec![], ()) // Object A\n\n .object(\"B\", vec![], ()) // Object B\n\n .morphism(\"f\", \"A\", \"B\", vec![], ()) // Morphism f A->B\n\n .build();\n\n\n\n let morphism_f = *category.morphisms.iter().next().unwrap().0;\n\n\n\n print_category(&category);\n\n\n\n // Construct identity morphisms for every object\n\n category.apply_rule(\n\n &axioms::rule_identity().unwrap(),\n\n Bindings::<&str>::new(),\n\n |_| (),\n\n |_, _| (),\n\n |_| (),\n\n );\n\n\n", "file_path": "crates/category/examples/simple.rs", "rank": 53, "score": 83681.5318529516 }, { "content": "pub fn objects_under_point(\n\n category: &Category,\n\n local_pos: Vec2<f32>,\n\n) -> impl Iterator<Item = (&ObjectId, &Object)> {\n\n category\n\n .objects\n\n .iter()\n\n .filter(move |(_, object)| (object.inner.position - local_pos).len() <= object.inner.radius)\n\n}\n\n\n", "file_path": "src/game_state/selection/select.rs", "rank": 54, "score": 82025.1658681203 }, { "content": "fn draw_morphism(\n\n geng: &Geng,\n\n font: &Rc<geng::Font>,\n\n assets: &Rc<Assets>,\n\n framebuffer: &mut ugli::Framebuffer,\n\n camera: &Camera2d,\n\n background_color: Color<f32>,\n\n category: &Category,\n\n morphism_id: MorphismId,\n\n morphism: &Morphism,\n\n is_selected: bool,\n\n) {\n\n // Find endpoints\n\n let (from, to, isomorphism) = match morphism.connection {\n\n MorphismConnection::Regular { from, to } => (from, to, false),\n\n MorphismConnection::Isomorphism(a, b) => (a, b, true),\n\n };\n\n\n\n let from = match category.objects.get(&from) {\n\n Some(from) => from,\n", "file_path": "src/game_state/draw/category.rs", "rank": 55, "score": 79953.3610723548 }, { "content": "type Equalities = category::Equalities<Equality>;\n\n\n\npub struct GameState {\n\n geng: Geng,\n\n ui_camera: PixelPerfectCamera,\n\n state: State,\n\n rules: Vec<RenderableRule>,\n\n fact_category: RenderableCategory,\n\n goal_category: RenderableCategory,\n\n graph_link: GraphLink,\n\n focused_category: FocusedCategory,\n\n dragging: Option<Dragging>,\n\n fact_selection: Option<RuleSelection>,\n\n goal_selection: Option<RuleSelection>,\n\n}\n\n\n\nimpl GameState {\n\n pub fn new(geng: &Geng, assets: &Rc<Assets>) -> Self {\n\n let state = State::default();\n\n let fact_category =\n", "file_path": "src/game_state/mod.rs", "rank": 56, "score": 73526.02927916584 }, { "content": "pub fn infer_morphism_name(morphism: &Morphism, category: &Category) -> Option<String> {\n\n morphism.inner.label.clone().or_else(|| {\n\n morphism\n\n .tags\n\n .iter()\n\n .find_map(|tag| morphism_name_from_tag(tag, category))\n\n })\n\n}\n\n\n", "file_path": "src/game_state/graph_types.rs", "rank": 57, "score": 70941.00654204751 }, { "content": "fn morphism_matches<M, L: Label>(\n\n connection: &MorphismConnection<L>,\n\n tags: &[MorphismTag<L, L>],\n\n morphism: &Morphism<M>,\n\n bindings: &Bindings<L>,\n\n) -> Option<Bindings<L>> {\n\n // Check connection\n\n let connection_check = match (connection, &morphism.connection) {\n\n (\n\n MorphismConnection::Regular {\n\n from: constraint_from,\n\n to: constraint_to,\n\n },\n\n &MorphismConnection::Regular { from, to },\n\n ) => constraint_ordered(\n\n vec![constraint_from, constraint_to]\n\n .into_iter()\n\n .map(|label| (label.clone(), bindings.get_object(label))),\n\n vec![from, to],\n\n )\n", "file_path": "crates/category/src/rule/constraint/morphism.rs", "rank": 58, "score": 70488.87105024027 }, { "content": "fn point(label: impl Into<Label>) -> Point {\n\n Point {\n\n label: label.into(),\n\n is_anchor: false,\n\n position: util::random_shift(),\n\n velocity: Vec2::ZERO,\n\n radius: POINT_RADIUS,\n\n color: Color::WHITE,\n\n }\n\n}\n\n\n", "file_path": "src/game_state/init/category.rs", "rank": 59, "score": 70344.36370253244 }, { "content": "fn isomorphism(label: impl Into<Label>) -> Arrow {\n\n Arrow::new(\n\n Some(label.into()),\n\n ARROW_ISOMORPHISM_COLOR,\n\n Vec2::ZERO,\n\n Vec2::ZERO,\n\n )\n\n}\n\n\n", "file_path": "src/game_state/init/category.rs", "rank": 60, "score": 70344.36370253244 }, { "content": "type Bindings = category::Bindings<Label>;\n", "file_path": "src/game_state/mod.rs", "rank": 61, "score": 69529.05927184474 }, { "content": "pub trait Label: std::fmt::Debug + std::hash::Hash + Eq + Clone + Ord {}\n\n\n\nimpl<T: std::fmt::Debug + std::hash::Hash + Eq + Clone + Ord> Label for T {}\n", "file_path": "crates/category/src/label.rs", "rank": 62, "score": 68311.61540062772 }, { "content": " binds.bind_morphism(label, id);\n\n binds\n\n }\n\n\n\n pub fn extend(&mut self, bindings: Self) {\n\n self.objects.extend(bindings.objects.into_iter());\n\n self.morphisms.extend(bindings.morphisms.into_iter());\n\n }\n\n\n\n pub fn bind_object(&mut self, label: L, id: ObjectId) -> Option<ObjectId> {\n\n self.objects.insert(label, id)\n\n }\n\n\n\n pub fn bind_morphism(&mut self, label: L, id: MorphismId) -> Option<MorphismId> {\n\n self.morphisms.insert(label, id)\n\n }\n\n\n\n pub fn get_object(&self, label: &L) -> Option<ObjectId> {\n\n self.objects.get(label).copied()\n\n }\n\n\n\n pub fn get_morphism(&self, label: &L) -> Option<MorphismId> {\n\n self.morphisms.get(label).copied()\n\n }\n\n}\n", "file_path": "crates/category/src/bindings.rs", "rank": 63, "score": 67776.17256796597 }, { "content": "use std::collections::HashMap;\n\n\n\nuse super::*;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Bindings<L: Label> {\n\n pub objects: HashMap<L, ObjectId>,\n\n pub morphisms: HashMap<L, MorphismId>,\n\n}\n\n\n\nimpl<L: Label> Bindings<L> {\n\n pub fn new() -> Self {\n\n Self {\n\n objects: Default::default(),\n\n morphisms: Default::default(),\n\n }\n\n }\n\n\n\n pub fn from_objects(iter: impl IntoIterator<Item = (L, ObjectId)>) -> Self {\n\n Self {\n", "file_path": "crates/category/src/bindings.rs", "rank": 64, "score": 67775.94722987693 }, { "content": " objects: iter.into_iter().collect(),\n\n morphisms: Default::default(),\n\n }\n\n }\n\n\n\n pub fn from_morphisms(iter: impl IntoIterator<Item = (L, MorphismId)>) -> Self {\n\n Self {\n\n objects: Default::default(),\n\n morphisms: iter.into_iter().collect(),\n\n }\n\n }\n\n\n\n pub fn single_object(label: L, id: ObjectId) -> Self {\n\n let mut binds = Self::new();\n\n binds.bind_object(label, id);\n\n binds\n\n }\n\n\n\n pub fn single_morphism(label: L, id: MorphismId) -> Self {\n\n let mut binds = Self::new();\n", "file_path": "crates/category/src/bindings.rs", "rank": 65, "score": 67775.13867547999 }, { "content": "impl<O, M> MorphismTag<O, M> {\n\n pub fn map<V, E, Fv: FnMut(O) -> V, Fe: FnMut(M) -> E>(\n\n self,\n\n mut fv: Fv,\n\n mut fe: Fe,\n\n ) -> MorphismTag<V, E> {\n\n match self {\n\n Self::Unique => MorphismTag::Unique,\n\n Self::ProductP1 => MorphismTag::ProductP1,\n\n Self::ProductP2 => MorphismTag::ProductP2,\n\n Self::Identity(v) => MorphismTag::Identity(fv(v)),\n\n Self::Composition { first, second } => MorphismTag::Composition {\n\n first: fe(first),\n\n second: fe(second),\n\n },\n\n Self::Isomorphism(f, g) => MorphismTag::Isomorphism(fe(f), fe(g)),\n\n }\n\n }\n\n\n\n pub fn map_borrowed<V, E, Fv: FnMut(&O) -> V, Fe: FnMut(&M) -> E>(\n", "file_path": "crates/category/src/tag.rs", "rank": 66, "score": 67726.67568587614 }, { "content": "use super::*;\n\n\n\nimpl<O> ObjectTag<O> {\n\n pub fn map<V, Fv: FnMut(O) -> V>(self, mut fv: Fv) -> ObjectTag<V> {\n\n match self {\n\n Self::Initial => ObjectTag::Initial,\n\n Self::Terminal => ObjectTag::Terminal,\n\n Self::Product(a, b) => ObjectTag::Product(fv(a), fv(b)),\n\n }\n\n }\n\n\n\n pub fn map_borrowed<V, Fv: FnMut(&O) -> V>(&self, mut fv: Fv) -> ObjectTag<V> {\n\n match self {\n\n Self::Initial => ObjectTag::Initial,\n\n Self::Terminal => ObjectTag::Terminal,\n\n Self::Product(a, b) => ObjectTag::Product(fv(a), fv(b)),\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/category/src/tag.rs", "rank": 67, "score": 67725.39467118742 }, { "content": " &self,\n\n mut fv: Fv,\n\n mut fe: Fe,\n\n ) -> MorphismTag<V, E> {\n\n match self {\n\n Self::Unique => MorphismTag::Unique,\n\n Self::ProductP1 => MorphismTag::ProductP1,\n\n Self::ProductP2 => MorphismTag::ProductP2,\n\n Self::Identity(v) => MorphismTag::Identity(fv(v)),\n\n Self::Composition { first, second } => MorphismTag::Composition {\n\n first: fe(first),\n\n second: fe(second),\n\n },\n\n Self::Isomorphism(f, g) => MorphismTag::Isomorphism(fe(f), fe(g)),\n\n }\n\n }\n\n}\n", "file_path": "crates/category/src/tag.rs", "rank": 68, "score": 67721.53664913004 }, { "content": "use super::*;\n\n\n\n#[derive(Debug, Clone)]\n\npub enum Action<O, M, E> {\n\n NewObjects(Vec<(Option<ObjectId>, Object<O>)>),\n\n ExtendObjectTags(Vec<(ObjectId, Vec<ObjectTag>)>),\n\n RemoveObjectTags(Vec<(ObjectId, Vec<ObjectTag>)>),\n\n RemoveObjects(Vec<ObjectId>),\n\n NewMorphisms(Vec<(Option<MorphismId>, Morphism<M>)>),\n\n ExtendMorphismTags(Vec<(MorphismId, Vec<MorphismTag>)>),\n\n RemoveMorphismTags(Vec<(MorphismId, Vec<MorphismTag>)>),\n\n RemoveMorphisms(Vec<MorphismId>),\n\n NewEqualities(Vec<(Equality, E)>),\n\n RemoveEqualities(Vec<Equality>),\n\n}\n\n\n\nimpl<O, M, E> Category<O, M, E> {\n\n /// Perform the action and returns the inverse action that can be used to undo the action.\n\n pub fn action_do(&mut self, action_do: Action<O, M, E>) -> Vec<Action<O, M, E>> {\n\n match action_do {\n", "file_path": "crates/category/src/action.rs", "rank": 69, "score": 67638.20530322511 }, { "content": " vec![]\n\n } else {\n\n vec![Action::ExtendObjectTags(extensions)]\n\n }\n\n }\n\n Action::RemoveMorphismTags(mut extensions) => {\n\n extensions.retain(|(id, _)| self.morphisms.contains(id));\n\n for (morphism_id, remove_tags) in &extensions {\n\n let morphism = self.morphisms.get_mut(morphism_id).unwrap(); // Check was done when retaining\n\n morphism.tags.retain(|tag| !remove_tags.contains(tag));\n\n }\n\n\n\n if extensions.is_empty() {\n\n vec![]\n\n } else {\n\n vec![Action::ExtendMorphismTags(extensions)]\n\n }\n\n }\n\n Action::RemoveObjects(objects) => {\n\n let (objects, morphisms) = objects\n", "file_path": "crates/category/src/action.rs", "rank": 70, "score": 67628.29374809742 }, { "content": " (id, tags)\n\n })\n\n .filter(|(_, tags)| !tags.is_empty())\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n if extensions.is_empty() {\n\n vec![]\n\n } else {\n\n vec![Action::RemoveMorphismTags(extensions)]\n\n }\n\n }\n\n Action::RemoveObjectTags(mut extensions) => {\n\n extensions.retain(|(id, _)| self.objects.contains(id));\n\n for (object_id, remove_tags) in &extensions {\n\n let object = self.objects.get_mut(object_id).unwrap(); // Check was done when retaining\n\n object.tags.retain(|tag| !remove_tags.contains(tag));\n\n }\n\n\n\n if extensions.is_empty() {\n", "file_path": "crates/category/src/action.rs", "rank": 71, "score": 67627.42334987137 }, { "content": " let mut undo = vec![Action::NewObjects(objects)];\n\n if !morphisms.is_empty() {\n\n undo.push(Action::NewMorphisms(morphisms));\n\n };\n\n undo\n\n }\n\n Action::RemoveMorphisms(morphisms) => {\n\n let equalities: Vec<_> = morphisms\n\n .iter()\n\n .flat_map(|&morphism| {\n\n let equals: Vec<_> = self\n\n .equalities\n\n .get_equalities_with(morphism)\n\n .cloned()\n\n .collect();\n\n let equals: Vec<_> = equals\n\n .into_iter()\n\n .map(|equality| {\n\n let inner = self.equalities.remove_equality(&equality).unwrap();\n\n (equality, inner)\n", "file_path": "crates/category/src/action.rs", "rank": 72, "score": 67626.38419233118 }, { "content": " if extensions.is_empty() {\n\n vec![]\n\n } else {\n\n vec![Action::RemoveObjectTags(extensions)]\n\n }\n\n }\n\n Action::ExtendMorphismTags(extensions) => {\n\n // Avoid duplicating tags\n\n let extensions = extensions\n\n .into_iter()\n\n .filter_map(|(id, tags)| {\n\n self.morphisms\n\n .get_mut(&id)\n\n .map(|morphism| (morphism, tags))\n\n .map(|(morphism, tags)| {\n\n let tags = tags\n\n .into_iter()\n\n .filter(|tag| !morphism.tags.contains(tag))\n\n .collect::<Vec<_>>();\n\n morphism.tags.extend(tags.clone());\n", "file_path": "crates/category/src/action.rs", "rank": 73, "score": 67623.02599163755 }, { "content": " Action::ExtendObjectTags(extensions) => {\n\n // Avoid duplicating tags\n\n let extensions = extensions\n\n .into_iter()\n\n .filter_map(|(id, tags)| {\n\n self.objects\n\n .get_mut(&id)\n\n .map(|object| (object, tags))\n\n .map(|(object, tags)| {\n\n let tags = tags\n\n .into_iter()\n\n .filter(|tag| !object.tags.contains(tag))\n\n .collect::<Vec<_>>();\n\n object.tags.extend(tags.clone());\n\n (id, tags)\n\n })\n\n .filter(|(_, tags)| !tags.is_empty())\n\n })\n\n .collect::<Vec<_>>();\n\n\n", "file_path": "crates/category/src/action.rs", "rank": 74, "score": 67622.9322335249 }, { "content": " })\n\n .collect();\n\n equals\n\n })\n\n .collect();\n\n let morphisms: Vec<_> = morphisms\n\n .into_iter()\n\n .filter_map(|id| {\n\n self.remove_morphism(id)\n\n .map(|morphism| (Some(id), morphism))\n\n })\n\n .collect();\n\n\n\n let mut undo = vec![Action::NewMorphisms(morphisms)];\n\n if !equalities.is_empty() {\n\n undo.push(Action::NewEqualities(equalities));\n\n }\n\n undo\n\n }\n\n Action::NewEqualities(equalities) => {\n", "file_path": "crates/category/src/action.rs", "rank": 75, "score": 67622.06879936 }, { "content": " let equalitites = equalities\n\n .into_iter()\n\n .map(|(equality, inner)| {\n\n let (left, right) = equality.destructure();\n\n let left = left\n\n .into_iter()\n\n .flat_map(|id| util::decompose_morphism(id, self))\n\n .collect();\n\n let right = right\n\n .into_iter()\n\n .flat_map(|id| util::decompose_morphism(id, self))\n\n .collect();\n\n let equality =\n\n Equality::new(left, right).expect(\"Failed to flatten equality\");\n\n self.equalities.new_equality(equality.clone(), inner);\n\n equality\n\n })\n\n .collect();\n\n vec![Action::RemoveEqualities(equalitites)]\n\n }\n", "file_path": "crates/category/src/action.rs", "rank": 76, "score": 67619.9439994272 }, { "content": " Action::RemoveEqualities(equalities) => {\n\n let equalities = equalities\n\n .into_iter()\n\n .filter_map(|equality| {\n\n self.equalities\n\n .remove_equality(&equality)\n\n .map(|inner| (equality, inner))\n\n })\n\n .collect();\n\n vec![Action::NewEqualities(equalities)]\n\n }\n\n }\n\n }\n\n}\n", "file_path": "crates/category/src/action.rs", "rank": 77, "score": 67619.53112014166 }, { "content": " Action::NewObjects(objects) => {\n\n let objects = objects\n\n .into_iter()\n\n .map(|(id, object)| match id {\n\n Some(id) => {\n\n let replaced = self\n\n .insert_object(object, id)\n\n .expect(\"Object ids are expected to be valid\");\n\n if replaced.is_some() {\n\n panic!(\"Cannot replace an existing object with another\");\n\n }\n\n id\n\n }\n\n None => self.new_object(object),\n\n })\n\n .collect();\n\n vec![Action::RemoveObjects(objects)]\n\n }\n\n Action::NewMorphisms(morphisms) => {\n\n let morphisms = morphisms\n", "file_path": "crates/category/src/action.rs", "rank": 78, "score": 67619.23647477358 }, { "content": " .into_iter()\n\n .filter_map(|id| {\n\n self.remove_object(id)\n\n .map(|(object, morphisms)| (id, object, morphisms))\n\n })\n\n .map(|(object_id, object, morphisms)| {\n\n let morphisms: Vec<_> = morphisms\n\n .into_iter()\n\n .map(|(id, morphism)| (Some(id), morphism))\n\n .collect();\n\n ((object_id, object), morphisms)\n\n })\n\n .fold(\n\n (Vec::new(), Vec::new()),\n\n |(mut acc_objects, mut acc_morphisms), ((object_id, object), morphisms)| {\n\n acc_objects.push((Some(object_id), object));\n\n acc_morphisms.extend(morphisms);\n\n (acc_objects, acc_morphisms)\n\n },\n\n );\n", "file_path": "crates/category/src/action.rs", "rank": 79, "score": 67619.0912759589 }, { "content": " .into_iter()\n\n .map(|(id, morphism)| {\n\n match id {\n\n Some(id) => {\n\n let replaced = self\n\n .insert_morphism(morphism, id)\n\n .expect(\"Morphism ids are expected to be valid\");\n\n if replaced.is_some() {\n\n panic!(\"Cannot replace an existing morphism with another\");\n\n }\n\n id\n\n }\n\n None => self\n\n .new_morphism(morphism)\n\n .expect(\"Objects are expected to exist\"), // TODO: do proper handling\n\n }\n\n })\n\n .collect();\n\n vec![Action::RemoveMorphisms(morphisms)]\n\n }\n", "file_path": "crates/category/src/action.rs", "rank": 80, "score": 67615.68403618077 }, { "content": "use std::collections::HashMap;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Object<T> {\n\n pub tags: Vec<ObjectTag>,\n\n pub inner: T,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum ObjectTag<O = ObjectId> {\n\n Initial,\n\n Terminal,\n\n Product(O, O),\n\n}\n\n\n\npub struct Objects<O> {\n\n objects: HashMap<ObjectId, Object<O>>,\n\n next_id: ObjectId,\n\n}\n\n\n", "file_path": "crates/category/src/object.rs", "rank": 81, "score": 67608.52045733461 }, { "content": " assert!(\n\n self.objects.insert(id, object).is_none(),\n\n \"Failed to generate new object\"\n\n );\n\n id\n\n }\n\n\n\n pub(crate) fn insert(\n\n &mut self,\n\n object: Object<T>,\n\n object_id: ObjectId,\n\n ) -> Result<Option<Object<T>>, ()> {\n\n if object_id.0 >= self.next_id.0 {\n\n return Err(());\n\n }\n\n\n\n Ok(self.objects.insert(object_id, object))\n\n }\n\n\n\n pub fn len(&self) -> usize {\n", "file_path": "crates/category/src/object.rs", "rank": 82, "score": 67600.5610475343 }, { "content": " self.objects.len()\n\n }\n\n\n\n pub fn iter(&self) -> impl Iterator<Item = (&ObjectId, &Object<T>)> {\n\n self.objects.iter()\n\n }\n\n\n\n pub fn iter_mut(&mut self) -> impl Iterator<Item = (&ObjectId, &mut Object<T>)> {\n\n self.objects.iter_mut()\n\n }\n\n\n\n pub fn get(&self, id: &ObjectId) -> Option<&Object<T>> {\n\n self.objects.get(id)\n\n }\n\n\n\n pub fn get_mut(&mut self, id: &ObjectId) -> Option<&mut Object<T>> {\n\n self.objects.get_mut(id)\n\n }\n\n\n\n pub(crate) fn remove(&mut self, id: &ObjectId) -> Option<Object<T>> {\n\n self.objects.remove(id)\n\n }\n\n\n\n pub fn contains(&self, id: &ObjectId) -> bool {\n\n self.objects.contains_key(id)\n\n }\n\n}\n", "file_path": "crates/category/src/object.rs", "rank": 83, "score": 67600.02666913935 }, { "content": "#[derive(Hash, PartialOrd, Ord, PartialEq, Eq, Debug, Clone, Copy)]\n\npub struct ObjectId(u64);\n\n\n\nimpl ObjectId {\n\n pub fn raw(&self) -> u64 {\n\n self.0\n\n }\n\n}\n\n\n\nimpl<T> Objects<T> {\n\n pub fn new() -> Self {\n\n Self {\n\n objects: HashMap::new(),\n\n next_id: ObjectId(0),\n\n }\n\n }\n\n\n\n pub(crate) fn new_object(&mut self, object: Object<T>) -> ObjectId {\n\n let id = self.next_id;\n\n self.next_id.0 += 1;\n", "file_path": "crates/category/src/object.rs", "rank": 84, "score": 67599.8800200519 }, { "content": "type Category = category::types::Category<Point, Arrow, Equality>;\n", "file_path": "src/game_state/mod.rs", "rank": 85, "score": 67463.49840841895 }, { "content": "use std::collections::HashMap;\n\n\n\nuse super::*;\n\n\n\npub struct Equalities<T> {\n\n inner: HashMap<Equality, T>,\n\n}\n\n\n\n#[derive(Debug, Clone, Hash, PartialEq, Eq)]\n\npub struct Equality<M = MorphismId> {\n\n left: Vec<M>,\n\n right: Vec<M>,\n\n}\n\n\n\nimpl<M> Equality<M> {\n\n /// Constructs a new equality and check its validity.\n\n /// May change the order (i.e. right may become left)\n\n /// to preserve equality uniqueness\n\n pub fn new(mut left: Vec<M>, mut right: Vec<M>) -> Result<Self, ()>\n\n where\n", "file_path": "crates/category/src/equalities.rs", "rank": 86, "score": 67412.24353162557 }, { "content": " }\n\n\n\n pub fn iter_inners<'a>(&'a self) -> impl Iterator<Item = &'a T> + 'a {\n\n self.inner.values()\n\n }\n\n\n\n pub fn get_equalities<'a>(\n\n &'a self,\n\n morphism: MorphismId,\n\n ) -> impl Iterator<Item = &'a Vec<MorphismId>> + 'a {\n\n self.inner.keys().filter_map(move |equality| {\n\n if equality.left.len() == 1 && equality.left[0] == morphism {\n\n Some(&equality.right)\n\n } else if equality.right.len() == 1 && equality.right[0] == morphism {\n\n Some(&equality.left)\n\n } else {\n\n None\n\n }\n\n })\n\n }\n", "file_path": "crates/category/src/equalities.rs", "rank": 87, "score": 67404.71440306486 }, { "content": " }\n\n\n\n pub fn right(&self) -> &Vec<M> {\n\n &self.right\n\n }\n\n}\n\n\n\nimpl<T> Equalities<T> {\n\n pub fn new() -> Self {\n\n Self {\n\n inner: HashMap::new(),\n\n }\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n self.inner.len()\n\n }\n\n\n\n pub fn new_equality(&mut self, equality: Equality, inner: T) {\n\n self.inner.insert(equality, inner);\n", "file_path": "crates/category/src/equalities.rs", "rank": 88, "score": 67404.69533696254 }, { "content": " }\n\n\n\n pub fn contains_equality(&self, equality: &Equality) -> bool {\n\n self.inner.contains_key(equality)\n\n }\n\n\n\n pub fn remove_equality(&mut self, equality: &Equality) -> Option<T> {\n\n self.inner.remove(equality)\n\n }\n\n\n\n pub fn drain<'a>(&'a mut self) -> impl Iterator<Item = (Equality, T)> + 'a {\n\n self.inner.drain()\n\n }\n\n\n\n pub fn iter<'a>(&'a self) -> impl Iterator<Item = (&'a Equality, &'a T)> + 'a {\n\n self.inner.iter()\n\n }\n\n\n\n pub fn iter_equalities<'a>(&'a self) -> impl Iterator<Item = &'a Equality> + 'a {\n\n self.inner.keys()\n", "file_path": "crates/category/src/equalities.rs", "rank": 89, "score": 67402.8079955727 }, { "content": "\n\n pub fn get_equalities_with<'a>(\n\n &'a self,\n\n morphism: MorphismId,\n\n ) -> impl Iterator<Item = &'a Equality> + 'a {\n\n self.inner.keys().filter(move |equality| {\n\n equality.left.contains(&morphism) || equality.right.contains(&morphism)\n\n })\n\n }\n\n}\n", "file_path": "crates/category/src/equalities.rs", "rank": 90, "score": 67401.27974915077 }, { "content": " M: Ord,\n\n {\n\n // TODO: check validity\n\n if left.len() == 0 || right.len() == 0 {\n\n return Err(());\n\n }\n\n\n\n if left.len() < right.len() || left.len() == right.len() && left > right {\n\n std::mem::swap(&mut left, &mut right);\n\n }\n\n\n\n Ok(Self { left, right })\n\n }\n\n\n\n pub fn destructure(self) -> (Vec<M>, Vec<M>) {\n\n (self.left, self.right)\n\n }\n\n\n\n pub fn left(&self) -> &Vec<M> {\n\n &self.left\n", "file_path": "crates/category/src/equalities.rs", "rank": 91, "score": 67397.59271922194 }, { "content": "\n\n // Get rules\n\n let rule_product = axioms::rule_product::<&str>().unwrap();\n\n let rule_composition = axioms::rule_composition::<&str>().unwrap();\n\n\n\n // Apply product rule\n\n category.apply_rule(&rule_product, bindings.clone(), |_| (), |_, _| (), |_| ());\n\n print_category(&category);\n\n assert_eq!(3, category.objects.len());\n\n assert_eq!(3, category.morphisms.len());\n\n assert_eq!(0, category.equalities.len());\n\n\n\n // Find morphisms f: Identity(AxB), g: AxB->A\n\n let constraints = ConstraintsBuilder::new()\n\n .object(\"AxB\", vec![ObjectTag::Product(\"A\", \"B\")])\n\n .morphism(\"m\", \"AxB\", \"AxB\", vec![])\n\n .build();\n\n let morphism_f = category\n\n .find_candidates(&constraints, &bindings)\n\n .unwrap()\n", "file_path": "crates/category/tests/product.rs", "rank": 92, "score": 67322.06496079036 }, { "content": " .next()\n\n .unwrap()\n\n .get_morphism(&\"m\")\n\n .unwrap();\n\n let constraints = ConstraintsBuilder::new()\n\n .object(\"AxB\", vec![ObjectTag::Product(\"A\", \"B\")])\n\n .morphism(\"m\", \"AxB\", \"A\", vec![])\n\n .build();\n\n let morphism_g = category\n\n .find_candidates(&constraints, &bindings)\n\n .unwrap()\n\n .next()\n\n .unwrap()\n\n .get_morphism(&\"m\")\n\n .unwrap();\n\n\n\n println!(\"f = {morphism_f:?}\");\n\n println!(\"g = {morphism_g:?}\");\n\n\n\n // Apply composition rule\n", "file_path": "crates/category/tests/product.rs", "rank": 93, "score": 67321.22182033498 }, { "content": "use category::{axioms, Bindings};\n\nuse category::{prelude::*, CategoryBuilder};\n\n\n\nuse std::fmt::Debug;\n\n\n\n#[test]\n", "file_path": "crates/category/tests/substitution.rs", "rank": 94, "score": 67318.72565133582 }, { "content": "use category::constraint::ConstraintsBuilder;\n\nuse category::prelude::*;\n\nuse category::{axioms, Bindings};\n\n\n\nuse std::fmt::Debug;\n\n\n\n#[test]\n", "file_path": "crates/category/tests/product.rs", "rank": 95, "score": 67318.36698055346 }, { "content": "use category::constraint::ConstraintsBuilder;\n\nuse category::prelude::*;\n\nuse category::{Bindings, CategoryBuilder};\n\n\n\n#[test]\n", "file_path": "crates/category/tests/find.rs", "rank": 96, "score": 67315.46834022134 }, { "content": " .equality(vec![\"m\", \"p1\"], vec![\"f\"])\n\n .equality(vec![\"m\", \"p2\"], vec![\"g\"])\n\n .build();\n\n let candidates = category\n\n .find_candidates(&constraints, &Bindings::new())\n\n .unwrap()\n\n .collect::<Vec<_>>();\n\n\n\n println!(\"Candidates for:\");\n\n println!(\" {constraints:?}\");\n\n println!(\"are:\");\n\n for (i, candidate) in candidates.iter().enumerate() {\n\n println!(\"{i:4}) {candidate:?}\");\n\n }\n\n\n\n assert_eq!(candidates.len(), 1);\n\n}\n", "file_path": "crates/category/tests/find.rs", "rank": 97, "score": 67315.40885217275 }, { "content": " category.apply_rule(\n\n &rule_composition,\n\n Bindings::from_morphisms(vec![(\"f\", morphism_f), (\"g\", morphism_g)]),\n\n |_| (),\n\n |_, _| (),\n\n |_| (),\n\n );\n\n print_category(&category);\n\n assert_eq!(3, category.objects.len());\n\n assert_eq!(4, category.morphisms.len());\n\n assert_eq!(0, category.equalities.len());\n\n\n\n // Apply product rule\n\n category.apply_rule(&rule_product, bindings, |_| (), |_, _| (), |_| ());\n\n print_category(&category);\n\n assert_eq!(3, category.objects.len());\n\n assert_eq!(4, category.morphisms.len());\n\n assert_eq!(0, category.equalities.len());\n\n}\n\n\n", "file_path": "crates/category/tests/product.rs", "rank": 98, "score": 67312.6149265056 }, { "content": " print_category(&category);\n\n\n\n // Make sure the build is correct\n\n assert_eq!(2, category.objects.len());\n\n assert_eq!(4, category.morphisms.len());\n\n assert_eq!(0, category.equalities.len());\n\n\n\n // Get rules\n\n let rule_unique = axioms::rule_unique::<&str>().unwrap();\n\n\n\n // Apply unique rule\n\n category.apply_rule(&rule_unique, Bindings::new(), |_| (), |_, _| (), |_| ());\n\n print_category(&category);\n\n assert_eq!(2, category.objects.len());\n\n assert_eq!(4, category.morphisms.len());\n\n assert_eq!(2, category.equalities.len());\n\n}\n\n\n", "file_path": "crates/category/tests/substitution.rs", "rank": 99, "score": 67311.45154474307 } ]
Rust
db/src/impls/rangestore/kvdb.rs
shogochiai/plasma-rust-framework
e72cb12d80d3e3ab080746f9fb0576a242e9d194
use crate::error::{Error, ErrorKind}; use crate::range::Range; use crate::traits::db::DatabaseTrait; use crate::traits::rangestore::RangeStore; use kvdb::{DBTransaction, KeyValueDB}; use kvdb_memorydb::{create, InMemory}; pub struct RangeDb { db: InMemory, col: u32, } impl DatabaseTrait for RangeDb { fn open(_dbname: &str) -> Self { RangeDb { db: create(8), col: 0, } } fn close(&self) {} } impl RangeDb { fn validate_range(start: u64, end: u64) -> bool { start < end } pub fn del_batch(&self, start: u64, end: u64) -> Result<Box<[Range]>, Error> { let ranges = self.get(start, end)?; let mut tr = DBTransaction::new(); for range in ranges.clone().iter() { let query = range.get_end().to_be_bytes(); tr.delete(Some(self.col), &query); } self.db.write(tr)?; if self.db.flush().is_ok() { Ok(ranges) } else { Err(Error::from(ErrorKind::Dammy)) } } pub fn put_batch(&self, ranges: &[Range]) -> Result<(), Error> { let mut tr = DBTransaction::new(); for range in ranges.iter() { let query = range.get_end().to_be_bytes(); tr.put(Some(self.col), &query, &rlp::encode(range)); } self.db.write(tr)?; if self.db.flush().is_ok() { Ok(()) } else { Err(Error::from(ErrorKind::Dammy)) } } } impl RangeStore for RangeDb { fn get(&self, start: u64, end: u64) -> Result<Box<[Range]>, Error> { let query = start.to_be_bytes(); let iter = self.db.iter_from_prefix(Some(self.col), &query); let mut result = vec![]; for (_key, value) in iter { let range: Range = rlp::decode(&value).unwrap(); if start < range.get_end() { result.push(range.clone()); if !range.intersect(start, end) { break; } } } Ok(result.into_boxed_slice()) } fn del(&self, start: u64, end: u64) -> Result<Box<[Range]>, Error> { self.del_batch(start, end) } fn put(&self, start: u64, end: u64, value: &[u8]) -> Result<(), Error> { let input_ranges = self.del_batch(start, end)?; let mut output_ranges = vec![]; if !Self::validate_range(start, end) { return Err(Error::from(ErrorKind::Dammy)); } if !input_ranges.is_empty() && input_ranges[0].get_start() < start { output_ranges.push(Range::new( input_ranges[0].get_start(), start, &input_ranges[0].get_value(), )); } if !input_ranges.is_empty() { let last_range = &input_ranges[input_ranges.len() - 1]; if end < last_range.get_end() { output_ranges.push(Range::new( end, last_range.get_end(), &last_range.get_value(), )); } } output_ranges.push(Range::new(start, end, value)); if self.put_batch(&output_ranges).is_ok() { Ok(()) } else { Err(Error::from(ErrorKind::Dammy)) } } } #[cfg(test)] mod tests { use super::RangeDb; use crate::traits::db::DatabaseTrait; use crate::traits::rangestore::RangeStore; #[test] fn test_put() { let db = RangeDb::open("test"); assert_eq!(db.put(0, 100, b"Alice is owner").is_ok(), true); assert_eq!(db.put(100, 200, b"Bob is owner").is_ok(), true); let result1 = db.get(100, 200).unwrap(); assert_eq!(result1.is_empty(), false); } #[test] fn test_get() { let db = RangeDb::open("test"); assert_eq!(db.put(0, 100, b"Alice is owner").is_ok(), true); assert_eq!(db.put(100, 120, b"Bob is owner").is_ok(), true); assert_eq!(db.put(120, 180, b"Carol is owner").is_ok(), true); let result1 = db.get(20, 50).unwrap(); assert_eq!(result1.is_empty(), true); } }
use crate::error::{Error, ErrorKind}; use crate::range::Range; use crate::traits::db::DatabaseTrait; use crate::traits::rangestore::RangeStore; use kvdb::{DBTransaction, KeyValueDB}; use kvdb_memorydb::{create, InMemory}; pub struct RangeDb { db: InMemory, col: u32, } impl DatabaseTrait for RangeDb { fn open(_dbname: &str) -> Self { RangeDb { db: create(8), col: 0, } } fn close(&self) {} } impl RangeDb { fn validate_range(start: u64, end: u64) -> bool { start < end } pub fn del_batch(&self, start: u64, end:
Dammy)) } } pub fn put_batch(&self, ranges: &[Range]) -> Result<(), Error> { let mut tr = DBTransaction::new(); for range in ranges.iter() { let query = range.get_end().to_be_bytes(); tr.put(Some(self.col), &query, &rlp::encode(range)); } self.db.write(tr)?; if self.db.flush().is_ok() { Ok(()) } else { Err(Error::from(ErrorKind::Dammy)) } } } impl RangeStore for RangeDb { fn get(&self, start: u64, end: u64) -> Result<Box<[Range]>, Error> { let query = start.to_be_bytes(); let iter = self.db.iter_from_prefix(Some(self.col), &query); let mut result = vec![]; for (_key, value) in iter { let range: Range = rlp::decode(&value).unwrap(); if start < range.get_end() { result.push(range.clone()); if !range.intersect(start, end) { break; } } } Ok(result.into_boxed_slice()) } fn del(&self, start: u64, end: u64) -> Result<Box<[Range]>, Error> { self.del_batch(start, end) } fn put(&self, start: u64, end: u64, value: &[u8]) -> Result<(), Error> { let input_ranges = self.del_batch(start, end)?; let mut output_ranges = vec![]; if !Self::validate_range(start, end) { return Err(Error::from(ErrorKind::Dammy)); } if !input_ranges.is_empty() && input_ranges[0].get_start() < start { output_ranges.push(Range::new( input_ranges[0].get_start(), start, &input_ranges[0].get_value(), )); } if !input_ranges.is_empty() { let last_range = &input_ranges[input_ranges.len() - 1]; if end < last_range.get_end() { output_ranges.push(Range::new( end, last_range.get_end(), &last_range.get_value(), )); } } output_ranges.push(Range::new(start, end, value)); if self.put_batch(&output_ranges).is_ok() { Ok(()) } else { Err(Error::from(ErrorKind::Dammy)) } } } #[cfg(test)] mod tests { use super::RangeDb; use crate::traits::db::DatabaseTrait; use crate::traits::rangestore::RangeStore; #[test] fn test_put() { let db = RangeDb::open("test"); assert_eq!(db.put(0, 100, b"Alice is owner").is_ok(), true); assert_eq!(db.put(100, 200, b"Bob is owner").is_ok(), true); let result1 = db.get(100, 200).unwrap(); assert_eq!(result1.is_empty(), false); } #[test] fn test_get() { let db = RangeDb::open("test"); assert_eq!(db.put(0, 100, b"Alice is owner").is_ok(), true); assert_eq!(db.put(100, 120, b"Bob is owner").is_ok(), true); assert_eq!(db.put(120, 180, b"Carol is owner").is_ok(), true); let result1 = db.get(20, 50).unwrap(); assert_eq!(result1.is_empty(), true); } }
u64) -> Result<Box<[Range]>, Error> { let ranges = self.get(start, end)?; let mut tr = DBTransaction::new(); for range in ranges.clone().iter() { let query = range.get_end().to_be_bytes(); tr.delete(Some(self.col), &query); } self.db.write(tr)?; if self.db.flush().is_ok() { Ok(ranges) } else { Err(Error::from(ErrorKind::
function_block-random_span
[ { "content": "fn create_object_id(start: u64, end: u64) -> Vec<u8> {\n\n let mut object_id_buf = BytesMut::with_capacity(64);\n\n object_id_buf.put_u64_le(start);\n\n object_id_buf.put_u64_le(end);\n\n object_id_buf.to_vec()\n\n}\n\n*/\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct StateObject {\n\n predicate: Address,\n\n data: Vec<u8>,\n\n}\n\n\n\nimpl StateObject {\n\n pub fn new(predicate: Address, data: &[u8]) -> StateObject {\n\n StateObject {\n\n predicate,\n\n data: data.to_vec(),\n\n }\n", "file_path": "core/src/data_structure/state_object.rs", "rank": 0, "score": 136346.99409417436 }, { "content": "/// Caluculate hash of a node\n\nfn compute_node(end: u64, data: &Bytes) -> Bytes {\n\n let mut end_writer = vec![];\n\n end_writer.write_u64::<LittleEndian>(end).unwrap();\n\n let mut buf = Bytes::new();\n\n buf.extend_from_slice(&end_writer);\n\n buf.extend_from_slice(&data);\n\n hash_leaf(&buf)\n\n}\n\n\n\nimpl Hashable for SumMerkleNode {\n\n fn hash(&self) -> Bytes {\n\n match self {\n\n SumMerkleNode::Leaf { data, .. } => hash_leaf(data),\n\n // H(H(left.end + left.data) + H(right.end + right.data))\n\n SumMerkleNode::Node { left, right, .. } => {\n\n let mut buf = compute_node(left.get_end(), &left.hash());\n\n buf.extend_from_slice(&compute_node(right.get_end(), &right.hash()));\n\n hash_leaf(&buf)\n\n }\n\n SumMerkleNode::ProofNode { data, .. } => data.clone(),\n", "file_path": "sum_merkle_tree/src/lib.rs", "rank": 1, "score": 99117.54982570405 }, { "content": "pub trait DatabaseTrait {\n\n fn open(dbname: &str) -> Self;\n\n fn close(&self);\n\n}\n", "file_path": "db/src/traits/db.rs", "rank": 2, "score": 85094.8213953248 }, { "content": "/// entry point of plasma chain.\n\npub fn entry() {\n\n env::set_var(\"RUST_LOG\", \"info\");\n\n env_logger::init();\n\n println!(\"Hello, operator!!\");\n\n let server = get_server(&Default::default()).expect(\"Unable to start rpc server.\");\n\n server.wait();\n\n}\n", "file_path": "operator/src/lib.rs", "rank": 3, "score": 80218.97843869468 }, { "content": "pub trait RangeStore {\n\n /// get ranges between start and end\n\n fn get(&self, start: u64, end: u64) -> Result<Box<[Range]>, Error>;\n\n /// delete ranges between start and end\n\n fn del(&self, start: u64, end: u64) -> Result<Box<[Range]>, Error>;\n\n /// put a range in start and end\n\n fn put(&self, start: u64, end: u64, value: &[u8]) -> Result<(), Error>;\n\n}\n", "file_path": "db/src/traits/rangestore.rs", "rank": 4, "score": 62111.96897326291 }, { "content": "pub trait KvsIterator {\n\n fn next(&self) -> Result<KeyValue, Error>;\n\n}\n\n\n", "file_path": "db/src/traits/kvs.rs", "rank": 5, "score": 62111.96897326291 }, { "content": "pub fn get_server(options: &HttpOption) -> Result<Server, Error> {\n\n let mut io = IoHandler::new();\n\n\n\n let rpc: PlasmaRpcImpl = Default::default();\n\n io.extend_with(rpc.to_delegate());\n\n\n\n let parsed: Result<SocketAddr, Error> = options.url.parse().map_err(Into::into);\n\n let url = parsed?;\n\n ServerBuilder::new(io)\n\n .threads(options.threads)\n\n .start_http(&url)\n\n .map_err(Into::into)\n\n}\n", "file_path": "operator/src/server.rs", "rank": 6, "score": 61226.532008836424 }, { "content": "pub trait KeyValueStore {\n\n fn get(&self, key: &[u8]) -> Result<Option<Box<[u8]>>, Error>;\n\n fn put(&mut self, key: &[u8], value: &[u8]) -> Result<(), Error>;\n\n fn del(&self, key: &[u8]) -> Result<(), Error>;\n\n fn has(&self, key: &[u8]) -> Result<bool, Error>;\n\n fn batch(&self, operations: &[Batch]) -> Result<(), Error>;\n\n fn iterator(&self, prefix: &[u8]) -> Result<Box<KvsIterator>, Error>;\n\n}\n", "file_path": "db/src/traits/kvs.rs", "rank": 7, "score": 60397.36798973802 }, { "content": "pub mod kvs;\n\npub mod rangestore;\n", "file_path": "db/src/impls.rs", "rank": 8, "score": 58556.23296290089 }, { "content": "/// kvdb implementation for range store\n\npub mod kvdb;\n\n/// leveldb implementation for range store\n\n#[cfg(leveldb)]\n\npub mod leveldb;\n\n/// memory implementation for range store\n\npub mod memory;\n", "file_path": "db/src/impls/rangestore.rs", "rank": 9, "score": 56287.64975907299 }, { "content": "pub mod kvdb;\n", "file_path": "db/src/impls/kvs.rs", "rank": 10, "score": 56287.44384273695 }, { "content": "/// invalid parameters\n\npub fn invalid_params<T: std::fmt::Debug>(details: T) -> JsonRpcError {\n\n JsonRpcError::invalid_params(format!(\"{:?}\", details))\n\n}\n", "file_path": "operator/src/rpc/errors.rs", "rank": 11, "score": 54681.78872262408 }, { "content": "impl DatabaseTrait for RangeDbLevelImpl {\n\n fn open(_dbname: &str) -> Self {\n\n let tempdir = TempDir::new(\"demo\").unwrap();\n\n let path = tempdir.path();\n\n\n\n let mut options = Options::new();\n\n options.create_if_missing = true;\n\n Self {\n\n db: Database::open(path, options).unwrap(),\n\n }\n\n }\n\n fn close(&self) {}\n\n}\n\n\n\nimpl RangeDbLevelImpl {\n\n fn validate_range(start: u64, end: u64) -> bool {\n\n start < end\n\n }\n\n pub fn del_batch(&self, start: u64, end: u64) -> Result<Box<[Range]>, Error> {\n\n let ranges = self.get(start, end)?;\n", "file_path": "db/src/impls/rangestore/leveldb.rs", "rank": 12, "score": 54205.23527288946 }, { "content": "impl RangeDbMemoryImpl {\n\n fn validate_range(start: u64, end: u64) -> bool {\n\n start < end\n\n }\n\n pub fn del_batch(&self, start: u64, end: u64) -> Result<Box<[Range]>, Error> {\n\n let ranges = self.get(start, end)?;\n\n let mut db = self.ranges.write();\n\n for range in ranges.clone().iter() {\n\n db.remove(&range.get_end());\n\n }\n\n Ok(ranges.clone())\n\n }\n\n pub fn put_batch(&self, ranges: &[Range]) -> Result<(), Error> {\n\n let mut db = self.ranges.write();\n\n for range in ranges.iter() {\n\n db.insert(range.get_end(), range.clone());\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "db/src/impls/rangestore/memory.rs", "rank": 15, "score": 54203.775071866556 }, { "content": "use crate::error::{Error, ErrorKind};\n\nuse crate::range::Range;\n\nuse crate::traits::db::DatabaseTrait;\n\nuse crate::traits::rangestore::RangeStore;\n\nuse parking_lot::RwLock;\n\nuse std::collections::BTreeMap;\n\n\n\npub struct RangeDbMemoryImpl {\n\n ranges: RwLock<BTreeMap<u64, Range>>,\n\n}\n\n\n\nimpl DatabaseTrait for RangeDbMemoryImpl {\n\n fn open(_dbname: &str) -> Self {\n\n Self {\n\n ranges: RwLock::new(BTreeMap::new()),\n\n }\n\n }\n\n fn close(&self) {}\n\n}\n\n\n", "file_path": "db/src/impls/rangestore/memory.rs", "rank": 16, "score": 54201.398151931186 }, { "content": "use crate::error::{Error, ErrorKind};\n\nuse crate::traits::db::DatabaseTrait;\n\nuse crate::traits::kvs::{Batch, KeyValueStore, KvsIterator};\n\nuse kvdb::{DBTransaction, KeyValueDB};\n\nuse kvdb_memorydb::InMemory;\n\n\n\npub struct CoreDb {\n\n db: InMemory,\n\n}\n\n\n\nimpl DatabaseTrait for CoreDb {\n\n fn open(_dbname: &str) -> Self {\n\n CoreDb {\n\n db: Default::default(),\n\n }\n\n }\n\n fn close(&self) {}\n\n}\n\n\n\nimpl KeyValueStore for CoreDb {\n", "file_path": "db/src/impls/kvs/kvdb.rs", "rank": 18, "score": 54200.02677383202 }, { "content": " }\n\n}\n\n\n\nimpl RangeStore for RangeDbLevelImpl {\n\n fn get(&self, start: u64, end: u64) -> Result<Box<[Range]>, Error> {\n\n let iter = self.db.value_iter(ReadOptions::new());\n\n iter.seek(&(start as i32));\n\n let mut result = vec![];\n\n for value in iter {\n\n let range: Range = rlp::decode(&value).unwrap();\n\n if start < range.get_end() {\n\n if !range.intersect(start, end) {\n\n break;\n\n } else {\n\n result.push(range.clone());\n\n }\n\n }\n\n }\n\n Ok(result.into_boxed_slice())\n\n }\n", "file_path": "db/src/impls/rangestore/leveldb.rs", "rank": 19, "score": 54198.09078697479 }, { "content": "\n\nimpl RangeStore for RangeDbMemoryImpl {\n\n fn get(&self, start: u64, end: u64) -> Result<Box<[Range]>, Error> {\n\n let db = self.ranges.read();\n\n let mut result = vec![];\n\n println!(\"get() {:?}, {:?}\", start, end);\n\n for (key, range) in db.iter() {\n\n println!(\"get {:?}, {:?}\", key, range);\n\n if start < *key {\n\n if !range.intersect(start, end) {\n\n break;\n\n } else {\n\n result.push(range.clone());\n\n }\n\n }\n\n }\n\n Ok(result.into_boxed_slice())\n\n }\n\n fn del(&self, start: u64, end: u64) -> Result<Box<[Range]>, Error> {\n\n self.del_batch(start, end)\n", "file_path": "db/src/impls/rangestore/memory.rs", "rank": 20, "score": 54197.54222745163 }, { "content": " end,\n\n last_range.get_end(),\n\n &last_range.get_value(),\n\n ));\n\n }\n\n }\n\n output_ranges.push(Range::new(start, end, value));\n\n if self.put_batch(&output_ranges).is_ok() {\n\n Ok(())\n\n } else {\n\n Err(Error::from(ErrorKind::Dammy))\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::RangeDbLevelImpl;\n\n use crate::traits::db::DatabaseTrait;\n\n use crate::traits::rangestore::RangeStore;\n", "file_path": "db/src/impls/rangestore/leveldb.rs", "rank": 21, "score": 54196.647011680216 }, { "content": "extern crate leveldb;\n\nextern crate tempdir;\n\n\n\nuse crate::error::{Error, ErrorKind};\n\nuse crate::range::Range;\n\nuse crate::traits::db::DatabaseTrait;\n\nuse crate::traits::rangestore::RangeStore;\n\n//use db_key::Key;\n\nuse leveldb::database::{batch::Writebatch, Database};\n\nuse leveldb::iterator::Iterable;\n\n//use leveldb::kv::KV;\n\nuse leveldb::batch::Batch;\n\nuse leveldb::iterator::LevelDBIterator;\n\nuse leveldb::options::{Options, ReadOptions, WriteOptions};\n\nuse tempdir::TempDir;\n\n\n\npub struct RangeDbLevelImpl {\n\n db: Database<i32>,\n\n}\n\n\n", "file_path": "db/src/impls/rangestore/leveldb.rs", "rank": 23, "score": 54195.14309931243 }, { "content": " }\n\n fn put(&self, start: u64, end: u64, value: &[u8]) -> Result<(), Error> {\n\n let input_ranges = self.del_batch(start, end)?;\n\n let mut output_ranges = vec![];\n\n if !Self::validate_range(start, end) {\n\n return Err(Error::from(ErrorKind::Dammy));\n\n }\n\n if !input_ranges.is_empty() && input_ranges[0].get_start() < start {\n\n output_ranges.push(Range::new(\n\n input_ranges[0].get_start(),\n\n start,\n\n &input_ranges[0].get_value(),\n\n ));\n\n }\n\n if !input_ranges.is_empty() {\n\n let last_range = &input_ranges[input_ranges.len() - 1];\n\n if end < last_range.get_end() {\n\n output_ranges.push(Range::new(\n\n end,\n\n last_range.get_end(),\n", "file_path": "db/src/impls/rangestore/memory.rs", "rank": 24, "score": 54195.075892852146 }, { "content": " fn del(&self, start: u64, end: u64) -> Result<Box<[Range]>, Error> {\n\n self.del_batch(start, end)\n\n }\n\n fn put(&self, start: u64, end: u64, value: &[u8]) -> Result<(), Error> {\n\n let input_ranges = self.del_batch(start, end)?;\n\n let mut output_ranges = vec![];\n\n if !Self::validate_range(start, end) {\n\n return Err(Error::from(ErrorKind::Dammy));\n\n }\n\n if !input_ranges.is_empty() && input_ranges[0].get_start() < start {\n\n output_ranges.push(Range::new(\n\n input_ranges[0].get_start(),\n\n start,\n\n &input_ranges[0].get_value(),\n\n ));\n\n }\n\n if !input_ranges.is_empty() {\n\n let last_range = &input_ranges[input_ranges.len() - 1];\n\n if end < last_range.get_end() {\n\n output_ranges.push(Range::new(\n", "file_path": "db/src/impls/rangestore/leveldb.rs", "rank": 25, "score": 54194.93736811816 }, { "content": " &last_range.get_value(),\n\n ));\n\n }\n\n }\n\n output_ranges.push(Range::new(start, end, value));\n\n if self.put_batch(&output_ranges).is_ok() {\n\n Ok(())\n\n } else {\n\n Err(Error::from(ErrorKind::Dammy))\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::RangeDbMemoryImpl;\n\n use crate::traits::db::DatabaseTrait;\n\n use crate::traits::rangestore::RangeStore;\n\n\n\n #[test]\n", "file_path": "db/src/impls/rangestore/memory.rs", "rank": 26, "score": 54194.92507289376 }, { "content": " fn test_get_same_range() {\n\n let db = RangeDbMemoryImpl::open(\"test\");\n\n assert_eq!(db.put(0, 100, b\"Alice is owner\").is_ok(), true);\n\n assert_eq!(db.put(100, 200, b\"Bob is owner\").is_ok(), true);\n\n let result1 = db.get(100, 200).unwrap();\n\n assert_eq!(result1.is_empty(), false);\n\n assert_eq!(result1[0].get_start(), 100);\n\n assert_eq!(result1[0].get_value(), b\"Bob is owner\");\n\n }\n\n\n\n #[test]\n\n fn test_get_small_range() {\n\n let db = RangeDbMemoryImpl::open(\"test\");\n\n assert_eq!(db.put(0, 100, b\"Alice is owner\").is_ok(), true);\n\n assert_eq!(db.put(100, 120, b\"Bob is owner\").is_ok(), true);\n\n assert_eq!(db.put(120, 180, b\"Carol is owner\").is_ok(), true);\n\n let result1 = db.get(20, 50).unwrap();\n\n assert_eq!(result1.is_empty(), false);\n\n assert_eq!(result1[0].get_start(), 0);\n\n assert_eq!(result1[0].get_value(), b\"Alice is owner\");\n", "file_path": "db/src/impls/rangestore/memory.rs", "rank": 27, "score": 54192.72518529479 }, { "content": " assert_eq!(result1[0].get_start(), 0);\n\n assert_eq!(result1[0].get_value(), b\"Alice is owner\");\n\n assert_eq!(result1.len(), 1);\n\n }\n\n\n\n #[test]\n\n fn test_get_large_range() {\n\n let db = RangeDbLevelImpl::open(\"test\");\n\n assert_eq!(db.put(0, 100, b\"Alice is owner\").is_ok(), true);\n\n assert_eq!(db.put(100, 120, b\"Bob is owner\").is_ok(), true);\n\n assert_eq!(db.put(120, 180, b\"Carol is owner\").is_ok(), true);\n\n let result1 = db.get(20, 150).unwrap();\n\n assert_eq!(result1.is_empty(), false);\n\n assert_eq!(result1[0].get_start(), 0);\n\n assert_eq!(result1[0].get_value(), b\"Alice is owner\");\n\n assert_eq!(result1.len(), 3);\n\n }\n\n\n\n}\n", "file_path": "db/src/impls/rangestore/leveldb.rs", "rank": 28, "score": 54192.29923272037 }, { "content": " let mut batch = Writebatch::new();\n\n for range in ranges.clone().iter() {\n\n batch.delete(range.get_end() as i32)\n\n }\n\n if self.db.write(WriteOptions::new(), &batch).is_ok() {\n\n Ok(ranges)\n\n } else {\n\n Err(Error::from(ErrorKind::LevelDb))\n\n }\n\n }\n\n pub fn put_batch(&self, ranges: &[Range]) -> Result<(), Error> {\n\n let mut batch = Writebatch::new();\n\n for range in ranges.iter() {\n\n batch.put(range.get_end() as i32, &rlp::encode(range))\n\n }\n\n if self.db.write(WriteOptions::new(), &batch).is_ok() {\n\n Ok(())\n\n } else {\n\n Err(Error::from(ErrorKind::LevelDb))\n\n }\n", "file_path": "db/src/impls/rangestore/leveldb.rs", "rank": 29, "score": 54192.29364069763 }, { "content": "\n\n #[test]\n\n fn test_get_same_range() {\n\n let db = RangeDbLevelImpl::open(\"test\");\n\n assert_eq!(db.put(0, 100, b\"Alice is owner\").is_ok(), true);\n\n assert_eq!(db.put(100, 200, b\"Bob is owner\").is_ok(), true);\n\n let result1 = db.get(100, 200).unwrap();\n\n assert_eq!(result1.is_empty(), false);\n\n assert_eq!(result1[0].get_start(), 100);\n\n assert_eq!(result1[0].get_value(), b\"Bob is owner\");\n\n }\n\n\n\n #[test]\n\n fn test_get_small_range() {\n\n let db = RangeDbLevelImpl::open(\"test\");\n\n assert_eq!(db.put(0, 100, b\"Alice is owner\").is_ok(), true);\n\n assert_eq!(db.put(100, 120, b\"Bob is owner\").is_ok(), true);\n\n assert_eq!(db.put(120, 180, b\"Carol is owner\").is_ok(), true);\n\n let result1 = db.get(20, 50).unwrap();\n\n assert_eq!(result1.is_empty(), false);\n", "file_path": "db/src/impls/rangestore/leveldb.rs", "rank": 30, "score": 54192.28878388641 }, { "content": " assert_eq!(result1.len(), 1);\n\n }\n\n\n\n #[test]\n\n fn test_get_large_range() {\n\n let db = RangeDbMemoryImpl::open(\"test\");\n\n assert_eq!(db.put(0, 100, b\"Alice is owner\").is_ok(), true);\n\n assert_eq!(db.put(100, 120, b\"Bob is owner\").is_ok(), true);\n\n assert_eq!(db.put(120, 180, b\"Carol is owner\").is_ok(), true);\n\n let result1 = db.get(20, 150).unwrap();\n\n assert_eq!(result1.is_empty(), false);\n\n assert_eq!(result1[0].get_start(), 0);\n\n assert_eq!(result1[0].get_value(), b\"Alice is owner\");\n\n assert_eq!(result1.len(), 3);\n\n }\n\n}\n", "file_path": "db/src/impls/rangestore/memory.rs", "rank": 31, "score": 54191.956809643045 }, { "content": " fn get(&self, key: &[u8]) -> Result<Option<Box<[u8]>>, Error> {\n\n self.db\n\n .get(None, key)\n\n .map_err(Into::into)\n\n .map(|v| v.map(|v| v.to_vec().into_boxed_slice()))\n\n }\n\n fn put(&mut self, key: &[u8], value: &[u8]) -> Result<(), Error> {\n\n let mut tr = DBTransaction::new();\n\n tr.put(None, key, value);\n\n self.db.write(tr).map_err(Into::into)\n\n }\n\n fn del(&self, key: &[u8]) -> Result<(), Error> {\n\n let mut tr = DBTransaction::new();\n\n tr.delete(None, key);\n\n self.db.write(tr).map_err(Into::into)\n\n }\n\n fn has(&self, _key: &[u8]) -> Result<bool, Error> {\n\n Ok(true)\n\n }\n\n fn batch(&self, _operations: &[Batch]) -> Result<(), Error> {\n\n Ok(())\n\n }\n\n fn iterator(&self, _prefix: &[u8]) -> Result<Box<KvsIterator + 'static>, Error> {\n\n Err(Error::from(ErrorKind::Dammy))\n\n }\n\n}\n", "file_path": "db/src/impls/kvs/kvdb.rs", "rank": 34, "score": 54189.816507924195 }, { "content": "fn main() {\n\n entry();\n\n}\n", "file_path": "plasma-chamber/main.rs", "rank": 36, "score": 43175.623910519746 }, { "content": "#[rpc]\n\npub trait PlasmaRpc {\n\n /// Returns a protocol version\n\n #[rpc(name = \"protocolVersion\")]\n\n fn protocol_version(&self) -> Result<String>;\n\n /// append signed transaction\n\n #[rpc(name = \"sendTransaction\")]\n\n fn send_transaction(&self, message: String) -> Result<bool>;\n\n /// operator can generate block\n\n #[rpc(name = \"generateBlock\")]\n\n fn generate_block(&self) -> Result<String>;\n\n}\n", "file_path": "operator/src/rpc/plasmarpc.rs", "rank": 37, "score": 37394.64309271662 }, { "content": "/// Base class of predicate plugin\n\npub trait PredicatePlugin {\n\n fn execute_state_transition(\n\n &self,\n\n input: &StateUpdate,\n\n transaction: &Transaction,\n\n ) -> StateUpdate;\n\n}\n", "file_path": "predicate-plugins/src/predicate.rs", "rank": 38, "score": 37394.64309271662 }, { "content": "fn hash_leaf(value: &Bytes) -> Bytes {\n\n let mut hasher = Sha3::keccak256();\n\n let mut result = vec![0u8; hasher.output_bits() / 8];\n\n hasher.reset();\n\n hasher.input(value.as_ref());\n\n hasher.result(result.as_mut_slice());\n\n Bytes::from(result)\n\n}\n\n\n", "file_path": "sum_merkle_tree/src/lib.rs", "rank": 39, "score": 34501.96525050997 }, { "content": "}\n\n\n\nimpl Range {\n\n pub fn new(start: u64, end: u64, value: &[u8]) -> Self {\n\n Range {\n\n start,\n\n end,\n\n value: value.to_vec(),\n\n }\n\n }\n\n pub fn get_start(&self) -> u64 {\n\n self.start\n\n }\n\n pub fn get_end(&self) -> u64 {\n\n self.end\n\n }\n\n pub fn get_value(&self) -> &[u8] {\n\n &self.value\n\n }\n\n pub fn validate(&self) -> bool {\n", "file_path": "db/src/range.rs", "rank": 40, "score": 28721.861535345637 }, { "content": " self.start < self.end\n\n }\n\n pub fn intersect(&self, start: u64, end: u64) -> bool {\n\n let max_start = max(self.start, start);\n\n let max_end = min(self.end, end);\n\n max_start < max_end\n\n }\n\n}\n\n\n\nimpl Encodable for Range {\n\n fn rlp_append(&self, s: &mut RlpStream) {\n\n s.begin_list(3);\n\n s.append(&self.start);\n\n s.append(&self.end);\n\n s.append(&self.value);\n\n }\n\n}\n\n\n\nimpl Decodable for Range {\n\n fn decode(rlp: &Rlp) -> Result<Self, DecoderError> {\n", "file_path": "db/src/range.rs", "rank": 41, "score": 28719.990237603342 }, { "content": "use rlp::{Decodable, DecoderError, Encodable, Rlp, RlpStream};\n\nuse std::cmp::{max, min, Ordering};\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct Range {\n\n start: u64,\n\n end: u64,\n\n value: Vec<u8>,\n\n}\n\n\n\nimpl Ord for Range {\n\n fn cmp(&self, other: &Range) -> Ordering {\n\n self.end.cmp(&other.end)\n\n }\n\n}\n\n\n\nimpl PartialOrd for Range {\n\n fn partial_cmp(&self, other: &Range) -> Option<Ordering> {\n\n Some(self.cmp(other))\n\n }\n", "file_path": "db/src/range.rs", "rank": 42, "score": 28713.69914746836 }, { "content": "/// error definition for plasma db.\n\nuse failure::{Backtrace, Context, Fail};\n\nuse leveldb::error::Error as LeveldbError;\n\nuse std::fmt;\n\nuse std::fmt::Display;\n\nuse std::io::Error as IoError;\n\n\n\n#[derive(Fail, Debug)]\n\npub enum ErrorKind {\n\n #[fail(display = \"IO error\")]\n\n Io,\n\n #[fail(display = \"Dammy error\")]\n\n Dammy,\n\n #[fail(display = \"LevelDb error\")]\n\n LevelDb,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Error {\n\n inner: Context<ErrorKind>,\n", "file_path": "db/src/error.rs", "rank": 43, "score": 28710.658483067124 }, { "content": " let start: u64 = rlp.val_at(0)?;\n\n let end: u64 = rlp.val_at(1)?;\n\n let value: Vec<u8> = rlp.val_at(2)?;\n\n Ok(Range::new(start, end, &value))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Range;\n\n\n\n #[test]\n\n fn test_intersect() {\n\n let range = Range::new(0, 100, b\"aaa\");\n\n assert_eq!(range.intersect(50, 120), true);\n\n assert_eq!(range.intersect(100, 200), false);\n\n }\n\n\n\n}\n", "file_path": "db/src/range.rs", "rank": 44, "score": 28709.682857391763 }, { "content": "pub mod db;\n\npub mod kvs;\n\npub mod rangestore;\n", "file_path": "db/src/traits.rs", "rank": 45, "score": 28708.666882852256 }, { "content": "pub mod error;\n\npub mod impls;\n\npub mod range;\n\npub mod traits;\n", "file_path": "db/src/lib.rs", "rank": 46, "score": 28708.433261636204 }, { "content": "}\n\n\n\nimpl From<IoError> for Error {\n\n fn from(error: IoError) -> Error {\n\n Error {\n\n inner: error.context(ErrorKind::Io),\n\n }\n\n }\n\n}\n\n\n\nimpl From<LeveldbError> for Error {\n\n fn from(error: LeveldbError) -> Error {\n\n Error {\n\n inner: error.context(ErrorKind::LevelDb),\n\n }\n\n }\n\n}\n", "file_path": "db/src/error.rs", "rank": 47, "score": 28707.23891677236 }, { "content": "}\n\n\n\nimpl Fail for Error {\n\n fn cause(&self) -> Option<&Fail> {\n\n self.inner.cause()\n\n }\n\n\n\n fn backtrace(&self) -> Option<&Backtrace> {\n\n self.inner.backtrace()\n\n }\n\n}\n\n\n\nimpl Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n Display::fmt(&self.inner, f)\n\n }\n\n}\n\n\n\nimpl Error {\n\n pub fn new(inner: Context<ErrorKind>) -> Error {\n", "file_path": "db/src/error.rs", "rank": 48, "score": 28707.216068331538 }, { "content": " Error { inner }\n\n }\n\n\n\n pub fn kind(&self) -> &ErrorKind {\n\n self.inner.get_context()\n\n }\n\n}\n\n\n\nimpl From<ErrorKind> for Error {\n\n fn from(kind: ErrorKind) -> Error {\n\n Error {\n\n inner: Context::new(kind),\n\n }\n\n }\n\n}\n\n\n\nimpl From<Context<ErrorKind>> for Error {\n\n fn from(inner: Context<ErrorKind>) -> Error {\n\n Error { inner }\n\n }\n", "file_path": "db/src/error.rs", "rank": 49, "score": 28706.83015371285 }, { "content": "use crate::error::Error;\n\n\n\npub enum Batch<'a> {\n\n BatchPut { key: &'a [u8], value: &'a [u8] },\n\n BatchDel { key: &'a [u8] },\n\n}\n\n\n\npub struct KeyValue {\n\n key: Box<[u8]>,\n\n value: Box<[u8]>,\n\n}\n\n\n\nimpl KeyValue {\n\n pub fn get_key(&self) -> &[u8] {\n\n &self.key\n\n }\n\n pub fn get_value(&self) -> &[u8] {\n\n &self.value\n\n }\n\n}\n\n\n", "file_path": "db/src/traits/kvs.rs", "rank": 50, "score": 27597.324600437645 }, { "content": "use crate::error::Error;\n\nuse crate::range::Range;\n\n\n", "file_path": "db/src/traits/rangestore.rs", "rank": 51, "score": 27593.755481108772 }, { "content": "extern crate ethabi;\n\n\n\nuse crate::error::{Error, ErrorKind};\n\nuse ethabi::Token;\n\nuse plasma_core::data_structure::StateUpdate;\n\nuse plasma_db::impls::rangestore::memory::RangeDbMemoryImpl;\n\nuse plasma_db::traits::db::DatabaseTrait;\n\nuse plasma_db::traits::rangestore::RangeStore;\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct VerifiedStateUpdate {\n\n start: u64,\n\n end: u64,\n\n verified_block_number: u64,\n\n state_update: StateUpdate,\n\n}\n\n\n\nimpl VerifiedStateUpdate {\n\n pub fn new(\n\n start: u64,\n", "file_path": "client/src/state/state_db.rs", "rank": 52, "score": 26577.97709773276 }, { "content": " end: u64,\n\n verified_block_number: u64,\n\n state_update: &StateUpdate,\n\n ) -> Self {\n\n VerifiedStateUpdate {\n\n start,\n\n end,\n\n verified_block_number,\n\n state_update: state_update.clone(),\n\n }\n\n }\n\n pub fn from(verified_block_number: u64, state_update: &StateUpdate) -> Self {\n\n VerifiedStateUpdate {\n\n start: state_update.get_start(),\n\n end: state_update.get_end(),\n\n verified_block_number,\n\n state_update: state_update.clone(),\n\n }\n\n }\n\n pub fn get_start(&self) -> u64 {\n", "file_path": "client/src/state/state_db.rs", "rank": 53, "score": 26574.298407997256 }, { "content": " ))\n\n } else {\n\n Err(Error::from(ErrorKind::AbiDecode))\n\n }\n\n }\n\n}\n\n\n\npub struct StateDb {\n\n db: Box<RangeStore>,\n\n}\n\n\n\nimpl Default for StateDb {\n\n fn default() -> Self {\n\n Self {\n\n db: Box::new(RangeDbMemoryImpl::open(\"test\")),\n\n }\n\n }\n\n}\n\n\n\nimpl StateDb {\n", "file_path": "client/src/state/state_db.rs", "rank": 54, "score": 26573.39848398058 }, { "content": " pub fn get_verified_state_updates(\n\n &self,\n\n start: u64,\n\n end: u64,\n\n ) -> Result<Box<[VerifiedStateUpdate]>, Error> {\n\n let ranges = self.db.get(start, end).map_err::<Error, _>(Into::into)?;\n\n ranges\n\n .iter()\n\n .map(|range| VerifiedStateUpdate::from_abi(range.get_value()))\n\n .collect()\n\n }\n\n pub fn put_verified_state_update(\n\n &self,\n\n verified_state_update: &VerifiedStateUpdate,\n\n ) -> Result<(), Error> {\n\n self.db\n\n .put(\n\n verified_state_update.get_start(),\n\n verified_state_update.get_end(),\n\n &verified_state_update.to_abi(),\n\n )\n\n .map_err::<Error, _>(Into::into)\n\n }\n\n}\n", "file_path": "client/src/state/state_db.rs", "rank": 55, "score": 26572.563671399435 }, { "content": " self.start\n\n }\n\n pub fn get_end(&self) -> u64 {\n\n self.end\n\n }\n\n pub fn get_state_update(&self) -> &StateUpdate {\n\n &self.state_update\n\n }\n\n pub fn to_abi(&self) -> Vec<u8> {\n\n ethabi::encode(&[\n\n Token::Uint(self.start.into()),\n\n Token::Uint(self.end.into()),\n\n Token::Uint(self.verified_block_number.into()),\n\n Token::Bytes(self.state_update.to_abi()),\n\n ])\n\n }\n\n pub fn from_abi(data: &[u8]) -> Result<Self, Error> {\n\n let decoded: Vec<Token> = ethabi::decode(\n\n &[\n\n ethabi::ParamType::Uint(8),\n", "file_path": "client/src/state/state_db.rs", "rank": 56, "score": 26572.36457424334 }, { "content": " ethabi::ParamType::Uint(8),\n\n ethabi::ParamType::Uint(8),\n\n ethabi::ParamType::Bytes,\n\n ],\n\n data,\n\n )\n\n .map_err(|_e| Error::from(ErrorKind::AbiDecode))?;\n\n let block_number = decoded[0].clone().to_uint();\n\n let start = decoded[1].clone().to_uint();\n\n let end = decoded[2].clone().to_uint();\n\n let state_update = decoded[3].clone().to_bytes();\n\n\n\n if let (Some(block_number), Some(start), Some(end), Some(state_update)) =\n\n (block_number, start, end, state_update)\n\n {\n\n Ok(VerifiedStateUpdate::new(\n\n block_number.as_u64(),\n\n start.as_u64(),\n\n end.as_u64(),\n\n &StateUpdate::from_abi(&state_update).unwrap(),\n", "file_path": "client/src/state/state_db.rs", "rank": 57, "score": 26567.114319427328 }, { "content": " }\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct ImplicitBounds {\n\n implicit_start: u64,\n\n implicit_end: u64,\n\n}\n\n\n\nimpl ImplicitBounds {\n\n pub fn new(implicit_start: u64, implicit_end: u64) -> Self {\n\n ImplicitBounds {\n\n implicit_start,\n\n implicit_end,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "sum_merkle_tree/src/lib.rs", "rank": 58, "score": 17.334309749695308 }, { "content": " self.start\n\n }\n\n pub fn get_end(&self) -> u64 {\n\n self.end\n\n }\n\n pub fn get_block_number(&self) -> u64 {\n\n self.block_number\n\n }\n\n pub fn get_state_object(&self) -> &StateObject {\n\n &self.state_object\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::StateObject;\n\n use super::StateUpdate;\n\n use ethereum_types::Address;\n\n\n\n #[test]\n", "file_path": "core/src/data_structure/state_update.rs", "rank": 59, "score": 13.45444316418555 }, { "content": " }\n\n pub fn get_ranges(&self) -> &[VerifiedStateUpdate] {\n\n &self.ranges\n\n }\n\n}\n\n\n\npub struct StateManager {\n\n db: Box<StateDb>,\n\n}\n\n\n\nimpl Default for StateManager {\n\n fn default() -> Self {\n\n Self {\n\n db: Default::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl StateManager {\n\n /// force to put state update\n", "file_path": "client/src/state/state_manager.rs", "rank": 60, "score": 12.21709580827163 }, { "content": "extern crate ethabi;\n\nextern crate rlp;\n\n\n\nuse super::error::{Error, ErrorKind};\n\nuse super::state_object::StateObject;\n\nuse ethabi::Token;\n\nuse ethereum_types::Address;\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct StateUpdate {\n\n state_object: StateObject,\n\n start: u64,\n\n end: u64,\n\n block_number: u64,\n\n plasma_contract: Address,\n\n}\n\n\n\nimpl StateUpdate {\n\n pub fn new(\n\n state_object: &StateObject,\n", "file_path": "core/src/data_structure/state_update.rs", "rank": 61, "score": 12.018953188140028 }, { "content": "extern crate ethereum_types;\n\nextern crate rlp;\n\nextern crate tiny_keccak;\n\n\n\nuse super::error::{Error, ErrorKind};\n\nuse ethabi::Token;\n\nuse ethereum_types::{Address, H256};\n\nuse rlp::{Decodable, DecoderError, Encodable, Rlp, RlpStream};\n\nuse tiny_keccak::Keccak;\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Witness {\n\n v: H256,\n\n r: H256,\n\n s: u64,\n\n}\n\n\n\nimpl Witness {\n\n pub fn new(v: H256, r: H256, s: u64) -> Self {\n\n Witness { v, r, s }\n", "file_path": "core/src/data_structure/transaction.rs", "rank": 62, "score": 11.809661035456392 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl SumMerkleNode {\n\n pub fn create_proof_node(node: &SumMerkleNode) -> SumMerkleNode {\n\n SumMerkleNode::ProofNode {\n\n end: node.get_end(),\n\n data: node.hash(),\n\n }\n\n }\n\n\n\n pub fn create_empty() -> Self {\n\n SumMerkleNode::Leaf {\n\n end: u64::max_value(),\n\n data: hash_leaf(&Bytes::from_static(&[0u8])),\n\n }\n\n }\n\n\n\n pub fn create_leaf(end: u64, data: Bytes) -> Self {\n", "file_path": "sum_merkle_tree/src/lib.rs", "rank": 63, "score": 11.794180333595197 }, { "content": " /// ```\n\n pub fn get_end(&self) -> u64 {\n\n self.end\n\n }\n\n pub fn get_parameters(&self) -> &[u8] {\n\n &self.parameters\n\n }\n\n}\n\n\n\nimpl Encodable for Transaction {\n\n fn rlp_append(&self, s: &mut RlpStream) {\n\n s.begin_list(1);\n\n s.append(&self.to_abi());\n\n }\n\n}\n\n\n\nimpl Decodable for Transaction {\n\n fn decode(rlp: &Rlp) -> Result<Self, DecoderError> {\n\n let abi: Vec<u8> = rlp.list_at(0)?;\n\n Ok(Transaction::from_abi(&abi).unwrap())\n", "file_path": "core/src/data_structure/transaction.rs", "rank": 64, "score": 11.407622537667455 }, { "content": "pub mod state_db;\n\npub mod state_manager;\n\n\n\npub use self::state_db::StateDb;\n\npub use self::state_db::VerifiedStateUpdate;\n\npub use self::state_manager::StateManager;\n", "file_path": "client/src/state.rs", "rank": 65, "score": 10.76197573506701 }, { "content": " for (i, item) in inclusion_proof.iter().enumerate() {\n\n if path[i] {\n\n // leaf is in right\n\n computed = Self::verify_and_get_parent(item, &computed, first_left_end)?\n\n } else {\n\n // leaf is in left\n\n computed = Self::verify_and_get_parent(&computed, item, first_left_end)?\n\n }\n\n }\n\n let is_last_leaf = 2u64.pow(inclusion_proof.len() as u32) - 1 == (idx as u64);\n\n if computed.hash() == root {\n\n Ok(ImplicitBounds::new(\n\n first_left_end,\n\n if is_last_leaf {\n\n u64::max_value()\n\n } else {\n\n leaf.get_end()\n\n },\n\n ))\n\n } else {\n", "file_path": "sum_merkle_tree/src/lib.rs", "rank": 66, "score": 10.724386339093396 }, { "content": " pub fn deposit(&self, start: u64, end: u64, state_update: &StateUpdate) -> Result<(), Error> {\n\n self.db\n\n .put_verified_state_update(&VerifiedStateUpdate::new(start, end, 0, state_update))\n\n }\n\n\n\n /// Execute a transaction\n\n pub fn execute_transaction(\n\n &self,\n\n transaction: &Transaction,\n\n ) -> Result<ResultOfExecuteTransaction, Error> {\n\n let verified_state_updates = self\n\n .db\n\n .get_verified_state_updates(transaction.get_start(), transaction.get_end())?;\n\n let new_state_updates: Vec<StateUpdate> = verified_state_updates\n\n .iter()\n\n .map(|verified_state_update| {\n\n let predicate_address: &Address = verified_state_update\n\n .get_state_update()\n\n .get_state_object()\n\n .get_predicate();\n", "file_path": "client/src/state/state_manager.rs", "rank": 67, "score": 10.701337473945905 }, { "content": " /// ```\n\n pub fn create_method_id(value: &[u8]) -> u8 {\n\n let mut hasher = Keccak::new_sha3_256();\n\n hasher.update(value);\n\n let mut result: [u8; 32] = [0; 32];\n\n hasher.finalize(&mut result);\n\n result[0]\n\n }\n\n /// ### tx.get_start()\n\n /// A function to get start of a range of a tx instance\n\n /// ```ignore\n\n /// let start = tx.get_start();\n\n /// ```\n\n pub fn get_start(&self) -> u64 {\n\n self.start\n\n }\n\n /// ### tx.get_end()\n\n /// A function to get end of a range of a tx instance\n\n /// ```ignore\n\n /// let end = tx.get_end();\n", "file_path": "core/src/data_structure/transaction.rs", "rank": 68, "score": 10.388310355438591 }, { "content": " start: u64,\n\n end: u64,\n\n block_number: u64,\n\n plasma_contract: Address,\n\n ) -> Self {\n\n StateUpdate {\n\n state_object: state_object.clone(),\n\n start,\n\n end,\n\n block_number,\n\n plasma_contract,\n\n }\n\n }\n\n pub fn to_abi(&self) -> Vec<u8> {\n\n ethabi::encode(&[\n\n Token::Bytes(self.state_object.to_abi()),\n\n Token::Uint(self.start.into()),\n\n Token::Uint(self.end.into()),\n\n Token::Uint(self.block_number.into()),\n\n Token::Address(self.plasma_contract),\n", "file_path": "core/src/data_structure/state_update.rs", "rank": 69, "score": 9.999368364568207 }, { "content": "use crate::error::Error;\n\nuse crate::state::{StateDb, VerifiedStateUpdate};\n\nuse ethereum_types::Address;\n\nuse plasma_core::data_structure::{StateUpdate, Transaction};\n\nuse predicate_plugins::PredicateManager;\n\n\n\npub struct ResultOfExecuteTransaction {\n\n state_update: Box<StateUpdate>,\n\n ranges: Box<[VerifiedStateUpdate]>,\n\n}\n\n\n\nimpl ResultOfExecuteTransaction {\n\n pub fn new(state_update: &StateUpdate, ranges: &[VerifiedStateUpdate]) -> Self {\n\n ResultOfExecuteTransaction {\n\n state_update: Box::new(state_update.clone()),\n\n ranges: ranges.to_vec().into_boxed_slice(),\n\n }\n\n }\n\n pub fn get_state_update(&self) -> &StateUpdate {\n\n &self.state_update\n", "file_path": "client/src/state/state_manager.rs", "rank": 70, "score": 9.79240351803364 }, { "content": " witness: Witness,\n\n}\n\n\n\nimpl Transaction {\n\n /// ### Transaction.new\n\n /// A constructor of a Transaction struct\n\n /// ```ignore\n\n /// let tx = Transaction.new(plasma_contract_address, start, end ,method_id, &parameters, &witness);\n\n /// ```\n\n pub fn new(\n\n plasma_contract_address: Address,\n\n start: u64,\n\n end: u64,\n\n method_id: u8,\n\n parameters: &[u8],\n\n witness: &Witness,\n\n ) -> Transaction {\n\n Transaction {\n\n plasma_contract_address,\n\n start,\n", "file_path": "core/src/data_structure/transaction.rs", "rank": 71, "score": 9.737304982466242 }, { "content": " if let (\n\n Some(state_object),\n\n Some(start),\n\n Some(end),\n\n Some(block_number),\n\n Some(plasma_contract),\n\n ) = (state_object, start, end, block_number, plasma_contract)\n\n {\n\n Ok(StateUpdate::new(\n\n &StateObject::from_abi(&state_object).unwrap(),\n\n start.as_u64(),\n\n end.as_u64(),\n\n block_number.as_u64(),\n\n plasma_contract,\n\n ))\n\n } else {\n\n Err(Error::from(ErrorKind::AbiDecode))\n\n }\n\n }\n\n pub fn get_start(&self) -> u64 {\n", "file_path": "core/src/data_structure/state_update.rs", "rank": 72, "score": 8.780167807440751 }, { "content": "use crate::predicate::PredicatePlugin;\n\nuse plasma_core::data_structure::{StateUpdate, Transaction};\n\n\n\n/// Simple ownership predicate\n\npub struct OwnershipPredicate {}\n\n\n\nimpl Default for OwnershipPredicate {\n\n fn default() -> Self {\n\n OwnershipPredicate {}\n\n }\n\n}\n\n\n\nimpl PredicatePlugin for OwnershipPredicate {\n\n fn execute_state_transition(\n\n &self,\n\n input: &StateUpdate,\n\n _transaction: &Transaction,\n\n ) -> StateUpdate {\n\n // should parse transaction.parameters\n\n // make new state update\n\n input.clone()\n\n }\n\n}\n", "file_path": "predicate-plugins/src/ownership.rs", "rank": 73, "score": 8.688305726083431 }, { "content": " fn get_path(idx: usize, depth: usize, path: &mut Vec<bool>) {\n\n if depth == 0 {\n\n return;\n\n }\n\n path.push((idx & 0x01) != 0);\n\n Self::get_path(idx.rotate_right(1), depth - 1, path)\n\n }\n\n\n\n fn verify_and_get_parent(\n\n left: &SumMerkleNode,\n\n right: &SumMerkleNode,\n\n _first_left_end: u64,\n\n ) -> Result<SumMerkleNode, Error> {\n\n /*\n\n if left.get_end() > first_left_end {\n\n return Err(Error::VerifyError);\n\n }\n\n */\n\n if left.get_end() > right.get_end() {\n\n return Err(Error::VerifyError);\n", "file_path": "sum_merkle_tree/src/lib.rs", "rank": 74, "score": 8.573853731835573 }, { "content": " use super::StateManager;\n\n use ethereum_types::{Address, H256};\n\n use plasma_core::data_structure::{StateObject, StateUpdate, Transaction, Witness};\n\n\n\n fn create_state_update(start: u64, end: u64, block_number: u64) -> StateUpdate {\n\n StateUpdate::new(\n\n &StateObject::new(Address::zero(), &b\"data\"[..]),\n\n start,\n\n end,\n\n block_number,\n\n Address::zero(),\n\n )\n\n }\n\n\n\n #[test]\n\n fn test_execute_transaction() {\n\n // make state update\n\n let state_update = create_state_update(0, 100, 1);\n\n // make transaction\n\n let transaction = Transaction::new(\n", "file_path": "client/src/state/state_manager.rs", "rank": 75, "score": 8.312173644258719 }, { "content": " SumMerkleNode::Leaf { end, data }\n\n }\n\n\n\n pub fn create_node(end: u64, left: &SumMerkleNode, right: &SumMerkleNode) -> Self {\n\n SumMerkleNode::Node {\n\n end,\n\n left: Box::new(left.clone()),\n\n right: Box::new(right.clone()),\n\n }\n\n }\n\n\n\n pub fn compute_parent(left: &SumMerkleNode, right: &SumMerkleNode) -> SumMerkleNode {\n\n SumMerkleNode::create_node(right.get_end(), left, right)\n\n }\n\n\n\n fn get_end(&self) -> u64 {\n\n match self {\n\n SumMerkleNode::Leaf { end, .. } => *end,\n\n SumMerkleNode::Node { end, .. } => *end,\n\n SumMerkleNode::ProofNode { end, .. } => *end,\n", "file_path": "sum_merkle_tree/src/lib.rs", "rank": 76, "score": 8.237602185952685 }, { "content": "extern crate ethereum_types;\n\n\n\nuse super::signed_transaction::SignedTransaction;\n\nuse ethereum_types::H256;\n\n\n\npub struct SubmittedBlock {\n\n block: Block,\n\n block_number: u64,\n\n root: H256,\n\n}\n", "file_path": "core/src/data_structure/submitted_block.rs", "rank": 77, "score": 7.875769189309908 }, { "content": " }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\n/// ## struct Transaction\n\n/// - has a `plasma_contract_address`\n\n/// - has a `start` (A range element)\n\n/// - has a `end` (A range element)\n\n/// - has a `method_id` (like ABI)\n\n/// - has many `parameters`\n\n/// - has a `witness` (signature, proof or some)\n\n/// - Traits\n\n/// - Encodable\n\n/// - Decodable\n\npub struct Transaction {\n\n plasma_contract_address: Address,\n\n start: u64,\n\n end: u64,\n\n method_id: u8,\n\n parameters: Vec<u8>,\n", "file_path": "core/src/data_structure/transaction.rs", "rank": 78, "score": 7.579279905698382 }, { "content": "use crate::ownership::OwnershipPredicate;\n\nuse crate::predicate::PredicatePlugin;\n\nuse ethereum_types::Address;\n\n\n\n/// Predicate manager to load and get predicate plugin\n\npub struct PredicateManager {}\n\n\n\nimpl PredicateManager {\n\n pub fn get_plugin(_address: &Address) -> Box<dyn PredicatePlugin> {\n\n let predicate: OwnershipPredicate = Default::default();\n\n Box::new(predicate)\n\n }\n\n}\n", "file_path": "predicate-plugins/src/predicate_manager.rs", "rank": 79, "score": 7.554058089731673 }, { "content": "//\n\n// Created on Wed Jun 04 2019\n\n//\n\n// Copyright (c) 2019 Cryptoeconomics Lab, Inc.\n\n// This file is part of Plasma Chamber.\n\n//\n\n\n\nuse ethabi::Error as AbiDecodeError;\n\nuse failure::{Backtrace, Context, Fail};\n\nuse plasma_db::error::Error as PlasmaDbError;\n\nuse std::fmt;\n\nuse std::fmt::Display;\n\nuse std::io::Error as IoError;\n\n\n\n/// error definition for plasma core.\n\n#[derive(Fail, Debug)]\n\npub enum ErrorKind {\n\n #[fail(display = \"IO error\")]\n\n Io,\n\n #[fail(display = \"ABI Decode error\")]\n", "file_path": "client/src/error.rs", "rank": 80, "score": 7.452716873711903 }, { "content": "extern crate ethereum_types;\n\n\n\nuse crate::data_structure::block::Block;\n\nuse crate::data_structure::error::Error;\n\nuse crate::data_structure::transaction::Transaction;\n\nuse ethereum_types::H256;\n\n\n\npub struct BlockGenerator {}\n\n\n\nimpl BlockGenerator {\n\n pub fn generate(transactions: &[Transaction]) -> Result<Block, Error> {\n\n // TODO: caluculate merkle root\n\n // copy all transactions\n\n Ok(Block::new(transactions, H256::zero()))\n\n }\n\n}\n", "file_path": "core/src/process/block_generator.rs", "rank": 81, "score": 7.418753047983525 }, { "content": "#![cfg(target_os = \"android\")]\n\n#![allow(non_snake_case)]\n\n\n\nuse ethereum_types::Address;\n\nuse jni::objects::{JObject, JString};\n\nuse jni::sys::jstring;\n\nuse jni::JNIEnv;\n\nuse std::ffi::{CStr, CString};\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn Java_com_example_android_MainActivity_hello(\n\n env: JNIEnv,\n\n _: JObject,\n\n j_recipient: JString,\n\n) -> jstring {\n\n let recipient = CString::from(CStr::from_ptr(\n\n env.get_string(j_recipient).unwrap().as_ptr(),\n\n ));\n\n\n\n let output = env\n\n .new_string(\"Hello \".to_owned() + recipient.to_str().unwrap())\n\n .unwrap();\n\n output.into_inner()\n\n}\n", "file_path": "android/src/lib.rs", "rank": 82, "score": 7.356173298149919 }, { "content": "pub mod block;\n\npub mod error;\n\npub mod state_object;\n\npub mod state_update;\n\npub mod transaction;\n\n\n\npub use self::block::Block;\n\npub use self::state_object::StateObject;\n\npub use self::state_update::StateUpdate;\n\npub use self::transaction::Transaction;\n\npub use self::transaction::Witness;\n", "file_path": "core/src/data_structure.rs", "rank": 83, "score": 7.316906853822308 }, { "content": " AbiDecode,\n\n #[fail(display = \"Plasma Db error\")]\n\n PlasmaDbError,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Error {\n\n inner: Context<ErrorKind>,\n\n}\n\n\n\nimpl Fail for Error {\n\n fn cause(&self) -> Option<&Fail> {\n\n self.inner.cause()\n\n }\n\n\n\n fn backtrace(&self) -> Option<&Backtrace> {\n\n self.inner.backtrace()\n\n }\n\n}\n\n\n", "file_path": "client/src/error.rs", "rank": 84, "score": 7.27534242110381 }, { "content": " }\n\n Ok(SumMerkleNode::compute_parent(left, right))\n\n }\n\n\n\n /// Verify whether leaf is included or not\n\n pub fn verify(\n\n leaf: &SumMerkleNode,\n\n idx: usize,\n\n inclusion_proof: Vec<SumMerkleNode>,\n\n root: &Bytes,\n\n ) -> Result<ImplicitBounds, Error> {\n\n let mut path: Vec<bool> = vec![];\n\n Self::get_path(idx, inclusion_proof.len(), path.as_mut());\n\n println!(\"{:?}, {:?}\", path, inclusion_proof);\n\n let first_left_end = path\n\n .iter()\n\n .position(|&p| p)\n\n .map(|pos| inclusion_proof[pos].clone())\n\n .map_or(0, |n| n.get_end());\n\n let mut computed = leaf.clone();\n", "file_path": "sum_merkle_tree/src/lib.rs", "rank": 85, "score": 7.059938196174651 }, { "content": "impl ChainContext {\n\n pub fn new() -> Self {\n\n ChainContext {\n\n transactions: Arc::new(RwLock::new(vec![])),\n\n }\n\n }\n\n pub fn append(&self, signed_transaction: &Transaction) {\n\n self.transactions.write().push(signed_transaction.clone());\n\n }\n\n pub fn generate(&self) -> Result<Block, Error> {\n\n BlockGenerator::generate(&self.transactions.read().clone()).map_err(Into::into)\n\n }\n\n}\n", "file_path": "operator/src/context.rs", "rank": 86, "score": 7.022035527649106 }, { "content": "pub mod ownership;\n\npub mod predicate;\n\npub mod predicate_manager;\n\n\n\npub use predicate::PredicatePlugin;\n\npub use predicate_manager::PredicateManager;\n", "file_path": "predicate-plugins/src/lib.rs", "rank": 87, "score": 6.836567043877215 }, { "content": "extern crate ethereum_types;\n\n\n\nuse super::transaction::Transaction;\n\nuse ethereum_types::H256;\n\nuse rlp::{Decodable, DecoderError, Encodable, Rlp, RlpStream};\n\n\n\n#[derive(Clone, Debug)]\n\n/// ## struct Block\n\n/// - has many `transactions`\n\n/// - has a `merkle root hash`\n\n/// - Traits\n\n/// - Encodable\n\n/// - Decodable\n\npub struct Block {\n\n transactions: Vec<Transaction>,\n\n root: H256,\n\n}\n\n\n\nimpl Block {\n\n /// ### Block.new\n", "file_path": "core/src/data_structure/block.rs", "rank": 88, "score": 6.78876217464361 }, { "content": " chain_context: ChainContext,\n\n}\n\n\n\nimpl PlasmaRpcImpl {\n\n pub fn new() -> PlasmaRpcImpl {\n\n PlasmaRpcImpl {\n\n chain_context: Default::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl PlasmaRpc for PlasmaRpcImpl {\n\n fn protocol_version(&self) -> Result<String> {\n\n Ok(\"0.1.0\".into())\n\n }\n\n fn send_transaction(&self, message: String) -> Result<bool> {\n\n let abi_bytes = hex::decode(message).map_err(errors::invalid_params)?;\n\n let transaction: Transaction =\n\n Transaction::from_abi(&abi_bytes).map_err(errors::invalid_params)?;\n\n self.chain_context.append(&transaction);\n", "file_path": "operator/src/rpc/plasmarpcimpl.rs", "rank": 89, "score": 6.65106472364724 }, { "content": "\n\nimpl Default for HttpOption {\n\n fn default() -> Self {\n\n Self {\n\n threads: 3,\n\n url: \"127.0.0.1:8080\".to_string(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "operator/src/server.rs", "rank": 90, "score": 6.596912197328582 }, { "content": "//\n\n// Created on Wed May 08 2019\n\n//\n\n// Copyright (c) 2019 Cryptoeconomics Lab, Inc.\n\n// This file is part of Plasma Chamber.\n\n//\n\n\n\n/// json rpc server.\n\nuse super::error::Error;\n\nuse super::rpc::plasmarpc::PlasmaRpc;\n\nuse super::rpc::plasmarpcimpl::PlasmaRpcImpl;\n\nuse jsonrpc_http_server::jsonrpc_core::IoHandler;\n\nuse jsonrpc_http_server::{Server, ServerBuilder};\n\nuse std::net::SocketAddr;\n\n\n\n/// Options for Plasma JSON RPC server.\n\npub struct HttpOption {\n\n threads: usize,\n\n url: String,\n\n}\n", "file_path": "operator/src/server.rs", "rank": 91, "score": 6.562669327463942 }, { "content": "//\n\n// Created on Wed May 08 2019\n\n//\n\n// Copyright (c) 2019 Cryptoeconomics Lab, Inc.\n\n// This file is part of Plasma Chamber.\n\n//\n\n\n\nextern crate jsonrpc_core;\n\nextern crate plasma_core;\n\nextern crate rlp;\n\n\n\nuse super::errors;\n\nuse super::plasmarpc::PlasmaRpc;\n\nuse crate::context::ChainContext;\n\nuse jsonrpc_core::{Error as JsonRpcError, ErrorCode, Result};\n\nuse plasma_core::data_structure::Transaction;\n\n\n\n/// Plasma JSON RPC implementation.\n\n#[derive(Default)]\n\npub struct PlasmaRpcImpl {\n", "file_path": "operator/src/rpc/plasmarpcimpl.rs", "rank": 92, "score": 6.483187849869092 }, { "content": "impl Decodable for Block {\n\n fn decode(rlp: &Rlp) -> Result<Self, DecoderError> {\n\n let transactions: Vec<Transaction> = rlp.list_at(0)?;\n\n let root: Vec<u8> = rlp.val_at(1)?;\n\n Ok(Block::new(&transactions, H256::from_slice(&root)))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Block;\n\n use ethereum_types::H256;\n\n\n\n /*\n\n #[test]\n\n fn test_new() {\n\n let block = Block::new(&[], H256::zero());\n\n assert_eq!(block.root, H256::zero());\n\n }\n\n */\n", "file_path": "core/src/data_structure/block.rs", "rank": 93, "score": 6.169736100928542 }, { "content": " Error {\n\n inner: Context::from(ErrorKind::AbiDecode),\n\n }\n\n }\n\n}\n\n\n\nimpl From<PlasmaDbError> for Error {\n\n fn from(error: PlasmaDbError) -> Error {\n\n Error {\n\n inner: error.context(ErrorKind::PlasmaDbError),\n\n }\n\n }\n\n}\n", "file_path": "client/src/error.rs", "rank": 94, "score": 5.892150560612007 }, { "content": "extern crate crypto;\n\n\n\nuse self::crypto::sha3::Sha3;\n\nuse byteorder::{LittleEndian, WriteBytesExt};\n\nuse bytes::Bytes;\n\nuse crypto::digest::Digest;\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n VerifyError,\n\n}\n\n\n", "file_path": "sum_merkle_tree/src/lib.rs", "rank": 95, "score": 5.829400191110122 }, { "content": "//\n\n// Created on Wed May 08 2019\n\n//\n\n// Copyright (c) 2019 Cryptoeconomics Lab, Inc.\n\n// This file is part of Plasma Chamber.\n\n//\n\n\n\nextern crate failure;\n\nextern crate serde;\n\nextern crate serde_derive;\n\n\n\n/// plasma chain context data structure\n\npub mod context;\n\n/// error definitions.\n\npub mod error;\n\n/// APIs for JSON RPC.\n\npub mod rpc;\n\n/// Plasma JSON RPC server.\n\npub mod server;\n\n\n\nuse self::server::get_server;\n\nuse env_logger;\n\nuse std::env;\n\n\n\n/// entry point of plasma chain.\n", "file_path": "operator/src/lib.rs", "rank": 96, "score": 5.756981745450677 }, { "content": "mod block_generator;\n\n\n\npub use self::block_generator::BlockGenerator;\n", "file_path": "core/src/process.rs", "rank": 97, "score": 5.754621538242756 }, { "content": " }\n\n}\n\n\n\nimpl Error {\n\n pub fn new(inner: Context<ErrorKind>) -> Error {\n\n Error { inner }\n\n }\n\n\n\n pub fn kind(&self) -> &ErrorKind {\n\n self.inner.get_context()\n\n }\n\n}\n\n\n\nimpl From<ErrorKind> for Error {\n\n fn from(kind: ErrorKind) -> Error {\n\n Error {\n\n inner: Context::new(kind),\n\n }\n\n }\n\n}\n", "file_path": "core/src/data_structure/error.rs", "rank": 98, "score": 5.628711321624209 }, { "content": "impl Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n Display::fmt(&self.inner, f)\n\n }\n\n}\n\n\n\nimpl Error {\n\n pub fn new(inner: Context<ErrorKind>) -> Error {\n\n Error { inner }\n\n }\n\n\n\n pub fn kind(&self) -> &ErrorKind {\n\n self.inner.get_context()\n\n }\n\n}\n\n\n\nimpl From<ErrorKind> for Error {\n\n fn from(kind: ErrorKind) -> Error {\n\n Error {\n\n inner: Context::new(kind),\n", "file_path": "client/src/error.rs", "rank": 99, "score": 5.550242681575185 } ]
Rust
crates/m-mazing-tile-util/src/main.rs
tmfink/m-mazing
e603dd081f56b897d49014dc45078db1ba9ec19b
use std::{fs::File, io::Read, path::PathBuf, sync::mpsc}; use anyhow::{Context, Result}; use clap::Parser; use m_mazing_core::bevy; use m_mazing_core::prelude::*; use notify::Watcher; use bevy::asset::AssetServerSettings; use bevy::ecs as bevy_ecs; use bevy::prelude::*; use bevy::render::camera::ScalingMode; mod gui; use crate::gui::*; const LEGEND: &str = " Arrow keys - cycle; K/U - toggle cell used; [] - rotate; P - print; R - reload; Home/End - start/end; "; #[derive(Parser, Debug, Clone)] #[clap(about, version, author)] pub struct Args { #[clap(long, short, parse(from_occurrences))] verbose: i32, #[clap(long, short, parse(from_occurrences), conflicts_with = "verbose")] quiet: i32, #[clap(long, short)] tile_file: PathBuf, #[clap(long = "start-idx", short = 'i', default_value = "0")] index: usize, } #[derive(Debug)] pub struct CurrentTile { pub tile: Tile, pub id: Entity, } #[derive(Debug, Default)] pub struct RefreshTile(pub bool); #[allow(dead_code)] #[derive(Debug)] pub struct Ctx { pub args: Args, pub tileset: Vec<(String, Tile)>, pub tile_idx: isize, pub notify_rx: mpsc::Receiver<notify::Result<notify::Event>>, pub notify_watcher: notify::RecommendedWatcher, } #[derive(Component)] pub struct TitleString; #[derive(Debug)] pub struct TileAvailability(pub CellItemAvailability); #[derive(Debug, Default)] pub struct TileRotation { pub left_turns: u8, } impl Default for TileAvailability { fn default() -> Self { Self(CellItemAvailability::Available) } } impl Ctx { fn new() -> Result<Ctx> { let args = Args::parse(); let (notify_tx, notify_rx) = mpsc::channel(); let mut notify_watcher = notify::RecommendedWatcher::new(notify_tx) .context("Failed to create notify watcher")?; notify_watcher .watch(&args.tile_file, notify::RecursiveMode::Recursive) .context(format!("Failed to watch file {:?}", args.tile_file))?; let tile_idx = args.index as isize; let mut ctx = Ctx { args, tileset: Default::default(), tile_idx, notify_rx, notify_watcher, }; ctx.refresh()?; Ok(ctx) } fn refresh(&mut self) -> Result<()> { let mut tile_input_file = File::open(&self.args.tile_file) .with_context(|| format!("Failed to open input file {:?}", &self.args.tile_file))?; let mut tile_str = String::new(); tile_input_file .read_to_string(&mut tile_str) .with_context(|| "Failed to read input")?; self.tileset = m_mazing_core::tile::tileset::tileset_from_str(&tile_str) .with_context(|| "failed to parse tileset")?; Ok(()) } } fn setup_system(mut commands: Commands) { const CAMERA_EXTENT: f32 = 3.0; let mut camera_bundle = OrthographicCameraBundle::new_2d(); camera_bundle.orthographic_projection.left = -CAMERA_EXTENT; camera_bundle.orthographic_projection.right = CAMERA_EXTENT; camera_bundle.orthographic_projection.top = CAMERA_EXTENT; camera_bundle.orthographic_projection.bottom = -CAMERA_EXTENT; camera_bundle.orthographic_projection.scaling_mode = ScalingMode::FixedVertical; camera_bundle.transform.scale = Vec3::new(3.0, 3.0, 1.0); commands.spawn_bundle(camera_bundle); } fn ui_setup(mut commands: Commands, asset_server: Res<AssetServer>) { let font = asset_server.load("fonts/FiraMono-Medium.ttf"); commands.spawn_bundle(UiCameraBundle::default()); commands .spawn_bundle(NodeBundle { style: Style { size: Size::new(Val::Percent(100.0), Val::Percent(100.0)), justify_content: JustifyContent::SpaceBetween, ..Default::default() }, color: Color::NONE.into(), ..Default::default() }) .with_children(|parent| { parent .spawn_bundle(NodeBundle { style: Style { size: Size::new(Val::Percent(100.0), Val::Percent(100.0)), position_type: PositionType::Absolute, justify_content: JustifyContent::Center, align_items: AlignItems::FlexEnd, ..Default::default() }, color: Color::NONE.into(), ..Default::default() }) .with_children(|parent| { parent .spawn_bundle(TextBundle { style: Style { size: Size::new(Val::Auto, Val::Auto), ..Default::default() }, text: Text::with_section( "", TextStyle { font: font.clone(), font_size: 50.0, color: Color::BLACK, }, Default::default(), ), ..Default::default() }) .insert(TitleString); }); }); commands.spawn_bundle(TextBundle { style: Style { align_self: AlignSelf::FlexEnd, position_type: PositionType::Absolute, position: Rect { top: Val::Px(5.0), left: Val::Px(15.0), ..Default::default() }, ..Default::default() }, text: Text::with_section( LEGEND.trim().to_string(), TextStyle { font, font_size: 40.0, color: Color::WHITE, }, Default::default(), ), ..Default::default() }); } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, SystemLabel)] enum MySystemLabels { Input, SpawnTile, } fn frame_init(mut refresh: ResMut<RefreshTile>) { refresh.0 = false; } #[allow(unused)] fn debug_system(query: Query<Entity>) { info!("entities: {}", query.iter().count()); } fn main() -> Result<()> { let ctx = Ctx::new().with_context(|| "Failed to generate context")?; let level = log_level(ctx.args.verbose, ctx.args.quiet); println!("tileset: {:#?}", ctx.tileset); App::new() .insert_resource(Msaa { samples: 4 }) .insert_non_send_resource(ctx) .init_resource::<RenderState>() .init_resource::<TileAvailability>() .insert_resource(RefreshTile(true)) .insert_resource(AssetServerSettings { asset_folder: "../../assets".to_string(), }) .init_resource::<TileRotation>() .insert_resource(bevy::log::LogSettings { level, ..Default::default() }) .add_plugins(DefaultPlugins) .add_plugin(ShapePlugin) .add_startup_system(setup_system) .add_startup_system(ui_setup) .add_system(frame_init.before(MySystemLabels::Input)) .add_system(keyboard_input_system.label(MySystemLabels::Input)) .add_system(notify_tileset_change.label(MySystemLabels::Input)) .add_system( spawn_tile .label(MySystemLabels::SpawnTile) .after(MySystemLabels::Input), ) .add_system(print_tile.after(MySystemLabels::SpawnTile)) .run(); Ok(()) }
use std::{fs::File, io::Read, path::PathBuf, sync::mpsc}; use anyhow::{Context, Result}; use clap::Parser; use m_mazing_core::bevy; use m_mazing_core::prelude::*; use notify::Watcher; use bevy::asset::AssetServerSettings; use bevy::ecs as bevy_ecs; use bevy::prelude::*; use bevy::render::camera::ScalingMode; mod gui; use crate::gui::*; const LEGEND: &str = " Arrow keys - cycle; K/U - toggle cell used; [] - rotate; P - print; R - reload; Home/End - start/end; "; #[derive(Parser, Debug, Clone)] #[clap(about, version, author)] pub struct Args { #[clap(long, short, parse(from_occurrences))] verbose: i32, #[clap(long, short, parse(from_occurrences), conflicts_with = "verbose")] quiet: i32, #[clap(long, short)] tile_file: PathBuf, #[clap(long = "start-idx", short = 'i', default_value = "0")] index: usize, } #[derive(Debug)] pub struct CurrentTile { pub tile: Tile, pub id: Entity, } #[derive(Debug, Default)] pub struct RefreshTile(pub bool); #[allow(dead_code)] #[derive(Debug)] pub struct Ctx { pub args: Args, pub tileset: Vec<(String, Tile)>, pub tile_idx: isize, pub notify_rx: mpsc::Receiver<notify::Result<notify::Event>>, pub notify_watcher: notify::RecommendedWatcher, } #[derive(Component)] pub struct TitleString; #[derive(Debug)] pub struct TileAvailability(pub CellItemAvailability); #[derive(Debug, Default)] pub struct TileRotation { pub left_turns: u8, } impl Default for TileAvailability { fn default() -> Self { Self(CellItemAvailability::Available) } } impl Ctx { fn new() -> Result<Ctx> { let args = Args::parse(); let (notify_tx, notify_rx) = mpsc::channel(); let mut notify_watcher = notify::RecommendedWatcher::new(notify_tx) .context("Failed to create notify watcher")?; notify_watcher .watch(&args.tile_file, notify::RecursiveMode::Recursive) .context(format!("Failed to watch file {:?}", args.tile_file))?; let tile_idx = args.index as isize; let mut ctx = Ctx { args, tileset: Default::default(), tile_idx, notify_rx, notify_watcher, }; ctx.refresh()?; Ok(ctx) } fn refresh(&mut self) -> Result<()> { let mut tile_input_file = File::open(&self.args.tile_file) .with_context(|| format!("Failed to open input file {:?}", &self.args.tile_file))?; let mut tile_str = String::new(); tile_input_file .read_to_string(&mut tile_str) .with_context(|| "Failed to read input")?; self.tileset = m_mazing_core::tile::tileset::tileset_from_str(&tile_str) .with_context(|| "failed to parse tileset")?; Ok(()) } } fn setup_system(mut commands: Commands) { const CAMERA_EXTENT: f32 = 3.0; let mut camera_bundle = OrthographicCameraBundle::new_2d(); camera_bundle.orthographic_projection.left = -CAMERA_EXTENT; camera_bundle.orthographic_projection.right = CAMERA_EXTENT; camera_bundle.orthographic_projection.top = CAMERA_EXTENT; camera_bundle.orthographic_projection.bottom = -CAMERA_EXTENT; camera_bundle.orthographic_projection.scaling_mode = ScalingMode::FixedVertical; camera_bundle.transform.scale = Vec3::new(3.0, 3.0, 1.0); commands.spawn_bundle(camera_bundle); } fn ui_setup(mut commands: Commands, asset_server: Res<AssetServer>) { let font = asset_server.load("fonts/FiraMono-Medium.ttf"); commands.spawn_bundle(UiCameraBundle::default()); commands .spawn_bundle(NodeBundle { style: Style { size: Size::new(Val::Percent(100.0), Val::Percent(100.0)), justify_content: JustifyContent::SpaceBetween, ..Default::default() }, color: Color::NONE.into(), ..Default::default() }) .with_children(|parent| { parent .spawn_bundle(NodeBundle { style: Style { size: Size::new(Val::Percent(100.0), Val::Percent(100.0)), position_type: PositionType::Absolute, justify_content: JustifyContent::Center, align_items: AlignItems::FlexEnd, ..Default::default() }, color: Color::NONE.into(), ..Default::default() }) .with_children(|parent| { parent .spawn_bundle(TextBundle { style: Style { size: Size::new(Val::Auto, Val::Auto), ..Default::default() }, text: Text::with_section( "",
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, SystemLabel)] enum MySystemLabels { Input, SpawnTile, } fn frame_init(mut refresh: ResMut<RefreshTile>) { refresh.0 = false; } #[allow(unused)] fn debug_system(query: Query<Entity>) { info!("entities: {}", query.iter().count()); } fn main() -> Result<()> { let ctx = Ctx::new().with_context(|| "Failed to generate context")?; let level = log_level(ctx.args.verbose, ctx.args.quiet); println!("tileset: {:#?}", ctx.tileset); App::new() .insert_resource(Msaa { samples: 4 }) .insert_non_send_resource(ctx) .init_resource::<RenderState>() .init_resource::<TileAvailability>() .insert_resource(RefreshTile(true)) .insert_resource(AssetServerSettings { asset_folder: "../../assets".to_string(), }) .init_resource::<TileRotation>() .insert_resource(bevy::log::LogSettings { level, ..Default::default() }) .add_plugins(DefaultPlugins) .add_plugin(ShapePlugin) .add_startup_system(setup_system) .add_startup_system(ui_setup) .add_system(frame_init.before(MySystemLabels::Input)) .add_system(keyboard_input_system.label(MySystemLabels::Input)) .add_system(notify_tileset_change.label(MySystemLabels::Input)) .add_system( spawn_tile .label(MySystemLabels::SpawnTile) .after(MySystemLabels::Input), ) .add_system(print_tile.after(MySystemLabels::SpawnTile)) .run(); Ok(()) }
TextStyle { font: font.clone(), font_size: 50.0, color: Color::BLACK, }, Default::default(), ), ..Default::default() }) .insert(TitleString); }); }); commands.spawn_bundle(TextBundle { style: Style { align_self: AlignSelf::FlexEnd, position_type: PositionType::Absolute, position: Rect { top: Val::Px(5.0), left: Val::Px(15.0), ..Default::default() }, ..Default::default() }, text: Text::with_section( LEGEND.trim().to_string(), TextStyle { font, font_size: 40.0, color: Color::WHITE, }, Default::default(), ), ..Default::default() }); }
function_block-function_prefix_line
[ { "content": "pub fn notify_tileset_change(mut should_refresh: ResMut<RefreshTile>, mut ctx: NonSendMut<Ctx>) {\n\n match ctx.notify_rx.try_recv() {\n\n Ok(Ok(event)) => {\n\n info!(\"new event {:?}\", event);\n\n should_refresh.0 = true;\n\n\n\n match ctx.refresh() {\n\n Ok(()) => info!(\"Manually reloading\"),\n\n Err(err) => error!(\"Failed to manually reload: {:#}\", err),\n\n }\n\n }\n\n Ok(Err(err)) => error!(\"Failed to get new event {:#}\", err),\n\n Err(TryRecvError::Empty) => (),\n\n Err(TryRecvError::Disconnected) => error!(\"Notify disconnected\"),\n\n }\n\n}\n\n\n\n/*\n", "file_path": "crates/m-mazing-tile-util/src/gui.rs", "rank": 0, "score": 231232.48358008385 }, { "content": "pub fn update(ctx: &mut Ctx) {\n\n // todo: smarter fit rect for all tiles\n\n let fit_rect = Rect {\n\n x: -3.,\n\n y: -3.,\n\n w: 6.,\n\n h: 6.,\n\n };\n\n let whole_camera = camera_zoom_to_fit(fit_rect);\n\n set_camera(&whole_camera);\n\n}\n\n*/\n\n\n\n/*\n", "file_path": "crates/m-mazing-tile-util/src/gui.rs", "rank": 1, "score": 212028.99377471136 }, { "content": "pub fn tileset_from_str(s: &str) -> Result<Vec<(String, Tile)>, TileParsingError> {\n\n info!(\"Parsing tileset\");\n\n tileset_from_lines(s.lines())\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct ParseContext<'a> {\n\n pub line: &'a [u8],\n\n pub line_number: u32,\n\n}\n\n\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 2, "score": 205065.82205255295 }, { "content": "pub fn draw(ctx: NonSend<Ctx>, render: Res<RenderState>, mut commands: Commands) {\n\n // screen space camera for text\n\n let (font_size, font_scale, font_scale_aspect) = camera_font_scale(render.theme.font_size);\n\n}\n\n*/\n\n\n", "file_path": "crates/m-mazing-tile-util/src/gui.rs", "rank": 3, "score": 199932.20182034926 }, { "content": "pub fn print_tile(keyboard_input: Res<Input<KeyCode>>, tile: Option<Res<CurrentTile>>) {\n\n if keyboard_input.just_pressed(KeyCode::P) {\n\n match tile {\n\n None => println!(\"No tile\"),\n\n Some(tile) => println!(\"{:#?}\", tile.tile),\n\n }\n\n }\n\n}\n", "file_path": "crates/m-mazing-tile-util/src/gui.rs", "rank": 4, "score": 196174.1477196841 }, { "content": "pub fn rotate_2d_array<T: Copy, const WIDTH: usize, const HEIGHT: usize>(\n\n arr: &[[T; WIDTH]; HEIGHT],\n\n out: &mut [[T; HEIGHT]; WIDTH],\n\n spin: SpinDirection,\n\n) {\n\n for (row_idx, row) in arr.iter().enumerate() {\n\n for (col_idx, cell) in row.iter().copied().enumerate() {\n\n let (a, b) = match spin {\n\n SpinDirection::Clockwise => (col_idx, HEIGHT - 1 - row_idx),\n\n SpinDirection::CounterClockwise => (WIDTH - 1 - col_idx, row_idx),\n\n };\n\n out[a][b] = cell;\n\n }\n\n }\n\n}\n\n\n\nimpl FromStr for Tile {\n\n type Err = tileset::TileParsingError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n tileset::tile_from_str(s)\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 5, "score": 195924.46840367554 }, { "content": "fn skip_tile_line(line: &[u8]) -> bool {\n\n matches!(line.get(0), None | Some(b'#'))\n\n}\n\n\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 6, "score": 163608.21619704913 }, { "content": "pub fn log_level(verbose: i32, quiet: i32) -> Level {\n\n let levels = &[\n\n Level::ERROR,\n\n Level::WARN,\n\n Level::INFO,\n\n Level::DEBUG,\n\n Level::TRACE,\n\n ];\n\n let level_count = 2 + verbose - quiet;\n\n\n\n let idx = level_count.clamp(0, (levels.len() - 1) as i32);\n\n let level = levels[idx as usize];\n\n info!(\"log verbosity: {:?}\", level);\n\n level\n\n}\n", "file_path": "crates/m-mazing-core/src/lib.rs", "rank": 7, "score": 161406.32341508463 }, { "content": "fn eat_thing<C, T>(ctx: &ParseContext, cursor: &mut C) -> Result<T, TileParsingError>\n\nwhere\n\n C: Iterator<Item = (usize, u8)>,\n\n T: TileTokenParse,\n\n{\n\n trace!(\" Eating {}\", T::NAME);\n\n let (col_number, c) = if let Some((col_number, c)) = cursor.next() {\n\n (col_number, c)\n\n } else {\n\n return Err(TileParsingError::IncompleteLine {\n\n line_number: ctx.line_number,\n\n line: String::from_utf8_lossy(ctx.line).to_string(),\n\n });\n\n };\n\n\n\n T::parse(c).ok_or_else(|| TileParsingError::ItemParse {\n\n col_number: col_number as u32,\n\n line_number: ctx.line_number,\n\n line: String::from_utf8_lossy(ctx.line).to_string(),\n\n char: char::from_u32(c as u32).unwrap_or(char::REPLACEMENT_CHARACTER),\n\n name: T::NAME,\n\n allowed: T::ALLOWED_CHARS,\n\n })\n\n}\n\n\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 8, "score": 157432.89471997216 }, { "content": "/// Creates a `Tile` from an iterator that produces (line_number, line).\n\n/// *line_number* is 1-based indexing.\n\nfn tile_from_lines<L, S>(lines: &mut L) -> Result<Tile, TileParsingError>\n\nwhere\n\n L: Iterator<Item = (usize, S)>,\n\n S: AsRef<[u8]>,\n\n{\n\n #[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\n enum ParsingState {\n\n WallRow { row_num: u32 },\n\n CellRow { row_num: u32 },\n\n Elevator,\n\n }\n\n let mut allow_line_skips = true;\n\n let mut line_number = 0;\n\n let mut state = ParsingState::WallRow { row_num: 0 };\n\n let mut tile = Tile::default();\n\n\n\n for (line_number_x, line_x) in lines {\n\n line_number = line_number_x as u32;\n\n let line = line_x.as_ref();\n\n let ctx = ParseContext { line, line_number };\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 9, "score": 153485.55709948263 }, { "content": "pub fn keyboard_input_system(\n\n keyboard_input: Res<Input<KeyCode>>,\n\n mut app_exit_events: EventWriter<AppExit>,\n\n mut should_refresh: ResMut<RefreshTile>,\n\n mut ctx: NonSendMut<Ctx>,\n\n mut availability: ResMut<TileAvailability>,\n\n mut tile_rotation: ResMut<TileRotation>,\n\n) {\n\n if keyboard_input.any_pressed([KeyCode::Escape, KeyCode::Q]) {\n\n app_exit_events.send(AppExit);\n\n }\n\n\n\n if keyboard_input.just_pressed(KeyCode::R) {\n\n should_refresh.0 = true;\n\n match ctx.refresh() {\n\n Ok(()) => info!(\"Manually reloading\"),\n\n Err(err) => error!(\"Failed to manually reload: {:#}\", err),\n\n }\n\n }\n\n\n", "file_path": "crates/m-mazing-tile-util/src/gui.rs", "rank": 10, "score": 148639.10267284984 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn spawn_tile(\n\n ctx: NonSend<Ctx>,\n\n availability: Res<TileAvailability>,\n\n render: Res<RenderState>,\n\n refresh: Res<RefreshTile>,\n\n tile: Option<ResMut<CurrentTile>>,\n\n tile_rotation: Res<TileRotation>,\n\n mut commands: Commands,\n\n mut query: Query<&mut Text, With<TitleString>>,\n\n) {\n\n if !(refresh.0 || ctx.is_changed() || availability.is_changed() || tile_rotation.is_changed()) {\n\n return;\n\n }\n\n\n\n info!(\"spawning tile\");\n\n\n\n if let Some(tile) = tile {\n\n commands.entity(tile.id).despawn_recursive();\n\n }\n\n\n", "file_path": "crates/m-mazing-tile-util/src/gui.rs", "rank": 11, "score": 131057.54456888564 }, { "content": "fn tileset_from_lines<L, S>(lines: L) -> Result<Vec<(String, Tile)>, TileParsingError>\n\nwhere\n\n L: Iterator<Item = S>,\n\n S: AsRef<[u8]>,\n\n{\n\n let mut lines = lines.enumerate().map(|(idx, line)| (idx + 1, line));\n\n let mut tileset = Vec::new();\n\n\n\n loop {\n\n let line = lines.next();\n\n let (line_number, line) = match &line {\n\n Some((line_number, line)) => (*line_number as u32, line.as_ref()),\n\n None => break,\n\n };\n\n\n\n debug!(\n\n \"tileset line {}: {:?}\",\n\n line_number,\n\n String::from_utf8_lossy(line),\n\n );\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 13, "score": 122565.17829253149 }, { "content": "fn polar_to_cartesian(radius: f32, angle_rads: f32) -> Vec2 {\n\n let x = radius * angle_rads.cos();\n\n let y = radius * angle_rads.sin();\n\n Vec2::new(x, y)\n\n}\n", "file_path": "crates/m-mazing-core/src/render/mod.rs", "rank": 15, "score": 107352.58593605642 }, { "content": "pub fn game_roles(num_players: u8) -> Option<&'static [&'static [BoardAction]]> {\n\n if num_players == 0 {\n\n return None;\n\n }\n\n ALLOWED_ACTIONS.get(num_players as usize).copied()\n\n}\n\n\n\nconst ALLOWED_ACTIONS: &[&[&[BoardAction]]] = &[\n\n // 0 players\n\n &[],\n\n // 1 player\n\n &[&[\n\n Warp,\n\n Explore,\n\n Escalator,\n\n Slide(CartesianDirection::Left),\n\n Slide(CartesianDirection::Up),\n\n Slide(CartesianDirection::Down),\n\n Slide(CartesianDirection::Right),\n\n ]],\n", "file_path": "crates/m-mazing-core/src/role.rs", "rank": 17, "score": 100294.67922715645 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\nstruct Placeholder;\n\n\n\nimpl TileTokenParse for Placeholder {\n\n const NAME: &'static str = \"Placeholder\";\n\n const ALLOWED_CHARS: &'static str = \"+\";\n\n\n\n fn parse(value: u8) -> Option<Self> {\n\n if value == b'+' {\n\n Some(Self)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\n#[derive(Error, Debug, PartialEq, Eq)]\n\npub enum TileParsingError {\n\n #[error(\"Expected tile name leader '@', at line {line_number} found line {line:?}\")]\n\n InvalidNameLeader { line_number: u32, line: String },\n\n\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 20, "score": 96068.1463546251 }, { "content": "fn main() -> Result<()> {\n\n let args = Args::parse();\n\n let level = log_level(args.verbose, args.quiet);\n\n\n\n App::new()\n\n .insert_resource(Msaa { samples: 4 })\n\n .insert_resource(bevy::log::LogSettings {\n\n level,\n\n ..Default::default()\n\n })\n\n .add_plugins(DefaultPlugins)\n\n .add_plugin(ShapePlugin)\n\n .run();\n\n\n\n Ok(())\n\n}\n", "file_path": "crates/m-mazing-client/src/main.rs", "rank": 21, "score": 87333.6707562895 }, { "content": "fn elevators_from_line(\n\n ctx: &ParseContext,\n\n line: &[u8],\n\n tile: &mut Tile,\n\n) -> Result<(), InvalidEscalator> {\n\n trace!(\" parsing elevators\");\n\n let err = InvalidEscalator {\n\n line_number: ctx.line_number,\n\n line: String::from_utf8_lossy(line).to_string(),\n\n msg: \"\",\n\n };\n\n let rest = match line {\n\n [b'E', b':', rest @ ..] => {\n\n trace!(\" found elevator prefix\");\n\n rest\n\n }\n\n [] => {\n\n trace!(\" found empty elevator line\");\n\n return Ok(());\n\n }\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 22, "score": 82521.76636730696 }, { "content": "fn render_used_marker(\n\n render: &RenderState,\n\n location: Vec2,\n\n commands: &mut Commands,\n\n tile_entity: Entity,\n\n) {\n\n let x_left = 0.1;\n\n let x_right = 0.9;\n\n let y_top = -0.1;\n\n let y_bottom = -0.9;\n\n\n\n let builder = GeometryBuilder::new()\n\n .add(&shapes::Line(\n\n Vec2::new(x_left, y_top),\n\n Vec2::new(x_right, y_bottom),\n\n ))\n\n .add(&shapes::Line(\n\n Vec2::new(x_left, y_bottom),\n\n Vec2::new(x_right, y_top),\n\n ));\n", "file_path": "crates/m-mazing-core/src/render/render_tile.rs", "rank": 23, "score": 79803.81929775349 }, { "content": "#[derive(Component)]\n\nstruct TileShape;\n\n\n\nimpl Tile {\n\n pub fn spawn(&self, pos: Vec2, render: &RenderState, commands: &mut Commands) -> Entity {\n\n let is_reachable_coord = self.reachable_coords();\n\n\n\n let tile_bg_color = if self.has_camera() {\n\n render.theme.tile_camera_bg_color\n\n } else {\n\n render.theme.tile_normal_bg_color\n\n };\n\n\n\n let shape = shapes::Rectangle {\n\n extents: Vec2::new(Self::CELL_GRID_WIDTH as f32, Self::CELL_GRID_WIDTH as f32),\n\n origin: RectangleOrigin::Center,\n\n };\n\n let tile_entity = commands\n\n .spawn_bundle(GeometryBuilder::build_as(\n\n &shape,\n\n DrawMode::Fill(FillMode::color(tile_bg_color)),\n", "file_path": "crates/m-mazing-core/src/render/render_tile.rs", "rank": 24, "score": 72080.71242420662 }, { "content": "pub trait Render {\n\n fn render(&self, pos: Vec2, render: &RenderState);\n\n}\n\n\n\nimpl WallState {\n\n fn wall_color(self, render: &RenderState, tile_bg_color: Color) -> Color {\n\n match self {\n\n WallState::Explore(pawn) => pawn.as_color(render),\n\n WallState::Open => render.theme.wall_open_color,\n\n WallState::OrangeOnly => render.theme.wall_orange_only_color,\n\n WallState::Entrance => tile_bg_color,\n\n WallState::Blocked => render.theme.wall_blocked_color,\n\n }\n\n }\n\n}\n\n\n\nimpl Pawn {\n\n fn as_color(self, render: &RenderState) -> Color {\n\n match self {\n\n Self::Green => render.theme.pawn_green_color,\n\n Self::Orange => render.theme.pawn_orange_color,\n\n Self::Yellow => render.theme.pawn_yellow_color,\n\n Self::Purple => render.theme.pawn_purple_color,\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/m-mazing-core/src/render/mod.rs", "rank": 25, "score": 71986.51930968743 }, { "content": "pub fn draw_connected_line<I: Iterator<Item = Vec2>>(\n\n points: I,\n\n mut builder: GeometryBuilder,\n\n) -> GeometryBuilder {\n\n for (a, b) in points.tuple_windows() {\n\n builder = builder.add(&shapes::Line(a, b));\n\n }\n\n builder\n\n}\n", "file_path": "crates/m-mazing-core/src/render/shape.rs", "rank": 26, "score": 69730.11316669497 }, { "content": "#[must_use]\n\npub fn camera_zoom_to_fit(fit: Rect) -> mq::Camera2D {\n\n let desired_aspect_ratio = fit.w / fit.h;\n\n let screen_width = screen_width();\n\n let screen_height = screen_height();\n\n let current_aspect_ratio = screen_width / screen_height;\n\n debug!(\"Fitting camera to {:?}\", fit);\n\n\n\n let w;\n\n let h;\n\n let x;\n\n let y;\n\n\n\n if current_aspect_ratio >= desired_aspect_ratio {\n\n trace!(\n\n \" Window too wide; want {} but am currently {}\",\n\n desired_aspect_ratio,\n\n current_aspect_ratio\n\n );\n\n w = fit.w * current_aspect_ratio;\n\n h = fit.h;\n", "file_path": "crates/m-mazing-core/src/render/camera.rs", "rank": 27, "score": 67492.58450734487 }, { "content": "fn render_wall(\n\n render: &RenderState,\n\n a: Vec2,\n\n b: Vec2,\n\n wall: WallState,\n\n tile_bg_color: Color,\n\n commands: &mut Commands,\n\n tile_entity: Entity,\n\n) {\n\n let mut builder = GeometryBuilder::new();\n\n let color;\n\n if wall == WallState::OrangeOnly {\n\n let hole_halfwidth = 0.5 * render.theme.wall_orange_only_hole_width;\n\n let hole_a = a.lerp(b, 0.5 - hole_halfwidth);\n\n let hole_b = a.lerp(b, 0.5 + hole_halfwidth);\n\n let line1 = shapes::Line(a, hole_a);\n\n let line2 = shapes::Line(hole_b, b);\n\n builder = builder.add(&line1).add(&line2);\n\n color = render.theme.wall_orange_only_color;\n\n } else {\n", "file_path": "crates/m-mazing-core/src/render/render_tile.rs", "rank": 28, "score": 55407.345231382766 }, { "content": "fn render_camera(\n\n render: &RenderState,\n\n location: Vec2,\n\n commands: &mut Commands,\n\n tile_entity: Entity,\n\n) {\n\n let translation = (location + Vec2::new(0.5, -0.5)).extend(CELL_ITEM_Z);\n\n let transform = Transform::from_translation(translation);\n\n\n\n let points = [\n\n Vec2::new(-0.35, 0.0),\n\n Vec2::new(-0.175, 0.15),\n\n Vec2::new(0.0, 0.2),\n\n Vec2::new(0.175, 0.15),\n\n Vec2::new(0.35, 0.0),\n\n ];\n\n\n\n let mut builder = GeometryBuilder::new();\n\n builder = shape::draw_connected_line(points.iter().copied(), builder);\n\n builder = shape::draw_connected_line(\n", "file_path": "crates/m-mazing-core/src/render/render_tile.rs", "rank": 29, "score": 55407.345231382766 }, { "content": "fn render_timer(\n\n render: &RenderState,\n\n location: Vec2,\n\n commands: &mut Commands,\n\n tile_entity: Entity,\n\n) {\n\n const X_LEFT: f32 = 0.25;\n\n const X_RIGHT: f32 = 0.75;\n\n const Y_TOP: f32 = -0.2;\n\n const Y_BOTTOM: f32 = -0.8;\n\n\n\n const TILE_POINTS: [Vec2; 5] = [\n\n const_vec2!([X_LEFT, Y_TOP]),\n\n const_vec2!([X_RIGHT, Y_TOP]),\n\n const_vec2!([X_LEFT, Y_BOTTOM]),\n\n const_vec2!([X_RIGHT, Y_BOTTOM]),\n\n const_vec2!([X_LEFT, Y_TOP]),\n\n ];\n\n\n\n let builder = shape::draw_connected_line(TILE_POINTS.iter().copied(), GeometryBuilder::new());\n", "file_path": "crates/m-mazing-core/src/render/render_tile.rs", "rank": 30, "score": 55407.345231382766 }, { "content": "fn render_escalator(\n\n render: &RenderState,\n\n escalator: EscalatorLocation,\n\n commands: &mut Commands,\n\n tile_entity: Entity,\n\n) {\n\n let [a, b] = escalator.0;\n\n let offset = CELL_HALF_WIDTH - GRID_HALF_WIDTH;\n\n let transform = Transform::from_xyz(offset, offset, ESCALATOR_Z);\n\n let entity = commands\n\n .spawn_bundle(GeometryBuilder::build_as(\n\n &shapes::Line(\n\n Vec2::new(a.x() as f32, 3.0 - a.y() as f32),\n\n Vec2::new(b.x() as f32, 3.0 - b.y() as f32),\n\n ),\n\n DrawMode::Stroke(StrokeMode::new(\n\n render.theme.escalator_color,\n\n render.theme.escalator_thickness,\n\n )),\n\n transform,\n\n ))\n\n .id();\n\n commands.entity(tile_entity).push_children(&[entity]);\n\n}\n\n\n", "file_path": "crates/m-mazing-core/src/render/render_tile.rs", "rank": 31, "score": 55407.345231382766 }, { "content": "fn render_loot(\n\n render: &RenderState,\n\n location: Vec2,\n\n pawn: Pawn,\n\n commands: &mut Commands,\n\n tile_entity: Entity,\n\n) {\n\n let shape = shapes::Rectangle {\n\n extents: Vec2::new(0.5, 0.5),\n\n origin: RectangleOrigin::Center,\n\n };\n\n let rot = Quat::from_rotation_z(std::f32::consts::FRAC_PI_4);\n\n let translation = (location + Vec2::new(0.5, -0.5)).extend(CELL_ITEM_Z);\n\n let transform = Transform::from_rotation(rot).with_translation(translation);\n\n let entity = commands\n\n .spawn_bundle(GeometryBuilder::build_as(\n\n &shape,\n\n DrawMode::Stroke(StrokeMode::new(\n\n pawn.as_color(render),\n\n render.theme.wall_thickness,\n\n )),\n\n transform,\n\n ))\n\n .id();\n\n commands.entity(tile_entity).push_children(&[entity]);\n\n}\n\n\n", "file_path": "crates/m-mazing-core/src/render/render_tile.rs", "rank": 32, "score": 55407.345231382766 }, { "content": "fn render_warp(\n\n render: &RenderState,\n\n location: Vec2,\n\n pawn: Pawn,\n\n commands: &mut Commands,\n\n tile_entity: Entity,\n\n) {\n\n let center = Vec2::new(CELL_HALF_WIDTH, -CELL_HALF_WIDTH);\n\n\n\n const NUM_ANGLES: u32 = 8;\n\n const NUM_RADII: u32 = 24;\n\n\n\n let angles = (0..NUM_ANGLES)\n\n .map(|x| x as f32 * -2.0 * std::f32::consts::PI / NUM_ANGLES as f32)\n\n .cycle();\n\n let radii = (0..NUM_RADII).map(|x| x as f32 * CELL_HALF_WIDTH * 0.8 / NUM_RADII as f32);\n\n let points = angles\n\n .zip(radii)\n\n .map(|(angle, radius)| polar_to_cartesian(radius, angle) + center);\n\n\n", "file_path": "crates/m-mazing-core/src/render/render_tile.rs", "rank": 33, "score": 55407.345231382766 }, { "content": "fn render_crystal_ball(\n\n render: &RenderState,\n\n location: Vec2,\n\n commands: &mut Commands,\n\n tile_entity: Entity,\n\n) {\n\n let translation = (location + Vec2::new(0.5, -0.5)).extend(CELL_ITEM_Z);\n\n let transform = Transform::from_translation(translation);\n\n let draw_mode = DrawMode::Outlined {\n\n fill_mode: FillMode::color(Color::WHITE),\n\n outline_mode: StrokeMode::new(render.theme.crystal_ball_color, 0.05),\n\n };\n\n\n\n let hexagon = shapes::RegularPolygon {\n\n sides: 6,\n\n center: Vec2::ZERO,\n\n feature: shapes::RegularPolygonFeature::Radius(0.4),\n\n };\n\n let hexagon = commands\n\n .spawn_bundle(GeometryBuilder::build_as(&hexagon, draw_mode, transform))\n", "file_path": "crates/m-mazing-core/src/render/render_tile.rs", "rank": 34, "score": 54227.537992359976 }, { "content": "#[allow(clippy::too_many_arguments)]\n\nfn render_final_exit(\n\n render: &RenderState,\n\n location: Vec2,\n\n pawn: Pawn,\n\n tile: &Tile,\n\n col_idx: usize,\n\n row_idx: usize,\n\n commands: &mut Commands,\n\n tile_entity: Entity,\n\n) {\n\n let point = TileGridCoord::new(col_idx as u8, row_idx as u8)\n\n .expect(\"could not convert row/col idx to tile\");\n\n\n\n let angle = tile.cell_exit_direction(point).as_angle();\n\n let rotation = Quat::from_rotation_z(angle);\n\n let translation = (location + Vec2::new(0.5, -0.5)).extend(CELL_BG_Z);\n\n let transform = Transform::from_rotation(rotation).with_translation(translation);\n\n\n\n let width = 1.0 - render.theme.wall_thickness;\n\n let bg_square = shapes::Rectangle {\n", "file_path": "crates/m-mazing-core/src/render/render_tile.rs", "rank": 35, "score": 54227.537992359976 }, { "content": "trait TileTokenParse\n\nwhere\n\n Self: Sized,\n\n{\n\n const NAME: &'static str;\n\n const ALLOWED_CHARS: &'static str;\n\n fn parse(value: u8) -> Option<Self>;\n\n}\n\n\n\n#[derive(Debug, Default, Clone, PartialEq, Eq)]\n\npub struct Tile {\n\n cell_grid: [[TileCell; Self::CELL_GRID_WIDTH as usize]; Self::CELL_GRID_WIDTH as usize],\n\n horz_walls: [[WallState; Self::CELL_GRID_WIDTH as usize]; (Self::CELL_GRID_WIDTH + 1) as usize],\n\n vert_walls: [[WallState; (Self::CELL_GRID_WIDTH + 1) as usize]; Self::CELL_GRID_WIDTH as usize],\n\n escalators: arrayvec::ArrayVec<EscalatorLocation, { Self::MAX_ESCALATORS_PER_TILE as usize }>,\n\n}\n\n\n\nimpl Tile {\n\n pub const CELL_GRID_WIDTH: u8 = 4;\n\n const MAX_ESCALATORS_PER_TILE: u8 = 4;\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 36, "score": 47526.36111815432 }, { "content": "\n\n pub fn set_availability(&mut self, new_used: CellItemAvailability) {\n\n use TileCell::*;\n\n\n\n match self {\n\n TimerFlip(avail) | Camera(avail) | CrystalBall(avail) => {\n\n *avail = new_used;\n\n }\n\n _ => {}\n\n }\n\n }\n\n}\n\n\n\nimpl TileTokenParse for TileCell {\n\n const NAME: &'static str = \"TileCell\";\n\n const ALLOWED_CHARS: &'static str = \" 1234GOYPgoypctb\";\n\n\n\n fn parse(value: u8) -> Option<Self> {\n\n Some(match value {\n\n b' ' => Self::Empty,\n", "file_path": "crates/m-mazing-core/src/tile/cell.rs", "rank": 37, "score": 47401.5977687688 }, { "content": " /// Final exit tile\n\n FinalExit(Pawn),\n\n\n\n /// Crystal ball\n\n CrystalBall(CellItemAvailability),\n\n}\n\n\n\nimpl Default for TileCell {\n\n fn default() -> Self {\n\n Self::Empty\n\n }\n\n}\n\n\n\nimpl TileCell {\n\n pub fn is_used(self) -> bool {\n\n use CellItemAvailability::*;\n\n use TileCell::*;\n\n\n\n matches!(self, TimerFlip(Used) | Camera(Used) | CrystalBall(Used))\n\n }\n", "file_path": "crates/m-mazing-core/src/tile/cell.rs", "rank": 38, "score": 47399.30857954305 }, { "content": " b'c' => Self::Camera(CellItemAvailability::Available),\n\n b'b' => Self::CrystalBall(CellItemAvailability::Available),\n\n\n\n _ => return None,\n\n })\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum CellItemAvailability {\n\n Available,\n\n Used,\n\n}\n", "file_path": "crates/m-mazing-core/src/tile/cell.rs", "rank": 39, "score": 47393.983091461676 }, { "content": "use super::TileTokenParse;\n\nuse crate::prelude::*;\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum TileCell {\n\n /// Pawn walk freely through\n\n Empty,\n\n\n\n // Pawn can be warped to this point\n\n Warp(Pawn),\n\n\n\n /// Can flip sand timer\n\n TimerFlip(CellItemAvailability),\n\n\n\n /// Security Camera\n\n Camera(CellItemAvailability),\n\n\n\n /// Loot that pawns need to \"steal\" before exit\n\n Loot(Pawn),\n\n\n", "file_path": "crates/m-mazing-core/src/tile/cell.rs", "rank": 40, "score": 47391.14702382979 }, { "content": "\n\n // Warp\n\n b'1' => Self::Warp(Pawn::Green),\n\n b'2' => Self::Warp(Pawn::Orange),\n\n b'3' => Self::Warp(Pawn::Yellow),\n\n b'4' => Self::Warp(Pawn::Purple),\n\n\n\n // Loot\n\n b'g' => Self::Loot(Pawn::Green),\n\n b'o' => Self::Loot(Pawn::Orange),\n\n b'y' => Self::Loot(Pawn::Yellow),\n\n b'p' => Self::Loot(Pawn::Purple),\n\n\n\n // Explore exit\n\n b'G' => Self::FinalExit(Pawn::Green),\n\n b'O' => Self::FinalExit(Pawn::Orange),\n\n b'Y' => Self::FinalExit(Pawn::Yellow),\n\n b'P' => Self::FinalExit(Pawn::Purple),\n\n\n\n b't' => Self::TimerFlip(CellItemAvailability::Available),\n", "file_path": "crates/m-mazing-core/src/tile/cell.rs", "rank": 41, "score": 47385.89092566884 }, { "content": " let title_str = &mut query.single_mut().sections[0].value;\n\n let tile = if let Some(item) = ctx.tileset.get(ctx.tile_idx as usize) {\n\n *title_str = format!(\n\n \"TILE: {} (idx={})\\navail={:?}, left_turns={}\",\n\n item.0, ctx.tile_idx, availability.0, tile_rotation.left_turns\n\n );\n\n &item.1\n\n } else {\n\n *title_str = \"no tile\".to_string();\n\n return;\n\n };\n\n\n\n let mut tile = tile.clone();\n\n for cell in tile.cells_iter_mut() {\n\n cell.set_availability(availability.0);\n\n }\n\n for _ in 0..tile_rotation.left_turns {\n\n tile.rotate(SpinDirection::CounterClockwise);\n\n }\n\n\n\n let id = tile.spawn(Vec2::ZERO, &*render, &mut commands);\n\n\n\n let new_tile = CurrentTile { id, tile };\n\n commands.insert_resource(new_tile);\n\n}\n\n\n", "file_path": "crates/m-mazing-tile-util/src/gui.rs", "rank": 42, "score": 47201.36684281273 }, { "content": " ctx.tile_idx = ctx.tileset.len() as isize - 1;\n\n }\n\n\n\n if keyboard_input.any_just_pressed([KeyCode::K, KeyCode::U]) {\n\n availability.0 = match availability.0 {\n\n CellItemAvailability::Available => CellItemAvailability::Used,\n\n CellItemAvailability::Used => CellItemAvailability::Available,\n\n };\n\n info!(\"availability = {:?}\", availability.0);\n\n }\n\n\n\n const NUM_SPIN_DIRS: u8 = 4;\n\n if keyboard_input.any_just_pressed([KeyCode::LBracket]) {\n\n tile_rotation.left_turns = (tile_rotation.left_turns + 1).rem_euclid(NUM_SPIN_DIRS);\n\n }\n\n if keyboard_input.any_just_pressed([KeyCode::RBracket]) {\n\n tile_rotation.left_turns =\n\n (tile_rotation.left_turns as i8 - 1).rem_euclid(NUM_SPIN_DIRS as i8) as u8;\n\n }\n\n}\n\n\n", "file_path": "crates/m-mazing-tile-util/src/gui.rs", "rank": 43, "score": 47197.89803887578 }, { "content": " if keyboard_input.any_just_pressed([KeyCode::Right, KeyCode::Down]) {\n\n ctx.tile_idx += 1;\n\n }\n\n if keyboard_input.any_just_pressed([KeyCode::Left, KeyCode::Up]) {\n\n ctx.tile_idx -= 1;\n\n }\n\n\n\n // Avoid triggering change detection if unchanged\n\n let new_tile_idx = ctx\n\n .tile_idx\n\n .checked_rem_euclid(ctx.tileset.len() as isize)\n\n .unwrap_or(0);\n\n if new_tile_idx != ctx.tile_idx {\n\n ctx.tile_idx = new_tile_idx;\n\n }\n\n\n\n if keyboard_input.just_pressed(KeyCode::Home) {\n\n ctx.tile_idx = 0;\n\n }\n\n if keyboard_input.just_pressed(KeyCode::End) {\n", "file_path": "crates/m-mazing-tile-util/src/gui.rs", "rank": 44, "score": 47193.76165046256 }, { "content": "use std::sync::mpsc::TryRecvError;\n\n\n\nuse bevy::app::AppExit;\n\n\n\nuse crate::*;\n\n\n", "file_path": "crates/m-mazing-tile-util/src/gui.rs", "rank": 45, "score": 47179.24471464304 }, { "content": " debug!(\"parsed tile_name {:?}\", tile_name);\n\n\n\n let tile = tile_from_lines(&mut lines)?;\n\n tileset.push((tile_name, tile));\n\n }\n\n\n\n Ok(tileset)\n\n}\n\n\n\npub(super) fn tile_from_str(s: &str) -> Result<Tile, TileParsingError> {\n\n let mut lines = s.lines().enumerate().map(|(idx, line)| (idx + 1, line));\n\n tile_from_lines(&mut lines)\n\n}\n\n\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 46, "score": 46258.533324723445 }, { "content": " [Blocked, Open, Open, Blocked, Blocked],\n\n ],\n\n escalators: arrayvec::ArrayVec::new_const(),\n\n };\n\n\n\n const TILE3_STR: &str = \"\n\n+-+-+6+-+\n\n|t 4|\n\n+-+ + +-+\n\n8 3|\n\n+-+ + +-+\n\n|2 | 5\n\n+-+ + +-+\n\n|1 | |\n\n+-+7+-+-+\n\nE: 01-23, 00-33, 33-02\n\n\";\n\n\n\n static TILE3: Lazy<Tile> = Lazy::new(|| Tile {\n\n cell_grid: [\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 47, "score": 46255.63809503473 }, { "content": " } else {\n\n return Err(err.with_msg(\"invalid escalator hunk\"));\n\n };\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use once_cell::sync::Lazy;\n\n\n\n use super::*;\n\n use CellItemAvailability::*;\n\n use Pawn::*;\n\n use TileCell::*;\n\n use WallState::*;\n\n\n\n const TILE1_STR: &str = \"\n\n+-+-+7+-+\n\n| 1 c|\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 48, "score": 46254.97129505011 }, { "content": "}\n\n\n\nimpl Display for InvalidEscalator {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(\n\n f,\n\n \"on line {}: {:?}; {}\",\n\n self.line_number, self.line, self.msg\n\n )\n\n }\n\n}\n\n\n\nimpl InvalidEscalator {\n\n fn with_msg(&self, msg: &'static str) -> Self {\n\n Self {\n\n msg,\n\n ..self.clone()\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 49, "score": 46254.09046008874 }, { "content": " })?;\n\n let digit_byte: u8 = digit\n\n .try_into()\n\n .expect(\"Failed to convert decimal digit to u8\");\n\n Ok(digit_byte)\n\n };\n\n let a = byte_to_digit(*a)?;\n\n let b = byte_to_digit(*b)?;\n\n let c = byte_to_digit(*c)?;\n\n let d = byte_to_digit(*d)?;\n\n\n\n let point1 = TileGridCoord::new(a, b)\n\n .ok_or_else(|| err.clone().with_msg(\"Invalid tile coordinates\"))?;\n\n let point2 =\n\n TileGridCoord::new(c, d).ok_or_else(|| err.with_msg(\"Invalid tile coordinates\"))?;\n\n\n\n let escalator = EscalatorLocation([point1, point2]);\n\n tile.escalators\n\n .try_push(escalator)\n\n .map_err(|_| err.clone().with_msg(\"Exceeded max escalators\"))?;\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 50, "score": 46253.02232500125 }, { "content": "use std::fmt::Display;\n\n\n\nuse log::{debug, info, trace};\n\nuse thiserror::Error;\n\n\n\nuse super::*;\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum RowType {\n\n Wall,\n\n Cell,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 51, "score": 46252.79039870171 }, { "content": " );\n\n\n\n let tileset_31 = format!(\"@tile3\\n{}\\n@tile1\\n{}\", TILE3_STR, TILE1_STR);\n\n assert_eq!(\n\n tileset_from_str(&tileset_31),\n\n Ok(vec![\n\n (\"tile3\".to_string(), TILE3.clone()),\n\n (\"tile1\".to_string(), TILE1),\n\n ]),\n\n );\n\n }\n\n\n\n #[test]\n\n fn tile_negative() {\n\n crate::init_logging();\n\n assert_eq!(tileset_from_str(\"\"), Ok(vec![]));\n\n let actual = tileset_from_str(TILE_MISSING_NAME);\n\n assert!(\n\n matches!(actual, Err(TileParsingError::InvalidNameLeader { .. })),\n\n \"actual = {:?}\",\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 52, "score": 46252.789959116584 }, { "content": " #[test]\n\n fn tile_simple() {\n\n crate::init_logging();\n\n\n\n assert_eq!(TILE1_STR.parse::<Tile>(), Ok(TILE1));\n\n assert_eq!(TILE2_STR.parse::<Tile>(), Ok(TILE2));\n\n\n\n let tileset_1 = format!(\"@tile1\\n{}\", TILE1_STR);\n\n assert_eq!(\n\n tileset_from_str(&tileset_1),\n\n Ok(vec![(\"tile1\".to_string(), TILE1),])\n\n );\n\n\n\n let tileset_12 = format!(\"@tile1\\n{}\\n@tile2\\n{}\", TILE1_STR, TILE2_STR);\n\n assert_eq!(\n\n tileset_from_str(&tileset_12),\n\n Ok(vec![\n\n (\"tile1\".to_string(), TILE1),\n\n (\"tile2\".to_string(), TILE2),\n\n ]),\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 53, "score": 46251.96041367528 }, { "content": " if row_num >= Tile::CELL_GRID_WIDTH as u32 {\n\n return Err(TileParsingError::WrongNumberOfRows {\n\n line_number,\n\n num_rows: row_num + 1,\n\n });\n\n }\n\n let (mut walls, cells) = if let (Some(walls), Some(cells)) = (\n\n tile.vert_walls.get_mut(row_num as usize),\n\n tile.cell_grid.get_mut(row_num as usize),\n\n ) {\n\n (walls.iter_mut(), cells.iter_mut())\n\n } else {\n\n return Err(TileParsingError::WrongNumberOfRows {\n\n line_number,\n\n num_rows: row_num + 1,\n\n });\n\n };\n\n\n\n *walls.next().unwrap() = eat_thing(&ctx, &mut cursor)?;\n\n for (cell, wall) in cells.zip(walls) {\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 54, "score": 46251.918584547515 }, { "content": " [Open, Entrance, Open, Open],\n\n ],\n\n vert_walls: [\n\n [Blocked, Open, Open, Open, Blocked],\n\n [Blocked, Open, Blocked, Open, Blocked],\n\n [Blocked, Open, Blocked, Open, Blocked],\n\n [Blocked, Blocked, Open, Open, Blocked],\n\n ],\n\n escalators: arrayvec::ArrayVec::new_const(),\n\n };\n\n\n\n const TILE2_STR: &str = \"\n\n+-+-+6+-+\n\n|t 4|\n\n+-+ + +-+\n\n8 3|\n\n+-+ + +-+\n\n|2 | 5\n\n+-+ + +-+\n\n|1 | |\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 55, "score": 46250.45747713522 }, { "content": " ItemParse {\n\n line_number: u32,\n\n col_number: u32,\n\n line: String,\n\n char: char,\n\n name: &'static str,\n\n allowed: &'static str,\n\n },\n\n\n\n #[error(\"Invalid escalator specification {0:?}\")]\n\n InvalidEscalator(#[from] InvalidEscalator),\n\n\n\n #[error(\"No more tiles found\")]\n\n NoMoreTiles,\n\n}\n\n#[derive(Error, Clone, Debug, PartialEq, Eq)]\n\npub struct InvalidEscalator {\n\n line_number: u32,\n\n line: String,\n\n msg: &'static str,\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 56, "score": 46249.48783492312 }, { "content": " line_number,\n\n num_rows: row_num + 1,\n\n });\n\n };\n\n\n\n eat_thing::<_, Placeholder>(&ctx, &mut cursor)?;\n\n for wall in walls {\n\n *wall = eat_thing(&ctx, &mut cursor)?;\n\n eat_thing::<_, Placeholder>(&ctx, &mut cursor)?;\n\n }\n\n\n\n if row_num == Tile::CELL_GRID_WIDTH as u32 {\n\n state = ParsingState::Elevator;\n\n allow_line_skips = false;\n\n continue;\n\n } else {\n\n state = ParsingState::CellRow { row_num };\n\n }\n\n }\n\n ParsingState::CellRow { row_num } => {\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 57, "score": 46249.081911196015 }, { "content": " _ => {\n\n return Err(InvalidEscalator {\n\n msg: \"Invalid prefix\",\n\n ..err\n\n });\n\n }\n\n };\n\n let escalator_hunks = std::str::from_utf8(rest)\n\n .map_err(|_| InvalidEscalator {\n\n msg: \"Invalid UTF-8\",\n\n ..err.clone()\n\n })?\n\n .split(',');\n\n for hunk in escalator_hunks {\n\n if let [a, b, b'-', c, d] = hunk.trim().as_bytes() {\n\n let byte_to_digit = |c: u8| -> Result<u8, InvalidEscalator> {\n\n let c = char::from_u32(c as u32).expect(\"Invalid UTF-8 byte after conversion\");\n\n let digit = c.to_digit(10).ok_or_else(|| InvalidEscalator {\n\n msg: \"Unable to parse digit\",\n\n ..err.clone()\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 58, "score": 46249.051128914376 }, { "content": " });\n\n }\n\n }\n\n match state {\n\n ParsingState::CellRow { .. } | ParsingState::WallRow { .. } => {\n\n Err(TileParsingError::IncompleteTile { line_number })\n\n }\n\n ParsingState::Elevator => {\n\n debug!(\"Parsed tile {:#?}\", tile);\n\n Ok(tile)\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 59, "score": 46248.9686916681 }, { "content": " *cell = eat_thing(&ctx, &mut cursor)?;\n\n *wall = eat_thing(&ctx, &mut cursor)?;\n\n }\n\n\n\n state = ParsingState::WallRow {\n\n row_num: row_num + 1,\n\n };\n\n }\n\n ParsingState::Elevator => {\n\n elevators_from_line(&ctx, line, &mut tile)?;\n\n break;\n\n }\n\n }\n\n\n\n // Ensure row does not have extra\n\n if let Some((col_number, _c)) = cursor.next() {\n\n return Err(TileParsingError::RowHasExtra {\n\n line: String::from_utf8_lossy(line).to_string(),\n\n line_number,\n\n col_number: col_number as u32,\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 60, "score": 46248.37028501075 }, { "content": " actual\n\n );\n\n }\n\n\n\n #[test]\n\n fn fail_elevators() {\n\n crate::init_logging();\n\n\n\n let elevator_lines = [\n\n \"foo:\",\n\n \"E::\",\n\n \"E: 01-23, 00-33z\",\n\n \"E: 01-23, 00-34\",\n\n \"E: 01-23, 00-3s\",\n\n ];\n\n for el in elevator_lines {\n\n let tile_str = format!(\"{}\\n{}\", TILE1_STR.trim(), el);\n\n let actual = tile_str.parse::<Tile>();\n\n assert!(\n\n matches!(actual, Err(TileParsingError::InvalidEscalator { .. })),\n\n \"Did not get expected Err for tile_str: {}; actual={:?}\",\n\n tile_str,\n\n actual\n\n );\n\n }\n\n }\n\n}\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 61, "score": 46247.58608008654 }, { "content": "\n\n debug!(\n\n \"line {}: {:?} ; state={:?}; allow_line_skips={:?}\",\n\n line_number,\n\n String::from_utf8_lossy(line),\n\n state,\n\n allow_line_skips,\n\n );\n\n\n\n if allow_line_skips && skip_tile_line(line) {\n\n continue;\n\n }\n\n\n\n let mut cursor = line.iter().copied().enumerate();\n\n match state {\n\n ParsingState::WallRow { row_num } => {\n\n let walls = if let Some(walls) = tile.horz_walls.get_mut(row_num as usize) {\n\n walls.iter_mut()\n\n } else {\n\n return Err(TileParsingError::WrongNumberOfRows {\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 62, "score": 46247.58327894305 }, { "content": "+ +-+-+ +\n\n| |t |\n\n+-+ + +-+\n\n| | |\n\n+ +-+-+ +\n\n|O| |\n\n+ +^+ + +\n\n\";\n\n const TILE1: Tile = Tile {\n\n cell_grid: [\n\n [Empty, Warp(Green), Empty, Camera(Available)],\n\n [Empty, Empty, TimerFlip(Available), Empty],\n\n [Empty, Empty, Empty, Empty],\n\n [FinalExit(Orange), Empty, Empty, Empty],\n\n ],\n\n horz_walls: [\n\n [Blocked, Blocked, Explore(Yellow), Blocked],\n\n [Open, Blocked, Blocked, Open],\n\n [Blocked, Open, Open, Blocked],\n\n [Open, Blocked, Blocked, Open],\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 63, "score": 46246.65646376275 }, { "content": "+-+7+-+-+\n\n\";\n\n const TILE2: Tile = Tile {\n\n cell_grid: [\n\n [TimerFlip(Available), Empty, Empty, Warp(Purple)],\n\n [Empty, Empty, Empty, Warp(Yellow)],\n\n [Warp(Orange), Empty, Empty, Empty],\n\n [Warp(Green), Empty, Empty, Empty],\n\n ],\n\n horz_walls: [\n\n [Blocked, Blocked, Explore(Orange), Blocked],\n\n [Blocked, Open, Open, Blocked],\n\n [Blocked, Open, Open, Blocked],\n\n [Blocked, Open, Open, Blocked],\n\n [Blocked, Explore(Yellow), Blocked, Blocked],\n\n ],\n\n vert_walls: [\n\n [Blocked, Open, Open, Open, Blocked],\n\n [Explore(Purple), Open, Open, Open, Blocked],\n\n [Blocked, Open, Open, Blocked, Explore(Green)],\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 64, "score": 46246.37216664726 }, { "content": " [TimerFlip(Available), Empty, Empty, Warp(Purple)],\n\n [Empty, Empty, Empty, Warp(Yellow)],\n\n [Warp(Orange), Empty, Empty, Empty],\n\n [Warp(Green), Empty, Empty, Empty],\n\n ],\n\n horz_walls: [\n\n [Blocked, Blocked, Explore(Orange), Blocked],\n\n [Blocked, Open, Open, Blocked],\n\n [Blocked, Open, Open, Blocked],\n\n [Blocked, Open, Open, Blocked],\n\n [Blocked, Explore(Yellow), Blocked, Blocked],\n\n ],\n\n vert_walls: [\n\n [Blocked, Open, Open, Open, Blocked],\n\n [Explore(Purple), Open, Open, Open, Blocked],\n\n [Blocked, Open, Open, Blocked, Explore(Green)],\n\n [Blocked, Open, Open, Blocked, Blocked],\n\n ],\n\n escalators: [\n\n EscalatorLocation([TileGridCoord { x: 0, y: 1 }, TileGridCoord { x: 2, y: 3 }]),\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 65, "score": 46245.00623104135 }, { "content": " EscalatorLocation([TileGridCoord { x: 0, y: 0 }, TileGridCoord { x: 3, y: 3 }]),\n\n EscalatorLocation([TileGridCoord { x: 3, y: 3 }, TileGridCoord { x: 0, y: 2 }]),\n\n ]\n\n .iter()\n\n .copied()\n\n .collect(),\n\n });\n\n\n\n const TILE_MISSING_NAME: &str = \"\n\n+-+-+7+-+\n\n| 1 c|\n\n+ +-+-+ +\n\n| |t |\n\n+-+ + +-+\n\n| | |\n\n+ +-+-+ +\n\n|O| |\n\n+ +^+ + +\n\n\";\n\n\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 66, "score": 46244.81882738336 }, { "content": " #[error(\"Incomple tile at line {line_number}\")]\n\n IncompleteTile { line_number: u32 },\n\n\n\n #[error(\"Expected ASCII tilename, at line {line_number} found {name:?}\")]\n\n InvalidTileName { line_number: u32, name: String },\n\n\n\n #[error(\"Invalid number of rows at line {line_number} found {num_rows} rows\")]\n\n WrongNumberOfRows { line_number: u32, num_rows: u32 },\n\n\n\n #[error(\"Row has extra characters on line {line_number}: {line}\")]\n\n RowHasExtra {\n\n line_number: u32,\n\n col_number: u32,\n\n line: String,\n\n },\n\n\n\n #[error(\"Unexpected end-of-line while for line {line_number}: {line:?}\")]\n\n IncompleteLine { line_number: u32, line: String },\n\n\n\n #[error(\"Failed to parse item {char} as {name} on line {line_number}: {line:?}; must be in {allowed}\")]\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 67, "score": 46242.50284716548 }, { "content": "\n\n if skip_tile_line(line) {\n\n continue;\n\n }\n\n\n\n let (leader, tail) = match *line {\n\n [] => unreachable!(),\n\n [leader, ref tail @ ..] => (leader, tail),\n\n };\n\n if leader != b'@' {\n\n let line = String::from_utf8_lossy(line).to_string();\n\n return Err(TileParsingError::InvalidNameLeader { line_number, line });\n\n }\n\n if !tail.is_ascii() {\n\n return Err(TileParsingError::InvalidTileName {\n\n line_number,\n\n name: String::from_utf8_lossy(tail).to_string(),\n\n });\n\n }\n\n let tile_name = String::from_utf8(tail.to_vec()).unwrap();\n", "file_path": "crates/m-mazing-core/src/tile/tileset.rs", "rank": 68, "score": 46242.08576110918 }, { "content": " }\n\n\n\n pub fn cells_iter_mut(&mut self) -> impl Iterator<Item = &mut TileCell> {\n\n self.cell_grid.iter_mut().flat_map(|row| row.iter_mut())\n\n }\n\n\n\n pub fn rotate(&mut self, spin: SpinDirection) {\n\n let mut new_tile = Self {\n\n escalators: self.escalators.clone(),\n\n ..Self::default()\n\n };\n\n rotate_2d_array(&self.cell_grid, &mut new_tile.cell_grid, spin);\n\n rotate_2d_array(&self.horz_walls, &mut new_tile.vert_walls, spin);\n\n rotate_2d_array(&self.vert_walls, &mut new_tile.horz_walls, spin);\n\n\n\n for new_esc in new_tile.escalators.iter_mut() {\n\n new_esc.rotate(spin);\n\n }\n\n *self = new_tile;\n\n }\n\n\n\n pub fn has_camera(&self) -> bool {\n\n self.cells_iter()\n\n .any(|cell| matches!(cell, TileCell::Camera(_)))\n\n }\n\n}\n\n\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 69, "score": 46214.50802645526 }, { "content": "use std::{collections::HashSet, str::FromStr};\n\n\n\nuse crate::prelude::*;\n\n\n\npub mod cell;\n\npub mod direction;\n\npub mod escalator;\n\npub mod grid_coord;\n\npub mod tileset;\n\npub mod wall;\n\n\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 70, "score": 46208.623412029374 }, { "content": " pub fn reachable_coords(\n\n &self,\n\n ) -> [[bool; Tile::CELL_GRID_WIDTH as usize]; Tile::CELL_GRID_WIDTH as usize] {\n\n let mut explore_coords = self.reachable_coords_starting();\n\n let mut visited_coords = HashSet::new();\n\n let mut is_reachable_coord: [[bool; Tile::CELL_GRID_WIDTH as usize];\n\n Tile::CELL_GRID_WIDTH as usize] = Default::default();\n\n\n\n while let Some(coord) = explore_coords.pop() {\n\n visited_coords.insert(coord);\n\n is_reachable_coord[coord.y as usize][coord.x as usize] = true;\n\n\n\n for neighbor in self.cell_immediate_neighbor_coords(coord) {\n\n if !(visited_coords.contains(&neighbor) || explore_coords.contains(&neighbor)) {\n\n explore_coords.push(neighbor);\n\n }\n\n }\n\n }\n\n\n\n is_reachable_coord\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 71, "score": 46207.80229741963 }, { "content": " pub fn horz_walls(\n\n &self,\n\n ) -> &[[WallState; Self::CELL_GRID_WIDTH as usize]; (Self::CELL_GRID_WIDTH + 1) as usize] {\n\n &self.horz_walls\n\n }\n\n\n\n pub fn vert_walls(\n\n &self,\n\n ) -> &[[WallState; (Self::CELL_GRID_WIDTH + 1) as usize]; Self::CELL_GRID_WIDTH as usize] {\n\n &self.vert_walls\n\n }\n\n\n\n pub fn escalators(\n\n &self,\n\n ) -> &arrayvec::ArrayVec<EscalatorLocation, { Self::MAX_ESCALATORS_PER_TILE as usize }> {\n\n &self.escalators\n\n }\n\n\n\n pub fn cells_iter(&self) -> impl Iterator<Item = &TileCell> {\n\n self.cell_grid.iter().flat_map(|row| row.iter())\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 72, "score": 46206.82859799043 }, { "content": "\n\n let dir = match open_exit_dirs.as_slice() {\n\n [dir1] => *dir1,\n\n _ => {\n\n warn!(\n\n \"Unable to find a good direction for exit direction at {:?}\",\n\n coord\n\n );\n\n CartesianDirection::Right\n\n }\n\n };\n\n dir\n\n }\n\n\n\n pub fn cell_grid(\n\n &self,\n\n ) -> &[[TileCell; Self::CELL_GRID_WIDTH as usize]; Self::CELL_GRID_WIDTH as usize] {\n\n &self.cell_grid\n\n }\n\n\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 73, "score": 46205.15049855714 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use once_cell::sync::Lazy;\n\n\n\n use super::*;\n\n use CellItemAvailability::*;\n\n use Pawn::*;\n\n use TileCell::*;\n\n use WallState::*;\n\n\n\n static TILE_1A: Lazy<Tile> = Lazy::new(|| Tile {\n\n cell_grid: [\n\n [TimerFlip(Available), Empty, Empty, Warp(Purple)],\n\n [Empty, Empty, Empty, Warp(Yellow)],\n\n [Warp(Orange), Empty, Empty, Empty],\n\n [Warp(Green), Empty, Empty, Empty],\n\n ],\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 74, "score": 46203.02228681423 }, { "content": " .iter()\n\n .copied()\n\n .collect(),\n\n };\n\n\n\n {\n\n let mut actual_counterclockwise = tile.clone();\n\n actual_counterclockwise.rotate(SpinDirection::CounterClockwise);\n\n assert_eq!(actual_counterclockwise, tile_counterclockwise);\n\n }\n\n\n\n {\n\n let mut actual_clockwise = tile.clone();\n\n actual_clockwise.rotate(SpinDirection::Clockwise);\n\n assert_eq!(actual_clockwise, tile_clockwise);\n\n }\n\n\n\n {\n\n let mut revert1 = tile.clone();\n\n revert1.rotate(SpinDirection::Clockwise);\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 75, "score": 46202.56804326451 }, { "content": " }\n\n\n\n pub fn cell_wall(&self, coord: TileGridCoord, direction: CartesianDirection) -> WallState {\n\n let x = coord.x() as usize;\n\n let y = coord.y() as usize;\n\n match direction {\n\n CartesianDirection::Up => self.horz_walls[y][x],\n\n CartesianDirection::Down => self.horz_walls[y + 1][x],\n\n CartesianDirection::Left => self.vert_walls[y][x],\n\n CartesianDirection::Right => self.vert_walls[y][x + 1],\n\n }\n\n }\n\n\n\n pub fn cell_exit_direction(&self, coord: TileGridCoord) -> CartesianDirection {\n\n let open_exit_dirs: Vec<CartesianDirection> = self\n\n .cell_outer_edge_directions(coord)\n\n .iter()\n\n .copied()\n\n .filter(|dir| self.cell_wall(coord, *dir) == WallState::Open)\n\n .collect();\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 76, "score": 46201.7305412348 }, { "content": " dirs\n\n }\n\n\n\n pub fn cell_value(&self, coord: TileGridCoord) -> TileCell {\n\n self.cell_grid[coord.y() as usize][coord.x() as usize]\n\n }\n\n\n\n /// Neighbor coords accessible via cardinal direction walk\n\n pub fn cell_cardinal_neighbor_coords(\n\n &self,\n\n coord: TileGridCoord,\n\n direction: CartesianDirection,\n\n ) -> Option<TileGridCoord> {\n\n coord.added(direction.neighbor_transform())\n\n }\n\n\n\n /// Neighbor accessible via cardinal direction walk\n\n pub fn cell_cardinal_neighbor(\n\n &self,\n\n coord: TileGridCoord,\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 77, "score": 46201.61728949829 }, { "content": " [Blocked, Blocked, Open, Open, Blocked],\n\n ],\n\n escalators: [EscalatorLocation([\n\n TileGridCoord { x: 0, y: 1 },\n\n TileGridCoord { x: 1, y: 3 },\n\n ])]\n\n .iter()\n\n .copied()\n\n .collect(),\n\n });\n\n\n\n #[test]\n\n fn rotate() {\n\n let tile = Tile {\n\n cell_grid: [\n\n [\n\n Loot(Yellow),\n\n Camera(Available),\n\n FinalExit(Purple),\n\n Loot(Purple),\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 78, "score": 46200.55635818344 }, { "content": "\n\n /// CartesianDirections pointing to edge of tile\n\n ///\n\n /// Internal tiles will have no directions\n\n pub fn cell_outer_edge_directions(&self, coord: TileGridCoord) -> Vec<CartesianDirection> {\n\n const MAX_IDX: u8 = Tile::CELL_GRID_WIDTH - 1;\n\n\n\n let mut dirs = Vec::with_capacity(4);\n\n if coord.x() == 0 {\n\n dirs.push(CartesianDirection::Left);\n\n }\n\n if coord.x() == MAX_IDX {\n\n dirs.push(CartesianDirection::Right);\n\n }\n\n if coord.y() == 0 {\n\n dirs.push(CartesianDirection::Up);\n\n }\n\n if coord.y() == MAX_IDX {\n\n dirs.push(CartesianDirection::Down);\n\n }\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 79, "score": 46200.119770860176 }, { "content": " });\n\n\n\n static TILE_2: Lazy<Tile> = Lazy::new(|| Tile {\n\n cell_grid: [\n\n [FinalExit(Purple), Empty, Empty, Empty],\n\n [Empty, Empty, Empty, Warp(Purple)],\n\n [Empty, Empty, Empty, Empty],\n\n [Empty, Empty, Empty, Warp(Green)],\n\n ],\n\n horz_walls: [\n\n [Open, Blocked, Blocked, Blocked],\n\n [Open, Open, Open, Blocked],\n\n [Blocked, Open, Blocked, Open],\n\n [Open, Blocked, Open, Blocked],\n\n [Blocked, Explore(Orange), Blocked, Blocked],\n\n ],\n\n vert_walls: [\n\n [Blocked, Blocked, Open, Open, Blocked],\n\n [Blocked, Blocked, Open, Blocked, Blocked],\n\n [Blocked, Blocked, Blocked, Open, Entrance],\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 80, "score": 46199.49463807497 }, { "content": " [Blocked, Open, Blocked, Blocked],\n\n ],\n\n vert_walls: [\n\n [Blocked, Open, Open, Open, Blocked],\n\n [Open, Open, Open, Open, Blocked],\n\n [Blocked, Open, Open, Blocked, Open],\n\n [Blocked, Open, Open, Blocked, Blocked],\n\n ],\n\n escalators: [EscalatorLocation([\n\n TileGridCoord { x: 2, y: 3 },\n\n TileGridCoord { x: 3, y: 2 },\n\n ])]\n\n .iter()\n\n .copied()\n\n .collect(),\n\n };\n\n\n\n let tile_counterclockwise = Tile {\n\n cell_grid: [\n\n [Loot(Purple), Loot(Green), FinalExit(Green), Empty],\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 81, "score": 46198.99055171276 }, { "content": " direction: CartesianDirection,\n\n ) -> Option<TileCell> {\n\n let neighbor_point = self.cell_cardinal_neighbor_coords(coord, direction)?;\n\n Some(self.cell_value(neighbor_point))\n\n }\n\n\n\n /// Coordinates of \"neighbors\" in current tile that are \"one step\" away\n\n /// (either by cardinal direction walk or escalator).\n\n pub fn cell_immediate_neighbor_coords(&self, coord: TileGridCoord) -> Vec<TileGridCoord> {\n\n let cardinal_neighbors = CartesianDirection::ALL_DIRECTIONS\n\n .iter()\n\n .copied()\n\n .filter_map(|dir| {\n\n self.cell_cardinal_neighbor_coords(coord, dir)\n\n // todo: handle orange-only walls\n\n .filter(|_| self.cell_wall(coord, dir) == WallState::Open)\n\n });\n\n let escalator_neighbors = self\n\n .escalators\n\n .iter()\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 82, "score": 46198.783013882094 }, { "content": " [Blocked, Blocked, Open, Blocked],\n\n [Open, Open, Blocked, Blocked],\n\n [Open, Open, Open, Open],\n\n [Open, Open, Open, Open],\n\n [Blocked, Open, Blocked, Blocked],\n\n ],\n\n vert_walls: [\n\n [Blocked, Blocked, Blocked, Blocked, Blocked],\n\n [Open, Open, Open, Open, Blocked],\n\n [Blocked, Open, Open, Open, Open],\n\n [Blocked, Blocked, Blocked, Blocked, Blocked],\n\n ],\n\n escalators: [EscalatorLocation([\n\n TileGridCoord { x: 3, y: 1 },\n\n TileGridCoord { x: 2, y: 0 },\n\n ])]\n\n .iter()\n\n .copied()\n\n .collect(),\n\n };\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 83, "score": 46196.98031416256 }, { "content": " revert1.rotate(SpinDirection::CounterClockwise);\n\n assert_eq!(revert1, tile);\n\n\n\n revert1.rotate(SpinDirection::CounterClockwise);\n\n revert1.rotate(SpinDirection::Clockwise);\n\n assert_eq!(revert1, tile);\n\n\n\n revert1.rotate(SpinDirection::CounterClockwise);\n\n revert1.rotate(SpinDirection::CounterClockwise);\n\n revert1.rotate(SpinDirection::CounterClockwise);\n\n revert1.rotate(SpinDirection::CounterClockwise);\n\n assert_eq!(revert1, tile);\n\n\n\n revert1.rotate(SpinDirection::Clockwise);\n\n revert1.rotate(SpinDirection::Clockwise);\n\n revert1.rotate(SpinDirection::Clockwise);\n\n revert1.rotate(SpinDirection::Clockwise);\n\n assert_eq!(revert1, tile);\n\n }\n\n }\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 84, "score": 46196.9396955163 }, { "content": " .filter_map(|esc_loc| esc_loc.coord_neighbor(coord));\n\n\n\n let mut neighbors: Vec<_> = cardinal_neighbors.chain(escalator_neighbors).collect();\n\n\n\n // we could have duplicates\n\n neighbors.sort_unstable();\n\n neighbors.dedup();\n\n\n\n neighbors\n\n }\n\n\n\n const POSSIBLE_ENTRANCE_COORDS: [TileGridCoord; 4] = [\n\n TileGridCoord { x: 0, y: 1 },\n\n TileGridCoord { x: 1, y: 3 },\n\n TileGridCoord { x: 2, y: 0 },\n\n TileGridCoord { x: 3, y: 2 },\n\n ];\n\n\n\n fn reachable_coords_starting(&self) -> Vec<TileGridCoord> {\n\n Self::POSSIBLE_ENTRANCE_COORDS\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 85, "score": 46196.90682425188 }, { "content": " ],\n\n [Empty, FinalExit(Green), Loot(Green), Loot(Purple)],\n\n ],\n\n horz_walls: [\n\n [Blocked, Blocked, Open, Blocked],\n\n [Open, Open, Open, Open],\n\n [Open, Open, Open, Open],\n\n [Blocked, Blocked, Open, Open],\n\n [Blocked, Open, Blocked, Blocked],\n\n ],\n\n vert_walls: [\n\n [Blocked, Blocked, Blocked, Blocked, Blocked],\n\n [Open, Open, Open, Open, Blocked],\n\n [Blocked, Open, Open, Open, Open],\n\n [Blocked, Blocked, Blocked, Blocked, Blocked],\n\n ],\n\n escalators: [EscalatorLocation([\n\n TileGridCoord { x: 0, y: 2 },\n\n TileGridCoord { x: 1, y: 3 },\n\n ])]\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 86, "score": 46196.84917870558 }, { "content": " horz_walls: [\n\n [Blocked, Blocked, Explore(Orange), Blocked],\n\n [Blocked, Open, Open, Blocked],\n\n [Blocked, Open, Open, Blocked],\n\n [Blocked, Open, Open, Blocked],\n\n [Blocked, Explore(Yellow), Blocked, Blocked],\n\n ],\n\n vert_walls: [\n\n [Blocked, Open, Open, Open, Blocked],\n\n [Explore(Purple), Open, Open, Open, Blocked],\n\n [Blocked, Open, Open, Blocked, Explore(Green)],\n\n [Blocked, Open, Open, Blocked, Blocked],\n\n ],\n\n escalators: [EscalatorLocation([\n\n TileGridCoord { x: 2, y: 3 },\n\n TileGridCoord { x: 3, y: 2 },\n\n ])]\n\n .iter()\n\n .copied()\n\n .collect(),\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 87, "score": 46196.77405190174 }, { "content": "\n\n #[test]\n\n fn neighbor() {\n\n assert_eq!(\n\n TILE_1A.cell_immediate_neighbor_coords(TileGridCoord { x: 0, y: 0 }),\n\n [TileGridCoord { x: 1, y: 0 }]\n\n );\n\n assert_eq!(\n\n TILE_1A.cell_immediate_neighbor_coords(TileGridCoord { x: 1, y: 0 }),\n\n [\n\n TileGridCoord { x: 0, y: 0 },\n\n TileGridCoord { x: 1, y: 1 },\n\n TileGridCoord { x: 2, y: 0 },\n\n ]\n\n );\n\n assert_eq!(\n\n TILE_1A.cell_immediate_neighbor_coords(TileGridCoord { x: 3, y: 3 }),\n\n []\n\n );\n\n assert_eq!(\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 88, "score": 46196.77390877732 }, { "content": " TILE_1A.cell_immediate_neighbor_coords(TileGridCoord { x: 2, y: 3 }),\n\n [\n\n TileGridCoord { x: 1, y: 3 },\n\n TileGridCoord { x: 2, y: 2 },\n\n TileGridCoord { x: 3, y: 2 },\n\n ]\n\n );\n\n assert_eq!(\n\n TILE_1A.cell_immediate_neighbor_coords(TileGridCoord { x: 3, y: 2 }),\n\n [TileGridCoord { x: 2, y: 3 },]\n\n );\n\n }\n\n\n\n #[test]\n\n fn start_coords() {\n\n assert_eq!(\n\n TILE_1A.reachable_coords_starting(),\n\n Tile::POSSIBLE_ENTRANCE_COORDS\n\n );\n\n\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 89, "score": 46196.44283861199 }, { "content": " .iter()\n\n .copied()\n\n .filter(|coord| {\n\n let dir = match *self.cell_outer_edge_directions(*coord).as_slice() {\n\n [dir] => dir,\n\n _ => {\n\n panic!(\"there should only be one edge direction for possible entrance node\")\n\n }\n\n };\n\n match self.cell_wall(*coord, dir) {\n\n WallState::Entrance\n\n | WallState::Explore(_)\n\n | WallState::Open\n\n | WallState::OrangeOnly => true,\n\n WallState::Blocked => false,\n\n }\n\n })\n\n .collect()\n\n }\n\n\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 90, "score": 46195.083489185025 }, { "content": "\n\n let tile_clockwise = Tile {\n\n cell_grid: [\n\n [\n\n Loot(Orange),\n\n TimerFlip(Available),\n\n FinalExit(Yellow),\n\n Loot(Yellow),\n\n ],\n\n [\n\n FinalExit(Orange),\n\n Warp(Yellow),\n\n Warp(Green),\n\n Camera(Available),\n\n ],\n\n [\n\n CrystalBall(Available),\n\n Warp(Purple),\n\n Warp(Orange),\n\n FinalExit(Purple),\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 91, "score": 46194.98011744807 }, { "content": " ],\n\n [FinalExit(Yellow), Warp(Green), Warp(Orange), Loot(Green)],\n\n [\n\n TimerFlip(Available),\n\n Warp(Yellow),\n\n Warp(Purple),\n\n FinalExit(Green),\n\n ],\n\n [\n\n Loot(Orange),\n\n FinalExit(Orange),\n\n CrystalBall(Available),\n\n Empty,\n\n ],\n\n ],\n\n horz_walls: [\n\n [Blocked, Blocked, Open, Blocked],\n\n [Blocked, Open, Open, Blocked],\n\n [Blocked, Open, Open, Blocked],\n\n [Blocked, Open, Open, Blocked],\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 92, "score": 46194.62924944707 }, { "content": " assert_eq!(\n\n TILE_2.reachable_coords_starting(),\n\n [TileGridCoord { x: 1, y: 3 }, TileGridCoord { x: 3, y: 2 },]\n\n );\n\n }\n\n\n\n #[test]\n\n fn reachable_coords() {\n\n assert_eq!(\n\n TILE_1A.reachable_coords(),\n\n [\n\n [true, true, true, true],\n\n [true, true, true, true],\n\n [true, true, true, true],\n\n [true, true, true, false],\n\n ]\n\n );\n\n\n\n assert_eq!(\n\n TILE_2.reachable_coords(),\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 93, "score": 46193.693414579866 }, { "content": " [\n\n FinalExit(Purple),\n\n Warp(Orange),\n\n Warp(Purple),\n\n CrystalBall(Available),\n\n ],\n\n [\n\n Camera(Available),\n\n Warp(Green),\n\n Warp(Yellow),\n\n FinalExit(Orange),\n\n ],\n\n [\n\n Loot(Yellow),\n\n FinalExit(Yellow),\n\n TimerFlip(Available),\n\n Loot(Orange),\n\n ],\n\n ],\n\n horz_walls: [\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 94, "score": 46190.90413903002 }, { "content": " [\n\n [true, false, false, false],\n\n [true, false, false, true],\n\n [false, false, true, true],\n\n [false, true, true, true],\n\n ]\n\n );\n\n }\n\n}\n", "file_path": "crates/m-mazing-core/src/tile/mod.rs", "rank": 95, "score": 46190.90413903002 }, { "content": "pub mod camera;\n\npub mod render_tile;\n\npub mod shape;\n\npub mod theme;\n\n\n\nuse crate::prelude::*;\n\n\n\n#[derive(Clone, Debug, Default)]\n\npub struct RenderState {\n\n pub theme: Theme,\n\n}\n\n\n", "file_path": "crates/m-mazing-core/src/render/mod.rs", "rank": 96, "score": 27464.548702389653 } ]
Rust
common/machine.rs
camas/aoc2019
53b41dcf9990c9a4e460197b5e5031ec2158b453
use crate::common::get_digits; use num_derive::FromPrimitive; use num_traits::FromPrimitive; use std::collections::HashMap; use std::ops::Range; use Instr::*; const MEM_CHUNK_SIZE: usize = 1000; pub struct Machine { memory: Memory, ip: usize, rel_base: usize, } impl Machine { pub fn new(mem: &[i64]) -> Machine { Machine { memory: Memory::from(&mem), ip: 0, rel_base: 0, } } pub fn run(&mut self, input: impl FnMut() -> i64, output: impl FnMut(i64)) { self.run_until(Instr::Halt, input, output); } #[allow(clippy::cognitive_complexity)] pub fn run_until( &mut self, halt_instr: Instr, mut input: impl FnMut() -> i64, mut output: impl FnMut(i64), ) -> bool { let mut last_instr: Option<Instr> = None; loop { if let Some(x) = last_instr { if x == halt_instr { return true; } } let raw_opcode = self.get_mem(self.ip); let digits = get_digits(raw_opcode); let opcode_i = if digits.len() == 1 { *digits.last().unwrap() } else { digits[digits.len() - 2] * 10 + digits.last().unwrap() }; let instr: Instr = FromPrimitive::from_i64(opcode_i).unwrap(); let instr_size = Machine::get_instruction_size(instr); let values: Vec<i64> = self .get_mem_range(self.ip + 1..self.ip + 1 + instr_size) .to_vec(); let mut modes = vec![0; instr_size]; if digits.len() > 2 { let diff = modes.len() + 2 - digits.len(); modes[diff..].clone_from_slice(&digits[..digits.len() - 2]); modes.reverse(); } macro_rules! read_mem { ($index:expr) => {{ let mode = modes[$index]; let val = values[$index]; match mode { 0 => self.memory.get(val as usize), 1 => val, 2 => self.memory.get((self.rel_base as i64 + val) as usize), _ => panic!("Unknown mode"), } }}; } macro_rules! write_mem { ($index:expr, $set_value:expr) => {{ let mode = modes[$index]; let val: i64 = values[$index]; match mode { 0 => self.memory.set(val as usize, $set_value), 1 => panic!("Can't write in immediate mode"), 2 => self .memory .set((self.rel_base as i64 + val) as usize, $set_value), _ => panic!("Unknown mode"), }; }}; } last_instr = Some(instr); match instr { Halt => return false, Add => { let added = read_mem!(0) + read_mem!(1); write_mem!(2, added); } Mult => { let multiplied = read_mem!(0) * read_mem!(1); write_mem!(2, multiplied); } Input => write_mem!(0, input()), Output => output(read_mem!(0)), JumpTrue => { if read_mem!(0) != 0 { self.ip = read_mem!(1) as usize; continue; } } JumpFalse => { if read_mem!(0) == 0 { self.ip = read_mem!(1) as usize; continue; } } LessThan => { if read_mem!(0) < read_mem!(1) { write_mem!(2, 1); } else { write_mem!(2, 0); } } Equals => { if read_mem!(0) == read_mem!(1) { write_mem!(2, 1); } else { write_mem!(2, 0); } } RelBase => self.rel_base = (self.rel_base as i64 + read_mem!(0)) as usize, } self.ip += instr_size + 1; } } fn get_instruction_size(instr: Instr) -> usize { match instr { Halt => 0, Add => 3, Mult => 3, Input => 1, Output => 1, JumpTrue => 2, JumpFalse => 2, LessThan => 3, Equals => 3, RelBase => 1, } } pub fn set_mem(&mut self, index: usize, value: i64) { self.memory.set(index, value); } pub fn get_mem(&mut self, index: usize) -> i64 { self.memory.get(index) } pub fn get_mem_range(&mut self, range: Range<usize>) -> Vec<i64> { let mut out = Vec::new(); for i in range { out.push(self.get_mem(i)); } out } } pub struct Memory { data: HashMap<usize, [i64; MEM_CHUNK_SIZE]>, } impl Memory { pub fn from(mem: &[i64]) -> Memory { let mut m: HashMap<usize, [i64; MEM_CHUNK_SIZE]> = HashMap::new(); for (i, chunk) in mem.to_vec().chunks(MEM_CHUNK_SIZE).enumerate() { let mut d = [0; MEM_CHUNK_SIZE]; for (i, c) in chunk.iter().enumerate() { d[i] = *c; } m.insert(i, d); } Memory { data: m } } pub fn get(&mut self, index: usize) -> i64 { let chunk_index = index / MEM_CHUNK_SIZE; let chunk_offset = index % MEM_CHUNK_SIZE; self.data.entry(chunk_index).or_insert([0; MEM_CHUNK_SIZE]); self.data[&chunk_index][chunk_offset] } pub fn set(&mut self, index: usize, value: i64) { let chunk_index = index / MEM_CHUNK_SIZE; let chunk_offset = index % MEM_CHUNK_SIZE; self.data.entry(chunk_index).or_insert([0; MEM_CHUNK_SIZE]); self.data.get_mut(&chunk_index).unwrap()[chunk_offset] = value; } } #[derive(FromPrimitive, Clone, Copy, PartialEq, Eq, Hash, Debug)] pub enum Instr { Halt = 99, Add = 1, Mult = 2, Input = 3, Output = 4, JumpTrue = 5, JumpFalse = 6, LessThan = 7, Equals = 8, RelBase = 9, } #[cfg(test)] mod tests { use super::*; #[test] fn test_rel_base() { let mem = vec![ 109, 1, 204, -1, 1001, 100, 1, 100, 1008, 100, 16, 101, 1006, 101, 0, 99, ]; let mut m = Machine::new(&mem); let mut output = Vec::new(); m.run(|| unreachable!(), |x| output.push(x)); assert_eq!(mem, output); } #[test] fn test_large_number() { let mem = vec![1102, 34_915_192, 34_915_192, 7, 4, 7, 99, 0]; let mut m = Machine::new(&mem); let mut output = Vec::new(); m.run(|| unreachable!(), |x| output.push(x)); assert!( output[0] == 1_219_070_632_396_864, "Result was {}", output[0] ); } #[test] fn test_large_memory() { let mem = vec![104, 1_125_899_906_842_624, 99]; let mut m = Machine::new(&mem); let mut output = Vec::new(); m.run(|| unreachable!(), |x| output.push(x)); assert!( output[0] == 1_125_899_906_842_624, "Result was {}", output[0] ); } }
use crate::common::get_digits; use num_derive::FromPrimitive; use num_traits::FromPrimitive; use std::collections::HashMap; use std::ops::Range; use Instr::*; const MEM_CHUNK_SIZE: usize = 1000; pub struct Machine { memory: Memory, ip: usize, rel_base: usize, } impl Machine {
pub fn run(&mut self, input: impl FnMut() -> i64, output: impl FnMut(i64)) { self.run_until(Instr::Halt, input, output); } #[allow(clippy::cognitive_complexity)] pub fn run_until( &mut self, halt_instr: Instr, mut input: impl FnMut() -> i64, mut output: impl FnMut(i64), ) -> bool { let mut last_instr: Option<Instr> = None; loop { if let Some(x) = last_instr { if x == halt_instr { return true; } } let raw_opcode = self.get_mem(self.ip); let digits = get_digits(raw_opcode); let opcode_i = if digits.len() == 1 { *digits.last().unwrap() } else { digits[digits.len() - 2] * 10 + digits.last().unwrap() }; let instr: Instr = FromPrimitive::from_i64(opcode_i).unwrap(); let instr_size = Machine::get_instruction_size(instr); let values: Vec<i64> = self .get_mem_range(self.ip + 1..self.ip + 1 + instr_size) .to_vec(); let mut modes = vec![0; instr_size]; if digits.len() > 2 { let diff = modes.len() + 2 - digits.len(); modes[diff..].clone_from_slice(&digits[..digits.len() - 2]); modes.reverse(); } macro_rules! read_mem { ($index:expr) => {{ let mode = modes[$index]; let val = values[$index]; match mode { 0 => self.memory.get(val as usize), 1 => val, 2 => self.memory.get((self.rel_base as i64 + val) as usize), _ => panic!("Unknown mode"), } }}; } macro_rules! write_mem { ($index:expr, $set_value:expr) => {{ let mode = modes[$index]; let val: i64 = values[$index]; match mode { 0 => self.memory.set(val as usize, $set_value), 1 => panic!("Can't write in immediate mode"), 2 => self .memory .set((self.rel_base as i64 + val) as usize, $set_value), _ => panic!("Unknown mode"), }; }}; } last_instr = Some(instr); match instr { Halt => return false, Add => { let added = read_mem!(0) + read_mem!(1); write_mem!(2, added); } Mult => { let multiplied = read_mem!(0) * read_mem!(1); write_mem!(2, multiplied); } Input => write_mem!(0, input()), Output => output(read_mem!(0)), JumpTrue => { if read_mem!(0) != 0 { self.ip = read_mem!(1) as usize; continue; } } JumpFalse => { if read_mem!(0) == 0 { self.ip = read_mem!(1) as usize; continue; } } LessThan => { if read_mem!(0) < read_mem!(1) { write_mem!(2, 1); } else { write_mem!(2, 0); } } Equals => { if read_mem!(0) == read_mem!(1) { write_mem!(2, 1); } else { write_mem!(2, 0); } } RelBase => self.rel_base = (self.rel_base as i64 + read_mem!(0)) as usize, } self.ip += instr_size + 1; } } fn get_instruction_size(instr: Instr) -> usize { match instr { Halt => 0, Add => 3, Mult => 3, Input => 1, Output => 1, JumpTrue => 2, JumpFalse => 2, LessThan => 3, Equals => 3, RelBase => 1, } } pub fn set_mem(&mut self, index: usize, value: i64) { self.memory.set(index, value); } pub fn get_mem(&mut self, index: usize) -> i64 { self.memory.get(index) } pub fn get_mem_range(&mut self, range: Range<usize>) -> Vec<i64> { let mut out = Vec::new(); for i in range { out.push(self.get_mem(i)); } out } } pub struct Memory { data: HashMap<usize, [i64; MEM_CHUNK_SIZE]>, } impl Memory { pub fn from(mem: &[i64]) -> Memory { let mut m: HashMap<usize, [i64; MEM_CHUNK_SIZE]> = HashMap::new(); for (i, chunk) in mem.to_vec().chunks(MEM_CHUNK_SIZE).enumerate() { let mut d = [0; MEM_CHUNK_SIZE]; for (i, c) in chunk.iter().enumerate() { d[i] = *c; } m.insert(i, d); } Memory { data: m } } pub fn get(&mut self, index: usize) -> i64 { let chunk_index = index / MEM_CHUNK_SIZE; let chunk_offset = index % MEM_CHUNK_SIZE; self.data.entry(chunk_index).or_insert([0; MEM_CHUNK_SIZE]); self.data[&chunk_index][chunk_offset] } pub fn set(&mut self, index: usize, value: i64) { let chunk_index = index / MEM_CHUNK_SIZE; let chunk_offset = index % MEM_CHUNK_SIZE; self.data.entry(chunk_index).or_insert([0; MEM_CHUNK_SIZE]); self.data.get_mut(&chunk_index).unwrap()[chunk_offset] = value; } } #[derive(FromPrimitive, Clone, Copy, PartialEq, Eq, Hash, Debug)] pub enum Instr { Halt = 99, Add = 1, Mult = 2, Input = 3, Output = 4, JumpTrue = 5, JumpFalse = 6, LessThan = 7, Equals = 8, RelBase = 9, } #[cfg(test)] mod tests { use super::*; #[test] fn test_rel_base() { let mem = vec![ 109, 1, 204, -1, 1001, 100, 1, 100, 1008, 100, 16, 101, 1006, 101, 0, 99, ]; let mut m = Machine::new(&mem); let mut output = Vec::new(); m.run(|| unreachable!(), |x| output.push(x)); assert_eq!(mem, output); } #[test] fn test_large_number() { let mem = vec![1102, 34_915_192, 34_915_192, 7, 4, 7, 99, 0]; let mut m = Machine::new(&mem); let mut output = Vec::new(); m.run(|| unreachable!(), |x| output.push(x)); assert!( output[0] == 1_219_070_632_396_864, "Result was {}", output[0] ); } #[test] fn test_large_memory() { let mem = vec![104, 1_125_899_906_842_624, 99]; let mut m = Machine::new(&mem); let mut output = Vec::new(); m.run(|| unreachable!(), |x| output.push(x)); assert!( output[0] == 1_125_899_906_842_624, "Result was {}", output[0] ); } }
pub fn new(mem: &[i64]) -> Machine { Machine { memory: Memory::from(&mem), ip: 0, rel_base: 0, } }
function_block-full_function
[]
Rust
tests/admin_general.rs
gearbot/Gearbot-Animal-API
e103efc146324d411e641938617fc9eba69b5447
use animal_api::*; use animal_facts::*; mod generator; use crate::generator::*; use actix_web::web::{self, Bytes, Data}; use actix_web::{test, App}; #[actix_rt::test] async fn no_admins_loaded() { let dir = make_dir(); let mut state = gen_state(&dir); state.config.admins = Vec::new(); let req_json = AdminFactRequest { fact_id: None, fact_content: Some("SpookyFact".to_string()), animal_type: Animal::Cat, key: gen_admin_all_perms().key, }; assert_eq!( test_admin_fact_req(req_json, "/admin/fact/add", state).await, RESP_BAD_AUTH ) } #[actix_rt::test] async fn invalid_key() { let dir = make_dir(); let req_json = AdminFactRequest { fact_id: None, fact_content: Some("SpookyFact".to_string()), animal_type: Animal::Cat, key: "BadKey".to_string(), }; assert_eq!( test_admin_fact_req(req_json, "/admin/fact/add", gen_state(&dir)).await, RESP_BAD_AUTH ) } #[actix_rt::test] async fn missing_permission_add() { let dir = make_dir(); let req_json = AdminFactRequest { fact_id: None, fact_content: Some("SpookyFact".to_string()), animal_type: Animal::Cat, key: gen_admin_delete_only().key, }; assert_eq!( test_admin_fact_req(req_json, "/admin/fact/add", gen_state(&dir)).await, RESP_MISSING_PERMS ) } #[actix_rt::test] async fn missing_permission_delete() { let dir = make_dir(); let req_json = AdminFactRequest { fact_id: Some(6682463169732688062), fact_content: None, animal_type: Animal::Cat, key: gen_admin_add_only().key, }; assert_eq!( test_admin_fact_req(req_json, "/admin/fact/delete", gen_state(&dir)).await, RESP_MISSING_PERMS ) } #[actix_rt::test] async fn missing_permission_view() { let dir = make_dir(); let req_json = AdminFactRequest { fact_id: Some(6682463169732688062), fact_content: None, animal_type: Animal::Cat, key: gen_admin_view_only().key, }; assert_eq!( test_admin_fact_req(req_json, "/admin/fact/add", gen_state(&dir)).await, RESP_MISSING_PERMS ) } #[actix_rt::test] async fn missing_permission_all() { let dir = make_dir(); let req_json = AdminFactRequest { fact_id: Some(6682463169732688062), fact_content: None, animal_type: Animal::Cat, key: gen_admin_no_perms().key, }; assert_eq!( test_admin_fact_req(req_json, "/admin/fact/delete", gen_state(&dir)).await, RESP_MISSING_PERMS ) } #[actix_rt::test] async fn modify_cat_unloaded() { let dir = make_dir(); let mut state = gen_state(&dir); state.fact_lists.cat_facts = None; let req_json = AdminFactRequest { fact_id: Some(6682463169732688062), fact_content: None, animal_type: Animal::Cat, key: state.config.admins[0].key.clone(), }; assert_eq!( test_admin_fact_req(req_json, "/admin/fact/add", state).await, RESP_NOT_LOADED ) } #[actix_rt::test] async fn modify_dog_unloaded() { let dir = make_dir(); let mut state = gen_state(&dir); state.fact_lists.dog_facts = None; let req_json = AdminFactRequest { fact_id: Some(6682463169732688062), fact_content: None, animal_type: Animal::Dog, key: state.config.admins[4].key.clone(), }; assert_eq!( test_admin_fact_req(req_json, "/admin/fact/add", state).await, RESP_NOT_LOADED ) } #[actix_rt::test] async fn list_facts() { let dir = make_dir(); let uri = "/admin/fact/list"; let state = gen_state(&dir); let req_json = AdminFactRequest { fact_id: None, fact_content: None, animal_type: Animal::Cat, key: gen_admin_all_perms().key, }; let raw = gen_state(&dir).fact_lists.cat_facts.unwrap(); let expected = raw.read().unwrap(); let mock_state = Data::new(state); let app = test::init_service( App::new() .app_data(mock_state.clone()) .service(web::resource(uri).route(web::post().to(admin::modify_fact))), ) .await; let req = test::TestRequest::post() .uri(uri) .set_json(&req_json) .to_request(); let received: Vec<Fact> = test::call_and_read_body_json(&app, req).await; assert_eq!(received, *expected) } #[actix_rt::test] async fn add_fact_no_content() { let dir = make_dir(); let req_json = AdminFactRequest { fact_id: None, fact_content: None, animal_type: Animal::Cat, key: gen_admin_all_perms().key, }; assert_eq!( test_admin_fact_req(req_json, "/admin/fact/add", gen_state(&dir)).await, RESP_NO_CONTENT_SPECIFIED ) } #[actix_rt::test] async fn add_fact_ok() { let dir = make_dir(); let state = gen_state(&dir); let req_json = AdminFactRequest { fact_id: None, fact_content: Some("Huzaaah, a new fact!".to_string()), animal_type: Animal::Dog, key: gen_admin_all_perms().key, }; let resp = test_admin_fact_req(req_json, "/admin/fact/add", state).await; let word = CreatedAction::Fact { animal: Animal::Dog, }; let expected = JsonResp::new(201, word.as_str()); assert_eq!(resp, expected); } #[actix_rt::test] async fn delete_fact_no_id() { let dir = make_dir(); let req_json = AdminFactRequest { fact_id: None, fact_content: None, animal_type: Animal::Dog, key: gen_admin_all_perms().key, }; assert_eq!( test_admin_fact_req(req_json, "/admin/fact/delete", gen_state(&dir)).await, RESP_NO_ID_SUPPLIED ) } #[actix_rt::test] async fn delete_fact_bad_id() { let dir = make_dir(); let req_json = AdminFactRequest { fact_id: Some(3), fact_content: None, animal_type: Animal::Dog, key: gen_admin_all_perms().key, }; assert_eq!( test_admin_fact_req(req_json, "/admin/fact/delete", gen_state(&dir)).await, RESP_ID_NOT_FOUND ) } #[actix_rt::test] async fn delete_fact_ok() { let dir = make_dir(); let uri = "/admin/fact/delete"; let req_json = AdminFactRequest { fact_id: Some(6682463169732688062), fact_content: None, animal_type: Animal::Cat, key: gen_admin_all_perms().key, }; let state = gen_state(&dir); let mock_state = Data::new(state); let app = test::init_service( App::new() .app_data(mock_state.clone()) .service(web::resource(uri).route(web::post().to(admin::modify_fact))), ) .await; let req = test::TestRequest::post() .uri(uri) .set_json(&req_json) .to_request(); let resp = test::call_and_read_body(&app, req).await; assert_eq!(resp, Bytes::from_static(b"")) }
use animal_api::*; use animal_facts::*; mod generator; use crate::generator::*; use actix_web::web::{self, Bytes, Data}; use actix_web::{test, App}; #[actix_rt::test] async fn no_admins_loaded() { let dir = make_dir(); let mut state = gen_state(&dir); state.config.admins = Vec::new(); let req_json = AdminFactRequest { fact_id: None, fact_content: Some("SpookyFact".to_string()), animal_type: Animal::Cat, key: gen_admin_all_perms().key, }; assert_eq!( test_admin_fact_req(req_json, "/admin/fact/add", state).await, RESP_BAD_AUTH ) } #[actix_rt::test] async fn invalid_key() { let dir = make_dir(); let req_json = AdminFactRequest { fact_id: None, fact_content: Some("SpookyFact".to_string()), animal_type: Animal::Cat, key: "BadKey".to_string(), }; assert_eq!( test_admin_fact_req(req_json, "/admin/fact/add", gen_state(&dir)).await, RESP_BAD_AUTH ) } #[actix_rt::test] async fn missing_permission_add() { let dir = make_dir(); let req_json = AdminFactRequest { fact_id: None, fact_content: Some("SpookyFact".to_string()), animal_type: Animal::Cat, key: gen_admin_delete_only().key, }; assert_eq!( test_admin_fact_req(req_json, "/admin/fact/add", gen_state(&dir)).await, RESP_MISSING_PERMS ) } #[actix_rt::test] async fn missing_permission_delete() { let dir = make_dir(); let req_json = AdminFactRequest { fact_id: Some(6682463169732688062), fact_content: None, animal_type: Animal::Cat, key: gen_admin_add_only().key, }; assert_eq!( test_admin_fact_req(req_json, "/admin/fact/delete", gen_state(&dir)).await, RESP_MISSING_PERMS ) } #[actix_rt::test] async fn missing_permission_view() { let dir = make_dir(); let req_json = AdminFactRequest { fact_id: Some(6682463169732688062), fact_content: None, animal_type: Animal::Cat, key: gen_admin_view_only().key, }; assert_eq!( test_admin_fact_req(req_json, "/admin/fact/add", gen_state(&dir)).await, RESP_MISSING_PERMS ) } #[actix_rt::test] async fn missing_permission_all() { let dir = make_dir(); let req_json = AdminFactRequest { fact_id: Some(6682463169732688062), fact_content: None, animal_type: Animal::Cat, key: gen_admin_no_perms().key, }; assert_eq!( test_admin_fact_req(req_json, "/admin/fact/delete", gen_state(&dir)).await, RESP_MISSING_PERMS ) } #[actix_rt::test] async fn modify_cat_unloaded() { let dir = make_dir(); let mut state = gen_state(&dir); state.fact_lists.cat_facts = None; let req_json = AdminFactRequest { fact_id: Some(6682463169732688062), fact_content: None, animal_type: Animal::Cat, key: state.config.admins[0].key.clone(), }; assert_eq!( test_admin_fact_req(req_json, "/admin/fact/add", state).await, RESP_NOT_LOADED ) } #[actix_rt::test] async fn modify_dog_unloaded() { let dir = ma
#[actix_rt::test] async fn list_facts() { let dir = make_dir(); let uri = "/admin/fact/list"; let state = gen_state(&dir); let req_json = AdminFactRequest { fact_id: None, fact_content: None, animal_type: Animal::Cat, key: gen_admin_all_perms().key, }; let raw = gen_state(&dir).fact_lists.cat_facts.unwrap(); let expected = raw.read().unwrap(); let mock_state = Data::new(state); let app = test::init_service( App::new() .app_data(mock_state.clone()) .service(web::resource(uri).route(web::post().to(admin::modify_fact))), ) .await; let req = test::TestRequest::post() .uri(uri) .set_json(&req_json) .to_request(); let received: Vec<Fact> = test::call_and_read_body_json(&app, req).await; assert_eq!(received, *expected) } #[actix_rt::test] async fn add_fact_no_content() { let dir = make_dir(); let req_json = AdminFactRequest { fact_id: None, fact_content: None, animal_type: Animal::Cat, key: gen_admin_all_perms().key, }; assert_eq!( test_admin_fact_req(req_json, "/admin/fact/add", gen_state(&dir)).await, RESP_NO_CONTENT_SPECIFIED ) } #[actix_rt::test] async fn add_fact_ok() { let dir = make_dir(); let state = gen_state(&dir); let req_json = AdminFactRequest { fact_id: None, fact_content: Some("Huzaaah, a new fact!".to_string()), animal_type: Animal::Dog, key: gen_admin_all_perms().key, }; let resp = test_admin_fact_req(req_json, "/admin/fact/add", state).await; let word = CreatedAction::Fact { animal: Animal::Dog, }; let expected = JsonResp::new(201, word.as_str()); assert_eq!(resp, expected); } #[actix_rt::test] async fn delete_fact_no_id() { let dir = make_dir(); let req_json = AdminFactRequest { fact_id: None, fact_content: None, animal_type: Animal::Dog, key: gen_admin_all_perms().key, }; assert_eq!( test_admin_fact_req(req_json, "/admin/fact/delete", gen_state(&dir)).await, RESP_NO_ID_SUPPLIED ) } #[actix_rt::test] async fn delete_fact_bad_id() { let dir = make_dir(); let req_json = AdminFactRequest { fact_id: Some(3), fact_content: None, animal_type: Animal::Dog, key: gen_admin_all_perms().key, }; assert_eq!( test_admin_fact_req(req_json, "/admin/fact/delete", gen_state(&dir)).await, RESP_ID_NOT_FOUND ) } #[actix_rt::test] async fn delete_fact_ok() { let dir = make_dir(); let uri = "/admin/fact/delete"; let req_json = AdminFactRequest { fact_id: Some(6682463169732688062), fact_content: None, animal_type: Animal::Cat, key: gen_admin_all_perms().key, }; let state = gen_state(&dir); let mock_state = Data::new(state); let app = test::init_service( App::new() .app_data(mock_state.clone()) .service(web::resource(uri).route(web::post().to(admin::modify_fact))), ) .await; let req = test::TestRequest::post() .uri(uri) .set_json(&req_json) .to_request(); let resp = test::call_and_read_body(&app, req).await; assert_eq!(resp, Bytes::from_static(b"")) }
ke_dir(); let mut state = gen_state(&dir); state.fact_lists.dog_facts = None; let req_json = AdminFactRequest { fact_id: Some(6682463169732688062), fact_content: None, animal_type: Animal::Dog, key: state.config.admins[4].key.clone(), }; assert_eq!( test_admin_fact_req(req_json, "/admin/fact/add", state).await, RESP_NOT_LOADED ) }
function_block-function_prefixed
[ { "content": "pub fn get_cat_fact(app_data: Data<APIState>) -> HttpResponse {\n\n if let Some(fact_list) = &app_data.fact_lists.cat_facts {\n\n let mut rng = thread_rng();\n\n let list_lock = fact_list.read().unwrap();\n\n\n\n // This should never panic since `Some()` means there is >= 1 fact.\n\n let rand_pick = list_lock.choose(&mut rng).unwrap();\n\n\n\n app_data\n\n .req_counter\n\n .with_label_values(&[Animal::Cat.as_str()])\n\n .inc();\n\n\n\n HttpResponse::Ok().json(rand_pick)\n\n } else {\n\n generate_response(&RESP_NOT_LOADED)\n\n }\n\n}\n\n\n", "file_path": "src/animal_facts.rs", "rank": 0, "score": 95556.37723891423 }, { "content": "pub fn get_dog_fact(app_data: Data<APIState>) -> HttpResponse {\n\n if let Some(fact_list) = &app_data.fact_lists.dog_facts {\n\n let mut rng = thread_rng();\n\n let list_lock = fact_list.read().unwrap();\n\n\n\n // This should never panic since `Some()` means there is >= 1 fact.\n\n let rand_pick = list_lock.choose(&mut rng).unwrap();\n\n\n\n app_data\n\n .req_counter\n\n .with_label_values(&[Animal::Dog.as_str()])\n\n .inc();\n\n\n\n HttpResponse::Ok().json(rand_pick)\n\n } else {\n\n generate_response(&RESP_NOT_LOADED)\n\n }\n\n}\n", "file_path": "src/animal_facts.rs", "rank": 1, "score": 95556.37723891423 }, { "content": "// The user is responsible for ensuring proper ACLs or rate limiting to this\n\npub fn set_flag(app_data: Data<APIState>, body: Json<FactFlagRequest>) -> HttpResponse {\n\n if !app_data.config.flagging_enabled {\n\n return generate_response(&RESP_NOT_LOADED);\n\n }\n\n\n\n let body = body.into_inner();\n\n\n\n // Make sure the request is allowed\n\n let location = match check_flagger(body.key, &app_data.config.flaggers) {\n\n Some(location) => location,\n\n None => return generate_response(&RESP_BAD_AUTH),\n\n };\n\n\n\n let flag_list = app_data.fact_flags.as_ref().unwrap();\n\n {\n\n // Allow users to submit a name to flag it under or fallback to the submitter location\n\n // This allows a use case of sending specific user IDs when used by another system\n\n let flagger = match body.flagger {\n\n Some(flagger) => flagger,\n\n None => location.location,\n", "file_path": "src/flagging.rs", "rank": 2, "score": 86367.61440268898 }, { "content": "pub fn make_dir() -> TempDir {\n\n let dir = tempdir::TempDir::new(\"facts\").unwrap();\n\n\n\n for file in std::fs::read_dir(\"./example_facts\").unwrap() {\n\n let file = file.unwrap();\n\n std::fs::copy(\n\n file.path(),\n\n dir.path().join(file.path().file_name().unwrap()),\n\n )\n\n .unwrap();\n\n }\n\n\n\n dir\n\n}\n\n\n\npub async fn test_fact_consumer_req(animal: Animal, uri: &str, state: APIState) {\n\n let mock_state = Data::new(state);\n\n\n\n let endpoint = match animal {\n\n Animal::Cat => animal_facts::get_cat_fact,\n", "file_path": "tests/generator.rs", "rank": 3, "score": 76580.77800652565 }, { "content": "pub fn gen_state(tmp_dir: &TempDir) -> APIState {\n\n let config = Config {\n\n logging_dir: \"./test_logs\".to_string(),\n\n logging_level: \"info\".to_string(),\n\n facts_dir: tmp_dir.path().as_os_str().to_string_lossy().to_string(),\n\n animal_fact_types: vec![Animal::Cat, Animal::Dog],\n\n flagging_enabled: true,\n\n server: ServerConfig {\n\n ip: \"127.0.0.1\".parse().unwrap(),\n\n port: 8080,\n\n },\n\n admins: vec![\n\n gen_admin_add_only(),\n\n gen_admin_view_only(),\n\n gen_admin_delete_only(),\n\n gen_admin_no_perms(),\n\n gen_admin_all_perms(),\n\n ],\n\n flaggers: vec![gen_flagger()],\n\n };\n", "file_path": "tests/generator.rs", "rank": 4, "score": 76525.91964929686 }, { "content": "fn list_flags(state: &APIState) -> HttpResponse {\n\n let flag_list = state.fact_flags.as_ref().unwrap().read().unwrap();\n\n\n\n HttpResponse::Ok().json(&*flag_list)\n\n}\n\n\n", "file_path": "src/admin.rs", "rank": 5, "score": 70739.47537532775 }, { "content": "fn view_facts(animal: Animal, state: &APIState) -> HttpResponse {\n\n // Unwrap is already verified before function call\n\n let FactLists {\n\n cat_facts,\n\n dog_facts,\n\n } = &state.fact_lists;\n\n let fact_list = match animal {\n\n Animal::Cat => cat_facts.as_ref().unwrap().read().unwrap(),\n\n Animal::Dog => dog_facts.as_ref().unwrap().read().unwrap(),\n\n };\n\n\n\n HttpResponse::Ok().status(StatusCode::OK).json(&*fact_list)\n\n}\n\n\n", "file_path": "src/admin.rs", "rank": 6, "score": 65176.32914006343 }, { "content": "fn delete_flag(state: &APIState, rem_id: u64, user: &Admin) -> HttpResponse {\n\n let flag_list = state.fact_flags.as_ref().unwrap();\n\n\n\n let mut list_lock = flag_list.write().unwrap();\n\n if let Some(found) = list_lock\n\n .iter()\n\n .enumerate()\n\n .find(|(_, flag)| flag.id == rem_id)\n\n {\n\n let pos = found.0;\n\n list_lock.remove(pos);\n\n\n\n info!(\"Flag #{} removed by {}\", rem_id, user.name);\n\n\n\n HttpResponse::NoContent().finish()\n\n } else {\n\n generate_response(&RESP_ID_NOT_FOUND)\n\n }\n\n}\n\n\n", "file_path": "src/admin.rs", "rank": 7, "score": 59408.11416486303 }, { "content": "fn modify_persistent_flag(flag_list: RwLockWriteGuard<Vec<FactFlag>>, state: &APIState) {\n\n let path = Path::new(&state.config.facts_dir).join(\"fact_flags.json\");\n\n fs::write(path, serde_json::to_string_pretty(&*flag_list).unwrap()).unwrap()\n\n}\n\n\n\n#[cfg(test)]\n\nmod permission_tests {\n\n use super::check_admin_perms;\n\n use super::{Admin, Perms};\n\n\n\n fn gen_admin_add_only() -> Admin {\n\n Admin {\n\n name: \"Tester\".to_string(),\n\n key: \"add_only\".to_string(),\n\n permissions: Perms {\n\n view_facts: true,\n\n add_fact: true,\n\n delete_fact: false,\n\n view_flags: true,\n\n add_flag: true,\n", "file_path": "src/admin.rs", "rank": 8, "score": 57502.610602806766 }, { "content": "fn delete_fact(\n\n animal: Animal,\n\n user: &Admin,\n\n request: AdminFactRequest,\n\n state: &APIState,\n\n) -> HttpResponse {\n\n if let Some(rem_id) = request.fact_id {\n\n let fact_list = determine_list(animal, &state.fact_lists);\n\n let mut list_lock = fact_list.write().unwrap();\n\n if let Some(found) = list_lock\n\n .iter()\n\n .enumerate()\n\n .find(|(_, fact)| fact.id == rem_id)\n\n {\n\n let pos = found.0;\n\n list_lock.remove(pos);\n\n modify_persistent_fact(animal, list_lock, state);\n\n\n\n warn!(\"{} fact removed by {}\", animal.as_str(), user.name);\n\n\n\n HttpResponse::NoContent().finish()\n\n } else {\n\n generate_response(&RESP_ID_NOT_FOUND)\n\n }\n\n } else {\n\n generate_response(&RESP_NO_ID_SUPPLIED)\n\n }\n\n}\n\n\n", "file_path": "src/admin.rs", "rank": 9, "score": 31975.554212842333 }, { "content": "fn add_fact(\n\n animal: Animal,\n\n user: &Admin,\n\n request: AdminFactRequest,\n\n state: &APIState,\n\n) -> HttpResponse {\n\n let id = rand::thread_rng().next_u64();\n\n\n\n let fact_list = determine_list(animal, &state.fact_lists);\n\n let mut list_lock = fact_list.write().unwrap();\n\n\n\n match request.fact_content {\n\n Some(content) => {\n\n list_lock.push(Fact { id, content });\n\n }\n\n None => {\n\n return generate_response(&RESP_NO_CONTENT_SPECIFIED);\n\n }\n\n }\n\n\n\n modify_persistent_fact(animal, list_lock, state);\n\n\n\n let message = CreatedAction::Fact { animal };\n\n warn!(\"{} by {}\", message.as_str(), user.name);\n\n\n\n let resp = JsonResp::new(201, message.as_str());\n\n generate_response(&resp)\n\n}\n\n\n", "file_path": "src/admin.rs", "rank": 10, "score": 31975.554212842333 }, { "content": "// This will allow an admin to add a flag and bypass the user-restricted method\n\nfn add_flag(\n\n state: &APIState,\n\n user: &Admin,\n\n set_flag: (Animal, u64, Option<String>),\n\n) -> HttpResponse {\n\n let flag_list = state.fact_flags.as_ref().unwrap();\n\n let id = rand::thread_rng().next_u64();\n\n\n\n {\n\n let mut flag_list = flag_list.write().unwrap();\n\n\n\n if !flag_list\n\n .iter()\n\n .enumerate()\n\n .any(|(_, flag)| flag.fact_id == set_flag.1)\n\n {\n\n return generate_response(&RESP_ID_NOT_FOUND);\n\n }\n\n\n\n flag_list.push(FactFlag {\n", "file_path": "src/admin.rs", "rank": 11, "score": 31975.554212842333 }, { "content": "fn check_user<'a>(\n\n action: AdminAction,\n\n key: &str,\n\n state: &'a APIState,\n\n) -> Result<&'a Admin, HttpResponse> {\n\n if let Some((user, perms)) = check_admin_perms(key, &state.config.admins) {\n\n if let Some(perms) = perms {\n\n // Check if they are allowed to perform the desired action\n\n let missing_perms_resp = generate_response(&RESP_MISSING_PERMS);\n\n match action {\n\n AdminAction::View => {\n\n if !perms.view_facts {\n\n return Err(missing_perms_resp);\n\n }\n\n }\n\n AdminAction::Delete => {\n\n if !perms.delete_fact {\n\n return Err(missing_perms_resp);\n\n }\n\n }\n", "file_path": "src/admin.rs", "rank": 12, "score": 31305.35277260924 }, { "content": "fn modify_persistent_fact(\n\n animal: Animal,\n\n fact_list: RwLockWriteGuard<Vec<Fact>>,\n\n state: &APIState,\n\n) {\n\n let path = animal.get_filepath(&state.config.facts_dir);\n\n fs::write(path, serde_json::to_string_pretty(&*fact_list).unwrap()).unwrap()\n\n}\n\n\n", "file_path": "src/admin.rs", "rank": 13, "score": 30928.207862002597 }, { "content": "pub fn modify_flag(\n\n state: Data<APIState>,\n\n req: HttpRequest,\n\n body: Json<AdminFlagRequest>,\n\n) -> HttpResponse {\n\n if !state.config.flagging_enabled {\n\n return generate_response(&RESP_NOT_LOADED);\n\n }\n\n\n\n let action = determine_action(req.path());\n\n\n\n // Check if they have the needed flag related perms\n\n let user = match check_user(action, &body.key, &state) {\n\n Ok(user) => user,\n\n Err(resp) => return resp,\n\n };\n\n\n\n match action {\n\n AdminAction::View => list_flags(&state),\n\n AdminAction::Add => {\n", "file_path": "src/admin.rs", "rank": 14, "score": 30258.006421769504 }, { "content": "pub fn modify_fact(\n\n state: Data<APIState>,\n\n req: HttpRequest,\n\n body: Json<AdminFactRequest>,\n\n) -> HttpResponse {\n\n let action = determine_action(req.path());\n\n\n\n let user = match check_user(action, &body.key, &state) {\n\n Ok(user) => user,\n\n Err(resp) => return resp,\n\n };\n\n\n\n // Check if the requested animal list is loaded\n\n match body.animal_type {\n\n Animal::Cat => {\n\n if state.fact_lists.cat_facts.is_none() {\n\n return generate_response(&RESP_NOT_LOADED);\n\n }\n\n }\n\n Animal::Dog => {\n", "file_path": "src/admin.rs", "rank": 15, "score": 30258.006421769504 }, { "content": "fn check_admin_perms<'a>(\n\n unchecked_auth: &str,\n\n admin_list: &'a [Admin],\n\n) -> Option<(&'a Admin, Option<Perms>)> {\n\n if !admin_list.is_empty() {\n\n let unchecked_auth = unchecked_auth.as_bytes();\n\n match admin_list\n\n .iter()\n\n .find(|admin| bool::from(unchecked_auth.ct_eq(admin.key.as_bytes())))\n\n {\n\n Some(admin) => {\n\n let perms = admin.permissions;\n\n if perms.add_fact || perms.delete_fact || perms.view_flags || perms.delete_flag {\n\n Some((admin, Some(admin.permissions)))\n\n } else {\n\n Some((admin, None))\n\n }\n\n }\n\n None => None,\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/admin.rs", "rank": 16, "score": 30258.006421769504 }, { "content": "pub fn gen_flagger() -> Flagger {\n\n Flagger {\n\n location: \"test_location\".to_string(),\n\n key: \"flag_key\".to_string(),\n\n }\n\n}\n\n\n", "file_path": "tests/generator.rs", "rank": 17, "score": 28724.863863321767 }, { "content": "pub fn gen_admin_view_only() -> Admin {\n\n Admin {\n\n name: \"Tester\".to_string(),\n\n key: \"view_only\".to_string(),\n\n permissions: Perms {\n\n view_facts: true,\n\n add_fact: false,\n\n delete_fact: false,\n\n view_flags: false,\n\n add_flag: false,\n\n delete_flag: false,\n\n },\n\n }\n\n}\n\n\n", "file_path": "tests/generator.rs", "rank": 18, "score": 27848.51084239325 }, { "content": "pub fn gen_admin_all_perms() -> Admin {\n\n Admin {\n\n name: \"Tester\".to_string(),\n\n key: \"all_perms\".to_string(),\n\n permissions: Perms {\n\n view_facts: true,\n\n add_fact: true,\n\n delete_fact: true,\n\n view_flags: true,\n\n add_flag: false,\n\n delete_flag: true,\n\n },\n\n }\n\n}\n\n\n", "file_path": "tests/generator.rs", "rank": 19, "score": 27848.51084239325 }, { "content": "pub fn gen_admin_delete_only() -> Admin {\n\n Admin {\n\n name: \"Tester\".to_string(),\n\n key: \"delete_only\".to_string(),\n\n permissions: Perms {\n\n view_facts: true,\n\n add_fact: false,\n\n delete_fact: true,\n\n view_flags: true,\n\n add_flag: false,\n\n delete_flag: true,\n\n },\n\n }\n\n}\n\n\n", "file_path": "tests/generator.rs", "rank": 20, "score": 27848.51084239325 }, { "content": "pub fn gen_admin_add_only() -> Admin {\n\n Admin {\n\n name: \"Tester\".to_string(),\n\n key: \"add_only\".to_string(),\n\n permissions: Perms {\n\n view_facts: true,\n\n add_fact: true,\n\n delete_fact: false,\n\n view_flags: true,\n\n add_flag: true,\n\n delete_flag: false,\n\n },\n\n }\n\n}\n\n\n", "file_path": "tests/generator.rs", "rank": 21, "score": 27848.51084239325 }, { "content": "pub fn gen_admin_no_perms() -> Admin {\n\n Admin {\n\n name: \"Tester\".to_string(),\n\n key: \"no_perms\".to_string(),\n\n permissions: Perms {\n\n view_facts: false,\n\n add_fact: false,\n\n delete_fact: false,\n\n view_flags: false,\n\n add_flag: false,\n\n delete_flag: false,\n\n },\n\n }\n\n}\n\n\n", "file_path": "tests/generator.rs", "rank": 22, "score": 27848.51084239325 }, { "content": "fn determine_action(path: &str) -> AdminAction {\n\n if path.ends_with(\"list\") {\n\n AdminAction::View\n\n } else if path.ends_with(\"delete\") {\n\n AdminAction::Delete\n\n } else {\n\n AdminAction::Add\n\n }\n\n}\n\n\n", "file_path": "src/admin.rs", "rank": 23, "score": 26540.38111500356 }, { "content": "pub fn generate_response(resp: &JsonResp) -> HttpResponse {\n\n let status = StatusCode::from_u16(resp.code).unwrap();\n\n\n\n if status.is_server_error() {\n\n warn!(\"A request to an unloaded part of the server occured!\")\n\n }\n\n\n\n match status {\n\n StatusCode::CREATED => HttpResponse::Created().json(resp),\n\n StatusCode::NOT_IMPLEMENTED => HttpResponse::NotImplemented().json(resp),\n\n StatusCode::UNAUTHORIZED => HttpResponse::Unauthorized().json(resp),\n\n StatusCode::BAD_REQUEST => HttpResponse::BadRequest().json(resp),\n\n StatusCode::NOT_FOUND => HttpResponse::NotFound().json(resp),\n\n _ => unreachable!(),\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 24, "score": 24666.74855340168 }, { "content": "fn check_flagger(unchecked_auth: String, flagger_list: &[Flagger]) -> Option<Flagger> {\n\n if !flagger_list.is_empty() {\n\n let unchecked_auth = unchecked_auth.as_bytes();\n\n flagger_list\n\n .iter()\n\n .find(|flagger| bool::from(unchecked_auth.ct_eq(flagger.key.as_bytes())))\n\n .cloned()\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/flagging.rs", "rank": 25, "score": 22694.140559696818 }, { "content": "fn determine_list(animal: Animal, fact_lists: &FactLists) -> &RwLock<Vec<Fact>> {\n\n match animal {\n\n // These unwraps are safe due to previous checks\n\n Animal::Cat => fact_lists.cat_facts.as_ref().unwrap(),\n\n Animal::Dog => fact_lists.dog_facts.as_ref().unwrap(),\n\n }\n\n}\n\n\n", "file_path": "src/admin.rs", "rank": 26, "score": 21306.916638202787 }, { "content": "pub fn load_fact_lists(fact_count: &IntCounterVec, config: &Config) -> animal_facts::FactLists {\n\n let mut fact_lists = animal_facts::FactLists::default();\n\n for animal in &config.animal_fact_types {\n\n let file_name = animal.get_filepath(&config.facts_dir);\n\n\n\n if let Ok(fact_file) = fs::read_to_string(file_name) {\n\n let facts: Vec<animal_facts::Fact> = serde_json::from_str(&fact_file).unwrap();\n\n\n\n if facts.is_empty() {\n\n warn!(\n\n \"While loading {} facts, none were found in the file!\",\n\n animal.as_str()\n\n );\n\n continue;\n\n }\n\n\n\n fact_count\n\n .with_label_values(&[animal.as_str()])\n\n .inc_by(facts.len() as u64);\n\n\n", "file_path": "src/lib.rs", "rank": 27, "score": 19931.098682292966 }, { "content": "pub fn load_fact_flags(flag_count: &IntCounter, config: &Config) -> Option<RwLock<Vec<FactFlag>>> {\n\n let file_name = Path::new(&config.facts_dir).join(\"fact_flags.json\");\n\n\n\n if config.flagging_enabled {\n\n match fs::read_to_string(&file_name) {\n\n Ok(contents) => {\n\n let flags: Vec<FactFlag> =\n\n serde_json::from_str(&contents).expect(\"The flags file was malformed!\");\n\n flag_count.inc_by(flags.len() as u64);\n\n Some(RwLock::new(flags))\n\n }\n\n Err(_) => {\n\n warn!(\"Fact flagging was enabled, but the flagging history couldn't be found!\");\n\n None\n\n }\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 28, "score": 18989.035751385803 }, { "content": "use actix_web::web::{self, Bytes, Data};\n\nuse actix_web::{test, App};\n\n\n\nuse animal_api::*;\n\nmod generator;\n\nuse crate::generator::*;\n\n\n\n#[actix_rt::test]\n\nasync fn flagging_not_loaded() {\n\n let dir = make_dir();\n\n let mut state = gen_state(&dir);\n\n state.config.flagging_enabled = false;\n\n\n\n let req_json = AdminFlagRequest {\n\n key: gen_admin_all_perms().key,\n\n fact_id: None,\n\n flag_id: None,\n\n reason: None,\n\n fact_type: None,\n\n };\n", "file_path": "tests/admin_flag.rs", "rank": 29, "score": 21.913274135885214 }, { "content": "#[actix_rt::test]\n\nasync fn set_flag_unloaded() {\n\n let dir = make_dir();\n\n let mut state = gen_state(&dir);\n\n state.config.flagging_enabled = false;\n\n\n\n let req_json = FactFlagRequest {\n\n fact_type: Animal::Cat,\n\n fact_id: 6682463169732688062,\n\n reason: None,\n\n key: gen_flagger().key,\n\n flagger: None,\n\n };\n\n\n\n assert_eq!(\n\n test_flag_consumer_req(req_json, \"/flag\", state).await,\n\n RESP_NOT_LOADED\n\n )\n\n}\n\n\n", "file_path": "tests/consumption.rs", "rank": 31, "score": 12.532076154460608 }, { "content": "async fn get_unloaded_cat() {\n\n let dir = make_dir();\n\n let mut state = gen_state(&dir);\n\n state.fact_lists.cat_facts = None;\n\n\n\n // This will fail because the JSON returned is a JsonResp, not a Fact\n\n test_fact_consumer_req(Animal::Cat, \"/cat/fact\", state).await\n\n}\n\n\n\n#[actix_rt::test]\n\n#[should_panic]\n\nasync fn get_unloaded_dog() {\n\n let dir = make_dir();\n\n let mut state = gen_state(&dir);\n\n state.fact_lists.dog_facts = None;\n\n\n\n test_fact_consumer_req(Animal::Dog, \"/dog/fact\", state).await\n\n}\n\n\n\n// Flag tests\n", "file_path": "tests/consumption.rs", "rank": 34, "score": 11.833115070338284 }, { "content": " key: gen_admin_all_perms().key,\n\n fact_id: None,\n\n flag_id: None,\n\n reason: None,\n\n fact_type: None,\n\n };\n\n\n\n let mock_state = Data::new(state2);\n\n let app = test::init_service(\n\n App::new()\n\n .app_data(mock_state.clone())\n\n .service(web::resource(uri).route(web::post().to(admin::modify_flag))),\n\n )\n\n .await;\n\n\n\n let req = test::TestRequest::post()\n\n .uri(uri)\n\n .set_json(&req_json)\n\n .to_request();\n\n\n", "file_path": "tests/admin_flag.rs", "rank": 35, "score": 11.704049538312972 }, { "content": "async fn add_flag_bad_factid() {\n\n let dir = make_dir();\n\n let req_json = AdminFlagRequest {\n\n key: gen_admin_all_perms().key,\n\n fact_id: Some(18446744073709551615),\n\n flag_id: None,\n\n reason: None,\n\n fact_type: Some(Animal::Cat),\n\n };\n\n\n\n assert_eq!(\n\n test_admin_flag_req(req_json, \"/admin/flag/add\", gen_state(&dir)).await,\n\n RESP_ID_NOT_FOUND\n\n )\n\n}\n\n\n\n#[actix_rt::test]\n\nasync fn add_flag_ok() {\n\n let dir = make_dir();\n\n let state = gen_state(&dir);\n", "file_path": "tests/admin_flag.rs", "rank": 36, "score": 11.212775578969662 }, { "content": " let returned: Vec<FactFlag> = test::call_and_read_body_json(&app, req).await;\n\n\n\n assert_eq!(returned, *expected);\n\n}\n\n\n\n#[actix_rt::test]\n\nasync fn add_flag_no_type() {\n\n let dir = make_dir();\n\n let req_json = AdminFlagRequest {\n\n key: gen_admin_all_perms().key,\n\n fact_id: None,\n\n flag_id: None,\n\n reason: None,\n\n fact_type: None,\n\n };\n\n\n\n assert_eq!(\n\n test_admin_flag_req(req_json, \"/admin/flag/add\", gen_state(&dir)).await,\n\n RESP_NO_TYPE_SUPPLIED\n\n )\n", "file_path": "tests/admin_flag.rs", "rank": 38, "score": 11.085569216791603 }, { "content": "\n\n let state = gen_state(&dir);\n\n\n\n let mock_state = Data::new(state);\n\n let app = test::init_service(\n\n App::new()\n\n .app_data(mock_state.clone())\n\n .service(web::resource(uri).route(web::post().to(admin::modify_flag))),\n\n )\n\n .await;\n\n\n\n let req = test::TestRequest::post()\n\n .uri(uri)\n\n .set_json(&req_json)\n\n .to_request();\n\n\n\n let resp = test::call_and_read_body(&app, req).await;\n\n\n\n assert_eq!(resp, Bytes::from_static(b\"\"))\n\n}\n", "file_path": "tests/admin_flag.rs", "rank": 40, "score": 10.991379510295335 }, { "content": "use animal_api::*;\n\nmod generator;\n\nuse crate::generator::*;\n\n\n\n// Fact tests\n\n#[actix_rt::test]\n\nasync fn get_fact_cat() {\n\n // This will fail if for some reason its not returning a fact\n\n let dir = make_dir();\n\n test_fact_consumer_req(Animal::Cat, \"/cat/fact\", gen_state(&dir)).await\n\n}\n\n\n\n#[actix_rt::test]\n\nasync fn get_fact_dog() {\n\n let dir = make_dir();\n\n test_fact_consumer_req(Animal::Dog, \"/dog/fact\", gen_state(&dir)).await\n\n}\n\n\n\n#[actix_rt::test]\n\n#[should_panic]\n", "file_path": "tests/consumption.rs", "rank": 41, "score": 10.734946610077575 }, { "content": " fact_id: None,\n\n flag_id: None,\n\n reason: None,\n\n fact_type: None,\n\n };\n\n\n\n assert_eq!(\n\n test_admin_flag_req(req_json, \"/admin/flag/delete\", gen_state(&dir)).await,\n\n RESP_NO_ID_SUPPLIED\n\n )\n\n}\n\n\n\n#[actix_rt::test]\n\nasync fn delete_flag_invalid_id() {\n\n let dir = make_dir();\n\n let req_json = AdminFlagRequest {\n\n key: gen_admin_all_perms().key,\n\n fact_id: None,\n\n flag_id: Some(18446744073709551615),\n\n reason: None,\n", "file_path": "tests/admin_flag.rs", "rank": 43, "score": 10.22404666636907 }, { "content": " fact_type: None,\n\n };\n\n\n\n assert_eq!(\n\n test_admin_flag_req(req_json, \"/admin/flag/delete\", gen_state(&dir)).await,\n\n RESP_ID_NOT_FOUND\n\n )\n\n}\n\n\n\n#[actix_rt::test]\n\nasync fn delete_flag_ok() {\n\n let dir = make_dir();\n\n let uri = \"/admin/flag/delete\";\n\n let req_json = AdminFlagRequest {\n\n key: gen_admin_all_perms().key,\n\n fact_id: None,\n\n flag_id: Some(6682463169732628062),\n\n reason: None,\n\n fact_type: None,\n\n };\n", "file_path": "tests/admin_flag.rs", "rank": 44, "score": 10.083274823461295 }, { "content": "}\n\n\n\n#[actix_rt::test]\n\nasync fn add_flag_no_factid() {\n\n let dir = make_dir();\n\n let req_json = AdminFlagRequest {\n\n key: gen_admin_all_perms().key,\n\n fact_id: None,\n\n flag_id: None,\n\n reason: None,\n\n fact_type: Some(Animal::Cat),\n\n };\n\n\n\n assert_eq!(\n\n test_admin_flag_req(req_json, \"/admin/flag/add\", gen_state(&dir)).await,\n\n RESP_NO_ID_SUPPLIED\n\n )\n\n}\n\n\n\n#[actix_rt::test]\n", "file_path": "tests/admin_flag.rs", "rank": 45, "score": 9.991405959097495 }, { "content": "#[actix_rt::test]\n\nasync fn set_flag_invalid_auth() {\n\n let dir = make_dir();\n\n let req_json = FactFlagRequest {\n\n fact_type: Animal::Cat,\n\n fact_id: 6682463169732688062,\n\n reason: None,\n\n key: \"AGreatPassword\".to_string(),\n\n flagger: None,\n\n };\n\n\n\n assert_eq!(\n\n test_flag_consumer_req(req_json, \"/flag\", gen_state(&dir)).await,\n\n RESP_BAD_AUTH\n\n )\n\n}\n\n\n\n#[actix_rt::test]\n\nasync fn set_flag_bad_factid() {\n\n let dir = make_dir();\n", "file_path": "tests/consumption.rs", "rank": 47, "score": 9.855603935100307 }, { "content": " let req_json = FactFlagRequest {\n\n fact_type: Animal::Cat,\n\n fact_id: 18446744073709551615,\n\n reason: Some(\"A Reason\".to_string()),\n\n key: gen_flagger().key,\n\n flagger: None,\n\n };\n\n\n\n assert_eq!(\n\n test_flag_consumer_req(req_json, \"/flag\", gen_state(&dir)).await,\n\n RESP_ID_NOT_FOUND\n\n )\n\n}\n\n\n\n#[actix_rt::test]\n\nasync fn set_flag_valid() {\n\n let dir = make_dir();\n\n let state = gen_state(&dir);\n\n let req_json = FactFlagRequest {\n\n fact_type: Animal::Cat,\n", "file_path": "tests/consumption.rs", "rank": 48, "score": 9.769942796774693 }, { "content": " Animal::Dog => animal_facts::get_dog_fact,\n\n };\n\n\n\n let app = test::init_service(\n\n App::new()\n\n .app_data(mock_state.clone())\n\n .service(web::resource(uri).route(web::get().to(endpoint))),\n\n )\n\n .await;\n\n\n\n let req = test::TestRequest::get().uri(uri).to_request();\n\n let _: Fact = test::call_and_read_body_json(&app, req).await;\n\n}\n\n\n\npub async fn test_flag_consumer_req(req: FactFlagRequest, uri: &str, state: APIState) -> JsonResp {\n\n let mock_state = Data::new(state);\n\n\n\n let app = test::init_service(\n\n App::new()\n\n .app_data(mock_state.clone())\n", "file_path": "tests/generator.rs", "rank": 51, "score": 9.412237447370549 }, { "content": "\n\n let req_json = AdminFlagRequest {\n\n key: gen_admin_all_perms().key,\n\n fact_id: Some(6682463169732688062),\n\n flag_id: None,\n\n reason: None,\n\n fact_type: Some(Animal::Cat),\n\n };\n\n\n\n let resp = test_admin_flag_req(req_json, \"/admin/flag/add\", state).await;\n\n\n\n let expected = JsonResp::new(201, CreatedAction::Flag.as_str());\n\n assert_eq!(resp, expected)\n\n}\n\n\n\n#[actix_rt::test]\n\nasync fn delete_flag_no_id() {\n\n let dir = make_dir();\n\n let req_json = AdminFlagRequest {\n\n key: gen_admin_all_perms().key,\n", "file_path": "tests/admin_flag.rs", "rank": 53, "score": 9.179553057017635 }, { "content": " .service(web::resource(uri).route(web::post().to(flagging::set_flag))),\n\n )\n\n .await;\n\n\n\n let req = test::TestRequest::post()\n\n .uri(uri)\n\n .set_json(&req)\n\n .to_request();\n\n\n\n test::call_and_read_body_json(&app, req).await\n\n}\n\n\n\npub async fn test_admin_fact_req<T: Serialize>(req: T, uri: &str, state: APIState) -> JsonResp {\n\n let mock_state = Data::new(state);\n\n let app = test::init_service(\n\n App::new()\n\n .app_data(mock_state.clone())\n\n .service(web::resource(uri).route(web::post().to(admin::modify_fact))),\n\n )\n\n .await;\n", "file_path": "tests/generator.rs", "rank": 56, "score": 8.594521994016375 }, { "content": "\n\n let req = test::TestRequest::post()\n\n .uri(uri)\n\n .set_json(&req)\n\n .to_request();\n\n\n\n test::call_and_read_body_json(&app, req).await\n\n}\n\n\n\npub async fn test_admin_flag_req<T: Serialize>(req: T, uri: &str, state: APIState) -> JsonResp {\n\n let mock_state = Data::new(state);\n\n let app = test::init_service(\n\n App::new()\n\n .app_data(mock_state.clone())\n\n .service(web::resource(uri).route(web::post().to(admin::modify_flag))),\n\n )\n\n .await;\n\n\n\n let req = test::TestRequest::post()\n\n .uri(uri)\n\n .set_json(&req)\n\n .to_request();\n\n\n\n test::call_and_read_body_json(&app, req).await\n\n}\n", "file_path": "tests/generator.rs", "rank": 57, "score": 8.495276631913981 }, { "content": "#![allow(dead_code)]\n\n\n\nuse actix_web::{test, web, web::Data, App};\n\nuse animal_api::*;\n\nuse animal_facts::*;\n\nuse prometheus::{IntCounter, IntCounterVec, Opts, Registry};\n\nuse serde::{Deserialize, Serialize};\n\nuse tempdir::TempDir;\n\n\n\n// This structure and the below comparision exists because we can't deserialize\n\n// the crate's JsonResp because it uses &'static str's for all the messages.\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct JsonResp {\n\n code: u16,\n\n message: String,\n\n}\n\n\n\nimpl PartialEq<animal_api::JsonResp> for JsonResp {\n\n fn eq(&self, other: &animal_api::JsonResp) -> bool {\n\n self.code == other.code && self.message == other.message\n\n }\n\n}\n\n\n", "file_path": "tests/generator.rs", "rank": 60, "score": 7.242470547067791 }, { "content": "\n\n assert_eq!(\n\n test_admin_flag_req(req_json, \"/admin/flag/list\", state).await,\n\n RESP_NOT_LOADED\n\n );\n\n}\n\n\n\n// The user permission checks are tested in the general tests\n\n\n\n#[actix_rt::test]\n\nasync fn view_flags() {\n\n let dir = make_dir();\n\n let uri = \"/admin/flag/list\";\n\n\n\n let (state, state2) = (gen_state(&dir), gen_state(&dir));\n\n\n\n let raw = state.fact_flags.unwrap();\n\n let expected = raw.read().unwrap();\n\n\n\n let req_json = AdminFlagRequest {\n", "file_path": "tests/admin_flag.rs", "rank": 61, "score": 6.713210099579081 }, { "content": "#![deny(warnings)]\n\n#![deny(unsafe_code)]\n\n\n\nuse actix_web::http::StatusCode;\n\nuse actix_web::web::HttpResponse;\n\nuse log::{info, warn};\n\nuse prometheus::{IntCounter, IntCounterVec, Registry};\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse std::fmt;\n\nuse std::fs;\n\nuse std::net::IpAddr;\n\nuse std::path::{Path, PathBuf};\n\nuse std::sync::RwLock;\n\n\n\npub mod admin;\n\npub mod animal_facts;\n\npub mod flagging;\n\n\n\n#[derive(Clone, Copy, Serialize, Deserialize, Debug, PartialEq)]\n", "file_path": "src/lib.rs", "rank": 62, "score": 6.001942375356779 }, { "content": " fact_id: 6682463169732688062,\n\n reason: Some(\"A Reason\".to_string()),\n\n key: gen_flagger().key,\n\n flagger: None,\n\n };\n\n\n\n let resp = test_flag_consumer_req(req_json, \"/flag\", state).await;\n\n\n\n let expected = JsonResp::new(201, CreatedAction::Flag.as_str());\n\n assert_eq!(resp, expected)\n\n}\n", "file_path": "tests/consumption.rs", "rank": 63, "score": 5.876239018388441 }, { "content": "use actix_web::web::{Data, Json};\n\nuse actix_web::HttpResponse;\n\nuse rand::RngCore;\n\nuse subtle::ConstantTimeEq;\n\n\n\nuse std::{fs, path::Path};\n\n\n\nuse crate::*;\n\n\n", "file_path": "src/flagging.rs", "rank": 64, "score": 5.652779411126762 }, { "content": "use actix_web::web::{Data, HttpResponse};\n\nuse rand::{seq::SliceRandom, thread_rng};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::sync::RwLock;\n\n\n\nuse crate::{generate_response, APIState, Animal, RESP_NOT_LOADED};\n\n\n\n#[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]\n\npub struct Fact {\n\n pub id: u64,\n\n pub content: String,\n\n}\n\n\n\n// The system can support all listed fact types, but they aren't required to be present\n\n#[derive(Serialize, Deserialize, Debug, Default)]\n\npub struct FactLists {\n\n pub cat_facts: Option<RwLock<Vec<Fact>>>,\n\n pub dog_facts: Option<RwLock<Vec<Fact>>>,\n\n}\n\n\n", "file_path": "src/animal_facts.rs", "rank": 65, "score": 5.454141698587025 }, { "content": "use actix_web::http::StatusCode;\n\nuse actix_web::web::{Data, Json};\n\nuse actix_web::{HttpRequest, HttpResponse};\n\nuse log::{info, warn};\n\nuse rand::RngCore;\n\nuse subtle::ConstantTimeEq;\n\n\n\nuse std::fs;\n\nuse std::path::Path;\n\nuse std::sync::{RwLock, RwLockWriteGuard};\n\n\n\nuse crate::animal_facts::{Fact, FactLists};\n\nuse crate::*;\n\n\n", "file_path": "src/admin.rs", "rank": 66, "score": 5.153778849942588 }, { "content": "\n\n flag_list.push(FactFlag {\n\n id,\n\n fact_type,\n\n fact_id,\n\n reason,\n\n flagger,\n\n })\n\n }\n\n\n\n let file_path = Path::new(&app_data.config.facts_dir).join(\"fact_flags.json\");\n\n fs::write(file_path, serde_json::to_string_pretty(flag_list).unwrap())\n\n .expect(\"Failed writing to flags file!\");\n\n\n\n let resp = JsonResp::new(201, CreatedAction::Flag.as_str());\n\n generate_response(&resp)\n\n}\n", "file_path": "src/flagging.rs", "rank": 67, "score": 4.905636979370473 }, { "content": " #[test]\n\n fn invalid_key() {\n\n let admin_list = vec![gen_admin_no_perms(), gen_admin_all_perms()];\n\n assert_eq!(check_admin_perms(\"TesterKey\", &admin_list), None);\n\n }\n\n\n\n #[test]\n\n fn no_perms() {\n\n let admin_list = vec![gen_admin_no_perms(), gen_admin_all_perms()];\n\n let expected = (&gen_admin_no_perms(), None);\n\n assert_eq!(\n\n check_admin_perms(&admin_list[0].key, &admin_list),\n\n Some(expected)\n\n );\n\n }\n\n\n\n #[test]\n\n fn some_perms() {\n\n let admin_list = vec![gen_admin_add_only(), gen_admin_no_perms()];\n\n let expected = Some((\n", "file_path": "src/admin.rs", "rank": 68, "score": 4.360197225180334 }, { "content": " if state.fact_lists.dog_facts.is_none() {\n\n return generate_response(&RESP_NOT_LOADED);\n\n }\n\n }\n\n }\n\n\n\n match action {\n\n AdminAction::Add => add_fact(body.animal_type, user, body.into_inner(), &state),\n\n AdminAction::Delete => delete_fact(body.animal_type, user, body.into_inner(), &state),\n\n AdminAction::View => view_facts(body.animal_type, &state),\n\n }\n\n}\n\n\n", "file_path": "src/admin.rs", "rank": 69, "score": 4.208378467982891 }, { "content": "}\n\n\n\n#[derive(Clone, Debug, Deserialize)]\n\npub struct Config {\n\n pub logging_dir: String,\n\n pub logging_level: String,\n\n pub facts_dir: String,\n\n pub animal_fact_types: Vec<Animal>,\n\n pub flagging_enabled: bool,\n\n pub flaggers: Vec<Flagger>,\n\n pub server: ServerConfig,\n\n pub admins: Vec<Admin>,\n\n}\n\n\n\n#[derive(Clone, Serialize, Deserialize, Debug)]\n\npub struct AdminFactRequest {\n\n // Only used on removals\n\n pub fact_id: Option<u64>,\n\n // Only used on additions/updates\n\n pub fact_content: Option<String>,\n", "file_path": "src/lib.rs", "rank": 70, "score": 4.20787644883571 }, { "content": "#### Flags\n\nThe AnimalAPI has optional support for submitting fact \"flags\", or notices. This feature is meant to\n\nbe used behind another service, such as a dashboard, because the API takes no responsibility managing who flags are sent by. All it sees is a authorization key. It is up to the implementor to rate limit, allow users, etc.\n\n\n\nIn order to enable this feature, two things must be done first. First, set `flagging_enabled` to `true` in the configuration. Second, create a flagger location inside the config as well. See the default config for an example.\n\n\n\nIn order to set a flag attempt, a request must be POSTed to the `/flag` endpoint:\n\n```json\n\n{\n\n \"fact_type\": \"Dog\",\n\n \"fact_id\": 29230202030113,\n\n \"reason\": \"Why I Don't Like It\",\n\n \"key\": \"SecretKey\",\n\n \"flagger\": \"SomeLocation\"\n\n}\n\n```\n\n\n\nThe `key` field is required to be populated with a key defined inside a flagger location in the config.\n\n\n\nThe `reason` and `flagger` fields are both optional. If the `flagger` field is not set, then the entry's submitter field will default to the name associated with the authorization key used. This is so implementors have the ability to make an end user the flagger, if they so choose. So instead of `Dashboard` being the flagger, it could be `Sam12345`.\n\n\n\n\n\n### Admin Interface:\n\nAn easy to use admin API is provided under `/admin`. Admins are defined in the `config.toml` file. When no admins exist\n\nin the configuration, any requests to the admin endpoints will fail due to invalid authorization.\n\n\n\nAn admin can possess any combination of add, delete, view, or no permissions:\n\n```toml\n\n[[admins]]\n\nname = \"SpookyAdmin\"\n\nkey = \"SuperSecretKey\"\n\n[admins.permissions]\n\nadd_fact = true\n\ndelete_fact = false\n\nview_facts = true\n\n...\n\n```\n", "file_path": "README.md", "rank": 71, "score": 4.165727627897352 }, { "content": "\n\nimpl Animal {\n\n pub fn get_filepath(self, dir: &str) -> PathBuf {\n\n match self {\n\n Animal::Cat => Path::new(dir).join(\"cat_facts.json\"),\n\n Animal::Dog => Path::new(dir).join(\"dog_facts.json\"),\n\n }\n\n }\n\n}\n\n\n\nimpl Animal {\n\n fn as_str(self) -> &'static str {\n\n match self {\n\n Animal::Cat => \"Cat\",\n\n Animal::Dog => \"Dog\",\n\n }\n\n }\n\n}\n\n\n\npub struct APIState {\n", "file_path": "src/lib.rs", "rank": 72, "score": 4.10249272014047 }, { "content": " let req = body.into_inner();\n\n\n\n // Make sure that they provided the required values\n\n if req.fact_type.is_none() {\n\n return generate_response(&RESP_NO_TYPE_SUPPLIED);\n\n }\n\n if req.fact_id.is_none() {\n\n return generate_response(&RESP_NO_ID_SUPPLIED);\n\n }\n\n\n\n add_flag(\n\n &state,\n\n user,\n\n (req.fact_type.unwrap(), req.fact_id.unwrap(), req.reason),\n\n )\n\n }\n\n AdminAction::Delete => {\n\n if let Some(id) = body.into_inner().flag_id {\n\n delete_flag(&state, id, user)\n\n } else {\n\n generate_response(&RESP_NO_ID_SUPPLIED)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/admin.rs", "rank": 73, "score": 4.0006297278760465 }, { "content": " fn gen_admin_all_perms() -> Admin {\n\n Admin {\n\n name: \"Tester\".to_string(),\n\n key: \"all_perms\".to_string(),\n\n permissions: Perms {\n\n view_facts: true,\n\n add_fact: true,\n\n delete_fact: true,\n\n view_flags: true,\n\n add_flag: true,\n\n delete_flag: true,\n\n },\n\n }\n\n }\n\n\n\n #[test]\n\n fn no_admin_list() {\n\n assert_eq!(check_admin_perms(\"TesterKey\", &Vec::new()), None);\n\n }\n\n\n", "file_path": "src/admin.rs", "rank": 74, "score": 3.843951974217504 }, { "content": "# Gearbot Animal API\n\n[![Build Status](https://dev.azure.com/GearBot/GearBot/_apis/build/status/gearbot.Gearbot-Animal-API?branchName=master)](https://dev.azure.com/GearBot/GearBot/_build/latest?definitionId=1&branchName=master)\n\n\n\n## About\n\nThis is the API that [Gearbot](https://github.com/gearbot/Gearbot) uses for retrieving animal facts. It does exactly what it\n\nsounds like.\n\n\n\n## Setup\n\n1. After downloading the repository and generating the desired binary, copy `default_config.toml` to `config.toml`.\n\n2. Next, edit all the values needed inside the config. If you want to use the admin interface, make sure to add some admins.\n\nSee `default_config.toml` for more information.\n\n\n\n3. Make sure the defined `facts_dir` has either/or `cat_facts.json` and `dog_facts.json`.\n\n4. Start the app\n\n\n\n## Usage\n\n### Consumption:\n\n\n\n#### Facts:\n\nTo get a fact, visit `/{animal}/fact/`. Currently, only dog and cat facts are supported.\n\nFacts are returned in the format of:\n\n```json\n\n{\n\n \"id\": 15889153292243741260,\n\n \"content\": \"Some cool fact\"\n\n}\n\n```\n\n\n", "file_path": "README.md", "rank": 75, "score": 3.773595383602959 }, { "content": " pub animal_type: Animal,\n\n pub key: String,\n\n}\n\n\n\n#[derive(Clone, Copy, Serialize, Deserialize, Debug, PartialEq)]\n\npub enum AdminAction {\n\n Add,\n\n Delete,\n\n View,\n\n}\n\n\n\nimpl fmt::Display for AdminAction {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n AdminAction::Add => write!(f, \"add\"),\n\n AdminAction::Delete => write!(f, \"delete\"),\n\n AdminAction::View => write!(f, \"view\"),\n\n }\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 76, "score": 3.339012265135356 }, { "content": "#### Facts\n\nTo add a new fact to the system, send a `POST` request to `/admin/fact/add`:\n\n```json\n\n{\n\n \"animal_type\": \"Cat\",\n\n \"content\": \"Huzzah, a new fact!\",\n\n \"key\": \"SuperSecretKey\"\n\n}\n\n```\n\n\n\nTo delete a fact from the system, send a `POST` request to `/admin/fact/delete`:\n\n```json\n\n{\n\n \"animal_type\": \"Dog\",\n\n \"fact_id\": 82872012121262,\n\n \"key\": \"SuperSecretKey\"\n\n}\n\n```\n\n\n\nTo see all the current facts loaded for a specific animal at once, send a `POST` request to `/admin/fact/list`:\n\n```json\n\n{\n\n \"animal_type\": \"Dog\",\n\n \"key\": \"SuperSecretKey\"\n\n}\n\n```\n\n\n\n#### Flags\n\nThe admin interface also provides a way for users to add, delete, or view the current fact flags in a similar fashion to handling facts. The required permissions can be found in the example config.\n\n\n\nTo add a flag, send a `POST` request to `/admin/flag/add`:\n\n```json\n\n {\n\n \"key\": \"SuperSecretKey\",\n\n \"fact_id\": 6682463169732688062,\n\n \"reason\": \"It was weird\",\n\n \"fact_type\": \"Dog\"\n\n}\n\n```\n\nTo delete a flag, send a `POST` request to `/admin/flag/delete`:\n\n```json\n\n{\n\n \"key\": \"SuperSecretKey\",\n\n \"flag_id\": 6682463169732628062\n\n}\n\n```\n\n\n\nTo view all the current flags, send a `POST` request to `/admin/flag/list` with just a key:\n\n```json\n\n{\n\n \"key\": \"SuperSecretKey\"\n\n}\n\n```\n\n\n\nNote: For all of the above, the animal type must be capitalized. Ex: `Cat` works, but `cat` does not.\n\n\n\nIf the admin request was malformed somehow, an error will be returned in the format of:\n\n```json\n\n{\n\n \"code\": 401,\n\n \"message\": \"Bad authorization!\"\n\n}\n\n```\n\n\n\n## Build Steps:\n\n1. Make sure Rust is installed on your system with the appropriate toolchains\n\n2. Clone this repository to a folder somewhere\n\n3. `cd` into the created directory\n\n4. Run `cargo build` to produce a binary \n\n5. Find some facts and put them in the `facts` directory as `cat_facts.json` and `dog_facts.json` respectively\n", "file_path": "README.md", "rank": 77, "score": 2.727062706413487 }, { "content": "\n\n#[derive(Clone, Serialize, Deserialize, Debug)]\n\npub struct AdminFlagRequest {\n\n pub key: String,\n\n pub fact_id: Option<u64>,\n\n pub flag_id: Option<u64>,\n\n pub reason: Option<String>,\n\n pub fact_type: Option<Animal>,\n\n}\n\n\n\n#[derive(Clone, Serialize, Deserialize, Debug)]\n\npub struct Flagger {\n\n pub location: String,\n\n pub key: String,\n\n}\n\n\n\n#[derive(Clone, Serialize, Deserialize, Debug)]\n\npub struct FactFlagRequest {\n\n pub fact_type: Animal,\n\n pub fact_id: u64,\n", "file_path": "src/lib.rs", "rank": 78, "score": 1.9733775487294456 }, { "content": " };\n\n\n\n let FactFlagRequest {\n\n fact_type,\n\n fact_id,\n\n reason,\n\n ..\n\n } = body;\n\n let id = rand::thread_rng().next_u64();\n\n\n\n let mut flag_list = flag_list.write().unwrap();\n\n\n\n // Check to make sure the targeted fact exists\n\n if !flag_list\n\n .iter()\n\n .enumerate()\n\n .any(|(_, flag)| flag.fact_id == fact_id)\n\n {\n\n return generate_response(&RESP_ID_NOT_FOUND);\n\n }\n", "file_path": "src/flagging.rs", "rank": 79, "score": 1.902191415849452 }, { "content": " &admin_list[0],\n\n Some(Perms {\n\n view_facts: true,\n\n add_fact: true,\n\n delete_fact: false,\n\n view_flags: true,\n\n add_flag: true,\n\n delete_flag: false,\n\n }),\n\n ));\n\n assert_eq!(check_admin_perms(&admin_list[0].key, &admin_list), expected);\n\n }\n\n}\n", "file_path": "src/admin.rs", "rank": 80, "score": 1.7901076347184022 }, { "content": "6. Admin keys are loaded from `admin_keys.json`. See `example_keys.json` for the required structure.\n\n\n\n## Testing\n\nThe entire API is extensively tested, covering all possible *valid* behavior. Bad syntax, variable types, etc are automatically handled by Actix.\n\nWhen contributing, we ask that you assure that existing tests pass, or are modified to fit changed behavior if required. It is also preferable that\n\nany new endpoints added have corresponding integration tests.\n\n\n", "file_path": "README.md", "rank": 81, "score": 1.7697086387716219 }, { "content": " delete_flag: false,\n\n },\n\n }\n\n }\n\n\n\n fn gen_admin_no_perms() -> Admin {\n\n Admin {\n\n name: \"Tester\".to_string(),\n\n key: \"no_perms\".to_string(),\n\n permissions: Perms {\n\n view_facts: false,\n\n add_fact: false,\n\n delete_fact: false,\n\n view_flags: false,\n\n add_flag: false,\n\n delete_flag: false,\n\n },\n\n }\n\n }\n\n\n", "file_path": "src/admin.rs", "rank": 82, "score": 1.7540813182772892 }, { "content": " id,\n\n fact_type: set_flag.0,\n\n fact_id: set_flag.1,\n\n reason: set_flag.2,\n\n flagger: user.name.clone(),\n\n });\n\n\n\n modify_persistent_flag(flag_list, state)\n\n }\n\n\n\n info!(\"Flag #{} added by {}\", id, user.name);\n\n let resp = JsonResp::new(201, CreatedAction::Flag.as_str());\n\n generate_response(&resp)\n\n}\n\n\n", "file_path": "src/admin.rs", "rank": 83, "score": 1.5882037658373007 }, { "content": " pub reason: Option<String>,\n\n pub key: String,\n\n // This shouldn't be abusable because it still requires auth from a known flagger\n\n pub flagger: Option<String>,\n\n}\n\n\n\n#[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]\n\npub struct FactFlag {\n\n pub id: u64,\n\n pub fact_type: Animal,\n\n pub fact_id: u64,\n\n pub reason: Option<String>,\n\n pub flagger: String,\n\n}\n\n\n\n#[derive(Clone, Copy, Serialize, Deserialize, Debug, PartialEq)]\n\npub enum Animal {\n\n Cat,\n\n Dog,\n\n}\n", "file_path": "src/lib.rs", "rank": 84, "score": 1.4128695344625424 }, { "content": "pub struct Perms {\n\n pub view_facts: bool,\n\n pub add_fact: bool,\n\n pub delete_fact: bool,\n\n pub view_flags: bool,\n\n pub add_flag: bool,\n\n pub delete_flag: bool,\n\n}\n\n\n\n#[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]\n\npub struct Admin {\n\n pub name: String,\n\n pub key: String,\n\n pub permissions: Perms,\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, Deserialize)]\n\npub struct ServerConfig {\n\n pub ip: IpAddr,\n\n pub port: u16,\n", "file_path": "src/lib.rs", "rank": 85, "score": 1.4015100331134605 }, { "content": "\n\n let fact_count: IntCounterVec = IntCounterVec::new(\n\n Opts::new(\"fact_count\", \"How many animal facts are currently loaded\"),\n\n &[\"animal\"],\n\n )\n\n .unwrap();\n\n let flag_count: IntCounter =\n\n IntCounter::new(\"flag_count\", \"How many facts have been flagged\").unwrap();\n\n let req_count: IntCounterVec = IntCounterVec::new(\n\n Opts::new(\"api_request_count\", \"How many requests we have served\"),\n\n &[\"animal\"],\n\n )\n\n .unwrap();\n\n\n\n let reg = Registry::new();\n\n reg.register(Box::new(fact_count.clone())).unwrap();\n\n reg.register(Box::new(req_count.clone())).unwrap();\n\n\n\n APIState {\n\n fact_lists: load_fact_lists(&fact_count, &config),\n\n fact_flags: load_fact_flags(&flag_count, &config),\n\n config,\n\n stat_register: reg,\n\n req_counter: req_count,\n\n }\n\n}\n\n\n", "file_path": "tests/generator.rs", "rank": 86, "score": 1.0209105088460109 } ]
Rust
examples/bank-emulator/integration-tests/src/fees.rs
m10io/sdk
62f773304da0567986d5fc33c282649238c9f5c2
use m10_bank_emulator::models::*; use rust_decimal::Decimal; use super::base_url; use super::utils::*; #[tokio::test] async fn fees_routes() { let jwt = default_user_jwt().await; let client = reqwest::Client::default(); let tr_fees = FeeMetadata { schedule: FeeSchedule { fees: vec![ FeeBracket { range: 0..=10, polynomial: vec![Decimal::new(25, 0)], }, FeeBracket { range: 11..=20, polynomial: vec![Decimal::new(50, 0)], }, FeeBracket { range: 21..=501, polynomial: vec![Decimal::new(100, 0)], }, FeeBracket { range: 502..=u64::MAX, polynomial: vec![Decimal::new(125, 0)], }, ], }, split: vec![ FeeSplit { name: "M10".to_string(), percent: Decimal::new(75, 2), account: [0, 1, 2, 3, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16], }, FeeSplit { name: "DD".to_string(), percent: Decimal::new(25, 2), account: [0, 1, 2, 3, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16], }, ], }; let wd_fees = FeeMetadata { schedule: FeeSchedule { fees: vec![ FeeBracket { range: 0..=10, polynomial: vec![Decimal::new(50, 0)], }, FeeBracket { range: 11..=20, polynomial: vec![Decimal::new(75, 0)], }, FeeBracket { range: 21..=501, polynomial: vec![Decimal::new(100, 0)], }, FeeBracket { range: 502..=u64::MAX, polynomial: vec![Decimal::new(125, 0)], }, ], }, split: vec![ FeeSplit { name: "M10".to_string(), percent: Decimal::new(50, 2), account: [0, 1, 2, 3, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16], }, FeeSplit { name: "DD".to_string(), percent: Decimal::new(50, 2), account: [0, 1, 2, 3, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16], }, ], }; client .post(format!("{}/api/v1/fees/ttt/transfer", base_url())) .bearer_auth(&jwt) .json(&tr_fees) .send() .await .expect("insert/update transfer fee schedule response") .assert_json::<FeeMetadata>() .await; client .post(format!("{}/api/v1/fees/ttt/withdraw", base_url())) .bearer_auth(&jwt) .json(&wd_fees) .send() .await .expect("insert/update withdraw fee schedule response") .assert_json::<FeeMetadata>() .await; let resp = client .get(format!("{}/api/v1/fees/ttt/transfer", base_url())) .bearer_auth(&jwt) .send() .await .unwrap() .assert_json::<FeeMetadata>() .await; assert_eq!(Decimal::new(75, 2), resp.split[0].percent); let resp = client .get(format!("{}/api/v1/fees/ttt/withdraw", base_url())) .bearer_auth(&jwt) .send() .await .unwrap() .assert_json::<FeeMetadata>() .await; assert_eq!(Decimal::new(50, 2), resp.split[0].percent); let resp = client .get(format!("{}/api/v1/fees/ttt/transfer/30300", base_url())) .bearer_auth(&jwt) .send() .await .expect("get transfer fee amount response") .assert_json::<FeeResponse>() .await; assert_eq!(94, resp.fees[0].amount); let resp = client .get(format!("{}/api/v1/fees/ttt/withdraw/60600", base_url())) .bearer_auth(&jwt) .send() .await .expect("get withdraw fee amount response") .assert_json::<FeeResponse>() .await; assert_eq!(63, resp.fees[0].amount); client .get(format!("{}/api/v1/fees/ttt/deposit/60600", base_url())) .bearer_auth(&jwt) .send() .await .expect("get deposit fee amount response") .assert_status(404) .await; }
use m10_bank_emulator::models::*; use rust_decimal::Decimal; use super::base_url; use super::utils::*; #[tokio::test] async fn fees_routes() { let jwt = default_user_jwt().await; let client = reqwest::Client::default(); let tr_fees = FeeMetadata { schedule: FeeSchedule { fees: vec![ FeeBracket { range: 0..=10, polynomial: vec![Decimal::new(25, 0)], }, FeeBracket { range: 11..=20, polynomial: vec![Decimal::new(50, 0)], }, FeeBracket { range: 21..=501, polynomial: vec![Decimal::new(100, 0)], }, FeeBracket { range: 502..=u64::MAX, polynomial: vec![Decimal::new(125, 0)], }, ], }, split: vec![ FeeSplit { name: "M10".to_string(), percent: Decimal::new(75, 2), account: [0, 1, 2, 3, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16], }, FeeSplit { name: "DD".to_string(), percent: Decimal::new(25, 2), account: [0, 1, 2, 3, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16], }, ], }; let wd_fees = FeeMetadata { schedule: FeeSchedule { fees: vec![ FeeBracket { range: 0..=10, polynomial: vec![Decimal::new(50, 0)], }, FeeBracket { range: 11..=20, polynomial: vec![Decimal::new(75, 0)], }, FeeBracket { range: 21..=501, polynomial: vec![Decimal::new(100, 0)], }, FeeBracket { range: 502..=u64::MAX, polynomial: vec![Decimal::new(125, 0)], }, ], }, split: vec![ FeeSplit { name: "M10".to_string(), percent: Decimal::new(50, 2), account: [0, 1, 2, 3, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16], }, FeeSplit { name: "DD".to_string(), percent: Decimal::new(50, 2), account: [0, 1, 2, 3, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16], }, ], }; client .post(format!("{}/api/v1/fees/ttt/transfer", base_url())) .bearer_auth(&jwt) .json(&tr_fees) .send() .await .expect("insert/update transfer fee schedule response") .assert_json::<FeeMetadata>() .await; client .post(format!("{}/api/v1/fees/ttt/withdraw", base_url())) .bearer_auth(&jwt) .json(&wd_fees) .send() .await .expect("insert/update withdraw fee schedule response") .assert_json::<FeeMetadata>() .await; let resp = client .get(format!("{}/api/v1/fees/ttt/transfer", base_url())) .bearer_auth(&jwt) .send() .await .unwrap() .assert_json::<FeeMetadata>() .await; assert_eq!(Decimal::new(75, 2), resp.split[0].percent); let resp = client .get(format!("{}/api/v1/fees/ttt/withdraw", base_url())) .bearer_auth(&jwt) .send() .await .unwrap() .assert_json::<FeeMetadata>() .await; assert_eq!(Decimal::new(50, 2), resp.split[0].percent); let resp = client .get(format!("{}/api/v1/fees/ttt/transfer/30300", base_url())) .bearer_auth(&jwt) .send() .await .expect("get transfer fee amount response") .assert_json::<FeeResponse>() .await; assert_eq!(94, resp.fees[0].amount); let resp = client .get(format!("{}/api/v1/fees/ttt/withdraw/60600", base_url())) .bearer_auth(&jwt) .send() .awai
t .expect("get withdraw fee amount response") .assert_json::<FeeResponse>() .await; assert_eq!(63, resp.fees[0].amount); client .get(format!("{}/api/v1/fees/ttt/deposit/60600", base_url())) .bearer_auth(&jwt) .send() .await .expect("get deposit fee amount response") .assert_status(404) .await; }
function_block-function_prefixed
[ { "content": "fn with_additional_protos(mut protos: Vec<String>, additional_paths: &[&str]) -> Vec<String> {\n\n for path in additional_paths {\n\n protos.push(proto_path(path).display().to_string());\n\n }\n\n protos\n\n}\n", "file_path": "rust/protos/build.rs", "rank": 0, "score": 121661.76002624292 }, { "content": "CREATE FUNCTION public.instant_transfer(p_txn_id uuid, p_from_account bigint, p_to_account bigint, p_amount bigint, p_reference text)\n\nRETURNS uuid\n\nLANGUAGE\n\nplpgsql\n\nAS\n\n$$\n\nDECLARE\n\n source_account record;\n\n target_account record;\n\n debit_txn bank_transaction_data;\n\n credit_txn bank_transaction_data;\n\nBEGIN\n\n -- check source account conditions\n\n SELECT *\n\n INTO source_account\n\n FROM public.bank_accounts\n\n WHERE id = p_from_account;\n\n \n\n IF NOT found THEN\n\n raise 'Source account with id % not found', p_from_account;\n", "file_path": "examples/bank-emulator/migrations/20220314114211_bank_emu.sql", "rank": 1, "score": 117612.73002511625 }, { "content": "CREATE FUNCTION public.withdraw(p_txn_id uuid, p_from_account bigint, p_to_account bigint, p_amount bigint, p_reference text, p_routing jsonb)\n\nRETURNS uuid\n\nLANGUAGE\n\nplpgsql\n\nAS\n\n$$\n\nDECLARE\n\n source_account record;\n\n target_account record;\n\n debit_txn bank_transaction_data;\n\n credit_txn bank_transaction_data;\n\nBEGIN\n\n -- check source account conditions\n\n SELECT *\n\n INTO source_account\n\n FROM public.bank_accounts\n\n WHERE id = p_from_account;\n\n \n\n IF NOT found THEN\n\n raise 'Source account with id % not found', p_from_account;\n", "file_path": "examples/bank-emulator/migrations/20220314114211_bank_emu.sql", "rank": 2, "score": 116192.2840194012 }, { "content": "pub fn scope() -> Scope {\n\n actix_web::web::scope(\"fees\")\n\n .service(create_schedule)\n\n .service(get_schedule)\n\n .service(get)\n\n}\n", "file_path": "examples/bank-emulator/src/controllers/fees.rs", "rank": 3, "score": 108248.809790972 }, { "content": "pub fn scope() -> Scope {\n\n actix_web::web::scope(\"accounts\")\n\n .service(create)\n\n .service(list)\n\n .service(delete)\n\n .service(get)\n\n .service(list_assets)\n\n .service(get_asset)\n\n .service(freeze_asset)\n\n .service(list_payments)\n\n .service(get_payment)\n\n .service(unfreeze_asset)\n\n .service(list_notification_preferences)\n\n .service(deposit)\n\n .service(fund)\n\n .service(open)\n\n .service(settle_deposit)\n\n .service(settle_withdraw)\n\n .service(withdraw)\n\n}\n", "file_path": "examples/bank-emulator/src/controllers/accounts.rs", "rank": 4, "score": 107012.8522963382 }, { "content": "pub fn scope() -> Scope {\n\n actix_web::web::scope(\"transfer_methods\")\n\n .service(create)\n\n .service(deactivate)\n\n .service(list)\n\n .service(get)\n\n}\n", "file_path": "examples/bank-emulator/src/controllers/transfer_methods.rs", "rank": 5, "score": 106541.30126067864 }, { "content": "CREATE FUNCTION public.withdraw_with_contact_method(p_txn_id uuid, p_contact bigint, p_to_account bigint, p_amount bigint, p_reference text)\n\nRETURNS uuid\n\nLANGUAGE\n\nplpgsql\n\nAS\n\n$$\n\nDECLARE\n\n contact record;\n\n source_account record;\n\n target_account record;\n\n debit_txn bank_transaction_data;\n\n credit_txn bank_transaction_data;\n\nBEGIN\n\n -- fetch contact\n\n SELECT *\n\n INTO contact\n\n FROM public.bank_contact\n\n WHERE id = p_from_account;\n\n\n\n IF NOT found THEN\n", "file_path": "examples/bank-emulator/migrations/20220314114211_bank_emu.sql", "rank": 6, "score": 104337.52987572139 }, { "content": "fn parse_account_id(val: &str) -> anyhow::Result<AccountId> {\n\n let bytes = hex::decode(&val)?;\n\n let id = AccountId::try_from_be_slice(&bytes)?;\n\n Ok(id)\n\n}\n", "file_path": "examples/m10_cli/src/commands/observe/mod.rs", "rank": 7, "score": 101923.69722878629 }, { "content": "fn find_protos_in_dirs(paths: &[&str]) -> Result<Vec<String>, Box<dyn std::error::Error>> {\n\n let mut files = vec![];\n\n for path in paths {\n\n files.extend(find_protos_in_dir(&proto_path(path))?);\n\n }\n\n Ok(files)\n\n}\n\n\n", "file_path": "rust/protos/build.rs", "rank": 8, "score": 96570.19992471006 }, { "content": "fn find_protos_in_dir(path: &Path) -> Result<Vec<String>, Box<dyn std::error::Error>> {\n\n let mut files = vec![];\n\n for dir_entry in fs::read_dir(path)? {\n\n let dir_entry = dir_entry?.path();\n\n if dir_entry.is_file()\n\n && dir_entry\n\n .extension()\n\n .map(|ext| ext == \"proto\")\n\n .unwrap_or(false)\n\n {\n\n files.push(dir_entry.as_path().display().to_string())\n\n }\n\n if dir_entry.is_dir() {\n\n let sub_dir_files = find_protos_in_dir(dir_entry.as_path())?;\n\n files.extend_from_slice(&sub_dir_files);\n\n }\n\n }\n\n Ok(files)\n\n}\n\n\n", "file_path": "rust/protos/build.rs", "rank": 9, "score": 96570.19992471006 }, { "content": "#[derive(sqlx::FromRow)]\n\nstruct Metadata<T: DeserializeOwned + Serialize + Send + Sync> {\n\n key: String,\n\n value: Json<T>,\n\n}\n\n\n\nimpl<T> Metadata<T>\n\nwhere\n\n T: DeserializeOwned + Serialize + Send + Sync + Unpin + 'static,\n\n{\n\n async fn insert(&self, db: impl Executor<'_, Database = Postgres>) -> Result<(), Error> {\n\n sqlx::query(\n\n \"INSERT INTO metadata (key, value) VALUES ($1, $2)\n\n ON CONFLICT (key) DO UPDATE SET value = $2 WHERE metadata.key = $1;\",\n\n )\n\n .bind(&self.key)\n\n .bind(&self.value)\n\n .execute(db)\n\n .await?;\n\n Ok(())\n\n }\n", "file_path": "examples/bank-emulator/src/models/fees.rs", "rank": 10, "score": 96007.26869903528 }, { "content": "#[cfg(test)]\n\nfn base_url() -> String {\n\n std::env::var(\"BANK_EMULATOR_URL\").unwrap_or_else(|_| \"http://localhost:8080\".to_string())\n\n}\n\n\n", "file_path": "examples/bank-emulator/integration-tests/src/lib.rs", "rank": 11, "score": 91212.074043045 }, { "content": "CREATE INDEX assets_linked_account ON public.assets USING btree (linked_account);\n", "file_path": "examples/bank-emulator/migrations/20220217094514_init.sql", "rank": 12, "score": 88933.77827374129 }, { "content": "#[derive(serde::Deserialize)]\n\nstruct JWTResp {\n\n access_token: String,\n\n}\n\n\n\n#[async_trait::async_trait]\n\npub(super) trait FallibleRequest {\n\n async fn assert_json<T: DeserializeOwned>(self) -> T;\n\n async fn assert_success(self);\n\n async fn assert_status<T>(self, expected_status: T)\n\n where\n\n T: Display + Debug + Send,\n\n StatusCode: PartialEq<T>;\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl FallibleRequest for Response {\n\n async fn assert_json<T: DeserializeOwned>(self) -> T {\n\n let status = self.status();\n\n if !status.is_success() {\n\n let msg = self.text().await.expect(\"Error did not contain text\");\n", "file_path": "examples/bank-emulator/integration-tests/src/utils.rs", "rank": 13, "score": 88065.93243487724 }, { "content": " sendMessage(content = {}) {\n\n this.stream.write(content)\n\n return this\n", "file_path": "node/m10-sdk/lib/utils/grpc/client-stream-request.js", "rank": 14, "score": 87837.97957581039 }, { "content": "CREATE INDEX assets_ledger_account_id ON public.assets USING btree (ledger_account_id);\n\n\n", "file_path": "examples/bank-emulator/migrations/20220217094514_init.sql", "rank": 15, "score": 86518.85943404771 }, { "content": "CREATE TABLE public.bank_account_ranges (\n\n id SERIAL PRIMARY KEY,\n\n account_type bank_account_type NOT NULL,\n\n idx integer NOT NULL UNIQUE,\n\n max_number integer NOT NULL,\n\n last_used_number integer DEFAULT 0\n\n);\n\n\n", "file_path": "examples/bank-emulator/migrations/20220314114211_bank_emu.sql", "rank": 16, "score": 83411.13603435861 }, { "content": " async createTransfer({\n\n fromAccountId = required('fromAccountId'),\n\n toAccountId = required('toAccountId'),\n\n amount = required('amount'),\n\n memo,\n\n contract,\n\n signer,\n\n contextId,\n\n }) {\n\n Hex.verify(fromAccountId)\n\n Hex.verify(toAccountId)\n\n\n\n // TODO @sadroeck: Make multi-transfer aware\n\n const step = new Transaction.TransferStep()\n\n step.setFromAccountId(Hex.toUint8Array(fromAccountId))\n\n step.setToAccountId(Hex.toUint8Array(toAccountId))\n\n step.setAmount(amount)\n\n if (memo != null) {\n\n step.addMetadata(memoMetadata(memo))\n\n }\n\n if (contract != null) {\n\n contextId = contractId(contract)\n\n step.addMetadata(contractMetadata(contract))\n\n }\n\n\n\n const transfer = new Transaction.CreateTransfer()\n\n transfer.setTransferStepsList([step])\n\n const request = createTransferFrom(transfer)\n\n const envelop = await signedEnvelopFrom({ request, signer, contextId })\n\n const res = await this.txClient.createTransaction(envelop)\n\n return res.getTxId()\n", "file_path": "node/m10-sdk/lib/api/index.js", "rank": 17, "score": 81273.3113118457 }, { "content": " async getTransfer({ id = required('id'), signer }) {\n\n Hex.verify(id)\n\n\n\n const request = new Transaction.GetTransferRequest()\n\n request.setTxId(id)\n\n const envelop = await signedEnvelopFrom({ request, signer })\n\n const tx = await this.queryClient.getTransfer(envelop)\n\n return parseTransfer(tx)\n", "file_path": "node/m10-sdk/lib/api/index.js", "rank": 18, "score": 81268.15651586287 }, { "content": " async observeTransfers({\n\n involvesAccounts = required('involvesAccounts'),\n\n startingFrom,\n\n signer,\n\n }, options = {}) {\n\n const request = new observe.Transfers()\n\n request.setInvolvedAccountsList(involvesAccounts.map(Hex.toUint8Array))\n\n if (startingFrom != null) {\n\n const txId = new TxId()\n\n txId.setTxId(startingFrom)\n\n request.setStartingFrom(txId)\n\n }\n\n const envelop = await signedEnvelopFrom({ request, signer })\n\n const stream = await this.queryClient.observeTransfers(envelop, options)\n\n\n\n stream.on('error', err => {\n\n // Ignore connection CANCELED (1) events\n\n if (err.code !== 1) {\n\n console.error(err)\n\n }\n\n })\n\n const resultStream = stream.pipe(transferResultStream())\n\n resultStream.on('close', () => {\n\n setImmediate(() => stream.cancel())\n\n stream.destroy()\n\n })\n\n return resultStream\n", "file_path": "node/m10-sdk/lib/api/index.js", "rank": 19, "score": 81268.15651586287 }, { "content": " async listTransfers({\n\n accountId,\n\n contextId,\n\n minTxId = 0,\n\n limit = 10,\n\n includeChildAccounts = false,\n\n signer,\n\n }) {\n\n const request = new Transaction.ListTransferRequest()\n\n\n\n const filterCount = (contextId ? 1 : 0) + (accountId ? 1 : 0)\n\n if (filterCount !== 1) {\n\n throw new Error('Invalid filter')\n\n }\n\n\n\n if (accountId != null) {\n\n Hex.verify(accountId)\n\n request.setAccountId(Hex.toUint8Array(accountId))\n\n }\n\n if (contextId != null) {\n\n Hex.verify(contextId)\n\n request.setContextId(Hex.toUint8Array(contextId))\n\n }\n\n\n\n request.setMinTxId(minTxId)\n\n request.setLimit(limit)\n\n request.setIncludeChildAccounts(includeChildAccounts)\n\n\n\n const envelop = await signedEnvelopFrom({ request, signer })\n\n const transfers = await this.queryClient.listTransfers(envelop)\n\n return transfers.getTransfersList().map(parseTransfer)\n", "file_path": "node/m10-sdk/lib/api/index.js", "rank": 20, "score": 81268.15651586287 }, { "content": " async _enhanceTransfer({ transfer = required('transfer'), signer }) {\n\n const from = await this.getAccountInfo({ id: transfer.fromAccountId, signer })\n\n const fromBank = from.parentId\n\n ? await this.getAccountInfo({ id: from.parentId, signer })\n\n : null\n\n const to = await this.getAccountInfo({ id: transfer.toAccountId, signer })\n\n const toBank = to.parentId ? await this.getAccountInfo({ id: to.parentId, signer }) : null\n\n\n\n return {\n\n to,\n\n from,\n\n fromBank,\n\n toBank\n\n }\n", "file_path": "node/m10-sdk/lib/api/index.js", "rank": 21, "score": 81268.15651586287 }, { "content": "CREATE FUNCTION public.deposit(p_txn_id uuid, p_from_account bigint, p_to_account bigint, p_amount bigint, p_reference text, p_routing jsonb)\n\nRETURNS uuid\n\nLANGUAGE\n\nplpgsql\n\nAS\n\n$$\n\nDECLARE\n\n source_account record;\n\n target_account record;\n\n debit_txn bank_transaction_data;\n\n credit_txn bank_transaction_data;\n\nBEGIN\n\n --- check source account conditions\n\n SELECT *\n\n INTO source_account\n\n FROM public.bank_accounts\n\n WHERE id = p_from_account;\n\n\n\n IF NOT found THEN\n\n raise 'Source account with id % not found', p_from_account;\n", "file_path": "examples/bank-emulator/migrations/20220314114211_bank_emu.sql", "rank": 22, "score": 81039.82185251762 }, { "content": " async createAccount({\n\n id,\n\n name,\n\n publicName,\n\n profileImageUrl,\n\n owner,\n\n contextId,\n\n signer,\n\n }) {\n\n if (id) {\n\n Hex.verify(id)\n\n } else {\n\n id = Uuid.newId()\n\n id = Hex.fromUint8Array(id)\n\n }\n\n\n\n const account = new model.Account()\n\n account.setId(Hex.toUint8Array(id))\n\n\n\n if (name != null) {\n\n account.setName(name)\n\n }\n\n if (publicName != null) {\n\n account.setPublicName(publicName)\n\n }\n\n if (profileImageUrl != null) {\n\n account.setProfileImageUrl(profileImageUrl)\n\n }\n\n if (owner != null) {\n\n if (owner instanceof Uint8Array) {\n\n account.setOwner(owner)\n\n } else {\n\n Base64.verify(owner)\n\n account.setOwner(Base64.toUint8Array(owner))\n\n }\n\n } else {\n\n account.setOwner(await signer.getPublicKey())\n\n }\n\n\n\n const request = createTransactionFrom(account)\n\n const envelop = await signedEnvelopFrom({ request, signer, contextId })\n\n await this.txClient.createTransaction(envelop)\n\n\n\n // NOTE: account ids are in hex rather than uuid format\n\n return id\n", "file_path": "node/m10-sdk/lib/api/index.js", "rank": 23, "score": 80162.08211642348 }, { "content": " async getAccount({ id = required('id'), signer }) {\n\n Hex.verify(id)\n\n\n\n const request = new query.GetAccountRequest()\n\n request.setId(Hex.toUint8Array(id))\n\n const envelop = await signedEnvelopFrom({ request, signer })\n\n const account = await this.queryClient.getAccount(envelop)\n\n\n\n return {\n\n id: id,\n\n owner: Base64.fromUint8Array(account.getOwner()),\n\n name: account.getName(),\n\n publicName: account.getPublicName(),\n\n profileImageUrl: account.getProfileImageUrl(),\n\n }\n", "file_path": "node/m10-sdk/lib/api/index.js", "rank": 24, "score": 80157.35194090019 }, { "content": " async updateAccount({\n\n id = required('id'),\n\n name,\n\n publicName,\n\n profileImageUrl,\n\n signer,\n\n contextId,\n\n owner,\n\n }) {\n\n Hex.verify(id)\n\n\n\n const builder = new DocumentUpdateBuilder(new model.Account())\n\n builder.setId(Hex.toUint8Array(id))\n\n\n\n if (name != null) {\n\n builder.setName(name)\n\n builder.addMask('name')\n\n }\n\n if (publicName != null) {\n\n builder.setPublicName(publicName)\n\n builder.addMask('public_name')\n\n }\n\n if (profileImageUrl != null) {\n\n builder.setProfileImageUrl(profileImageUrl)\n\n builder.addMask('profile_image_url')\n\n }\n\n if (owner != null) {\n\n builder.setOwner(owner)\n\n builder.addMask('owner')\n\n }\n\n\n\n const request = updateTransactionFrom(builder)\n\n const envelop = await signedEnvelopFrom({ request, signer, contextId })\n\n const response = await this.txClient.createTransaction(envelop)\n\n if (response.hasError()) {\n\n throw response.getError()\n\n }\n\n return response.getTxId()\n", "file_path": "node/m10-sdk/lib/api/index.js", "rank": 25, "score": 80157.35194090019 }, { "content": " async listEnhancedTransfers({\n\n accountId,\n\n contextId,\n\n minTxId = 0,\n\n limit = 10,\n\n includeChildAccounts = false,\n\n signer,\n\n }) {\n\n const transfers = await this.listTransfers({ accountId, contextId, minTxId, limit, includeChildAccounts, signer })\n\n return await Promise.all(transfers.map(async transfer => {\n\n const enhanced = await this._enhanceTransfer({ transfer, signer })\n\n return transferToObject({ transfer, ...enhanced })\n\n }))\n", "file_path": "node/m10-sdk/lib/api/index.js", "rank": 26, "score": 79831.31400732875 }, { "content": " async getEnhancedTransfer({ id = required('id'), signer }) {\n\n const transfer = await this.getTransfer({ id, signer })\n\n const enhanced = await this._enhanceTransfer({ transfer, signer })\n\n return transferToObject({ transfer, ...enhanced })\n", "file_path": "node/m10-sdk/lib/api/index.js", "rank": 27, "score": 79831.31400732875 }, { "content": " async getLedgerAccount({ accountId = required('accountId'), signer }) {\n\n Hex.verify(accountId)\n\n\n\n const request = new Transaction.GetAccountRequest()\n\n request.setId(Hex.toUint8Array(accountId))\n\n\n\n const envelop = await signedEnvelopFrom({ request, signer })\n\n const account = await this.queryClient.getIndexedAccount(envelop)\n\n const ledgerAccount = {\n\n id: Hex.fromUint8Array(account.getId()),\n\n balance: account.getBalance(),\n\n frozen: account.getFrozen(),\n\n }\n\n if (account.hasIssuance()) {\n\n const issuance = account.getIssuance()\n\n ledgerAccount.issuance = {\n\n issuedBalance: issuance.getIssuedBalance(),\n\n leafChildren: issuance.getLeafChildren(),\n\n nonLeafChildren: issuance.getNonLeafChildren(),\n\n }\n\n }\n\n return ledgerAccount\n", "file_path": "node/m10-sdk/lib/api/index.js", "rank": 28, "score": 78744.87892666325 }, { "content": " async findAccountByOwner({ owner = required('owner'), signer }) {\n\n Base64.verify(owner)\n\n\n\n const request = new query.ListAccountsRequest()\n\n request.setOwner(Base64.toUint8Array(owner))\n\n const envelop = await signedEnvelopFrom({ request, signer })\n\n const res = (await this.queryClient.listAccounts(envelop))\n\n .getAccountsList()\n\n .map(account => {\n\n return {\n\n id: Hex.fromUint8Array(account.getId()),\n\n owner: Base64.fromUint8Array(account.getOwner()),\n\n name: account.getName(),\n\n publicName: account.getPublicName(),\n\n profileImageUrl: account.getProfileImageUrl(),\n\n }\n\n })\n\n\n\n return res\n", "file_path": "node/m10-sdk/lib/api/index.js", "rank": 29, "score": 78740.14875113996 }, { "content": " async createLedgerAccount({\n\n parentId = required('parentId'),\n\n name,\n\n publicName,\n\n profileImageUrl,\n\n owner,\n\n contextId,\n\n signer,\n\n }) {\n\n Hex.verify(parentId)\n\n\n\n const txn = new Transaction.CreateLedgerAccount()\n\n txn.setParentId(Hex.toUint8Array(parentId))\n\n\n\n const request = createLedgerAccountRequestFrom(txn)\n\n const envelop = await signedEnvelopFrom({ request, signer, contextId })\n\n const response = await this.txClient.createTransaction(envelop)\n\n const hexId = Hex.fromUint8Array(response.getAccountCreated_asU8())\n\n if (owner) {\n\n await this.createAccount({\n\n id: hexId, name, publicName, profileImageUrl, owner, contextId, signer\n\n })\n\n }\n\n return hexId\n", "file_path": "node/m10-sdk/lib/api/index.js", "rank": 30, "score": 78740.14875113996 }, { "content": " async getAccountInfo({ id = required('id'), signer }) {\n\n Hex.verify(id)\n\n\n\n const request = new query.GetAccountRequest()\n\n request.setId(Hex.toUint8Array(id))\n\n const envelop = await signedEnvelopFrom({ request, signer })\n\n const info = await this.queryClient.getAccountInfo(envelop)\n\n\n\n return {\n\n id: id,\n\n parentId: info.getParentAccountId() != null ? Hex.fromUint8Array(info.getParentAccountId()) : null,\n\n publicName: info.getPublicName(),\n\n profileImageUrl: info.getProfileImageUrl(),\n\n }\n", "file_path": "node/m10-sdk/lib/api/index.js", "rank": 31, "score": 78740.14875113996 }, { "content": " transfer({ ledgerId, fromAccountId, toAccountId, amount, memo = '' }) {\n\n const step = new Transaction.TransferStep()\n\n step.setFromAccountId(Hex.toUint8Array(fromAccountId))\n\n step.setToAccountId(Hex.toUint8Array(toAccountId))\n\n step.setAmount(amount)\n\n step.addMetadata(memoMetadata(memo))\n\n\n\n const transfer = new Transaction.CreateTransfer()\n\n transfer.setTransferStepsList([step])\n\n\n\n const transferRequest = new Transaction.CreateLedgerTransfer()\n\n transferRequest.setLedgerId(ledgerId)\n\n transferRequest.setNonce(\n\n Math.floor(Math.random() * Number.MAX_SAFE_INTEGER)\n\n )\n\n transferRequest.setTransfer(transfer)\n\n\n\n this.transfers.push(transferRequest)\n\n return this\n", "file_path": "node/m10-sdk/lib/api/contract/builder.js", "rank": 32, "score": 77099.94720120727 }, { "content": "CREATE FUNCTION public.new_account(p_idx integer, p_name text, p_currency text)\n\nRETURNS bigint\n\nLANGUAGE\n\nplpgsql\n\nAS\n\n$$\n\nDECLARE\n\n v_account_type record;\n\n v_account record;\n\n v_account_number integer;\n\nBEGIN\n\n SELECT *\n\n INTO v_account_type\n\n FROM public.bank_account_ranges\n\n WHERE idx = p_idx;\n\n\n\n IF NOT found THEN\n\n raise 'No such account index';\n\n END IF;\n\n\n", "file_path": "examples/bank-emulator/migrations/20220314114211_bank_emu.sql", "rank": 33, "score": 76277.43794086232 }, { "content": "fn main() {\n\n tauri_build::build()\n\n}\n", "file_path": "examples/cbdc-admin/src-tauri/build.rs", "rank": 34, "score": 74462.81882114767 }, { "content": "CREATE FUNCTION public.open_account(p_idx integer, p_name text, p_currency text, p_balance bigint)\n\nRETURNS bigint\n\nLANGUAGE\n\nplpgsql\n\nAS\n\n$$\n\nDECLARE\n\n account_id bigint;\n\nBEGIN\n\n account_id := public.new_account(p_idx, p_name, p_currency);\n\n UPDATE public.bank_accounts\n\n SET account_status = 'open',\n\n balance = p_balance\n\n WHERE id = account_id;\n\n\n\n IF NOT found THEN\n\n raise 'No such account index';\n\n END IF;\n\n\n\n return account_id; \n\nEND;\n\n$$;\n\n\n", "file_path": "examples/bank-emulator/migrations/20220314114211_bank_emu.sql", "rank": 35, "score": 73758.37796108902 }, { "content": "CREATE FUNCTION public.open_test_account(p_idx integer, p_name text, p_currency text, p_balance bigint)\n\nRETURNS bigint\n\nLANGUAGE\n\nplpgsql\n\nAS\n\n$$\n\nDECLARE\n\n account_id bigint;\n\n v_account record;\n\n v_bank_reference jsonb;\n\nBEGIN\n\n account_id := public.new_account(p_idx, p_name, p_currency);\n\n UPDATE public.bank_accounts\n\n SET account_status = 'open',\n\n balance = p_balance\n\n WHERE id = account_id\n\n RETURNING * INTO v_account;\n\n\n\n IF NOT found THEN\n\n raise 'No such account index';\n", "file_path": "examples/bank-emulator/migrations/20220316102110_iron_bank_test.sql", "rank": 36, "score": 71400.38258061011 }, { "content": "fn protobuf_dir() -> String {\n\n std::env::var(\"M10_PROTOBUFS\").unwrap_or_else(|_| \"../../protobuf\".to_string())\n\n}\n\n\n", "file_path": "rust/protos/build.rs", "rank": 37, "score": 70562.81369407881 }, { "content": "pub fn validate_token(\n\n jwks_r: &watch::Receiver<Jwks>,\n\n validation_options: &ValidationOptions,\n\n token: &str,\n\n) -> Result<Jwt, Error> {\n\n let jwt: Jwt = JWT::new_encoded(token).decode_with_jwks(&jwks_r.borrow(), None)?;\n\n jwt.validate(validation_options.clone()).map_err(|err| {\n\n error!(\"{:?}\", err);\n\n Error::unauthorized()\n\n })?;\n\n Ok(jwt)\n\n}\n\n\n\npub async fn watch_jwks(mut url: Url, jwks_s: watch::Sender<Jwks>) {\n\n url.set_path(\"/.well-known/jwks.json\");\n\n loop {\n\n if let Ok(jwks) = fetch_jwks(url.clone()).await {\n\n if jwks_s.send(jwks).is_err() {\n\n return;\n\n }\n", "file_path": "examples/bank-emulator/src/auth.rs", "rank": 38, "score": 68719.31153763083 }, { "content": "CREATE FUNCTION public.deposit_with_contact_method(p_txn_id uuid, p_from_account bigint, p_contact bigint, p_amount bigint, p_reference text, p_routing jsonb)\n\nRETURNS uuid\n\nLANGUAGE\n\nplpgsql\n\nAS\n\n$$\n\nDECLARE\n\n contact record;\n\n source_account record;\n\n target_account record;\n\n debit_txn bank_transaction_data;\n\n credit_txn bank_transaction_data;\n\nBEGIN\n\n -- fetch contact\n\n SELECT *\n\n INTO contact\n\n FROM public.bank_contact\n\n WHERE id = p_from_account;\n\n\n\n IF NOT found THEN\n", "file_path": "examples/bank-emulator/migrations/20220314114211_bank_emu.sql", "rank": 39, "score": 67106.19695508394 }, { "content": "#[cfg(test)]\n\nfn ledger_addr() -> String {\n\n std::env::var(\"LEDGER_ADDR\").unwrap_or_else(|_| \"https://develop.m10.net\".to_string())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {}\n", "file_path": "examples/bank-emulator/integration-tests/src/lib.rs", "rank": 40, "score": 67016.10570945796 }, { "content": "pub fn scope() -> Scope {\n\n actix_web::web::scope(\"assets\")\n\n .service(list_assets)\n\n .service(get_asset)\n\n .service(list_payments)\n\n .service(get_payment)\n\n .service(create_notification_preferences)\n\n .service(list_notification_preferences)\n\n}\n", "file_path": "examples/bank-emulator/src/controllers/assets.rs", "rank": 41, "score": 64403.782197165696 }, { "content": "pub fn scope() -> Scope {\n\n actix_web::web::scope(\"keys\").service(add)\n\n}\n", "file_path": "examples/bank-emulator/src/controllers/keys.rs", "rank": 42, "score": 64403.782197165696 }, { "content": "pub fn scope() -> Scope {\n\n actix_web::web::scope(\"documents\")\n\n .service(list)\n\n .service(upload)\n\n .service(get)\n\n .service(update)\n\n .service(verify)\n\n}\n", "file_path": "examples/bank-emulator/src/controllers/documents.rs", "rank": 43, "score": 64403.782197165696 }, { "content": "pub fn empty_jwks() -> Jwks {\n\n Jwks {\n\n keys: Vec::default(),\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct PrivateClaims {\n\n permissions: Vec<String>,\n\n scope: String,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct User {\n\n pub auth0_id: String,\n\n pub permissions: Vec<Permission>,\n\n pub token: String,\n\n}\n\n\n\nimpl User {\n", "file_path": "examples/bank-emulator/src/auth.rs", "rank": 44, "score": 64403.782197165696 }, { "content": "pub fn scope() -> Scope {\n\n actix_web::web::scope(\"contacts\")\n\n .service(create)\n\n .service(list)\n\n .service(update_own)\n\n .service(delete_own)\n\n .service(get)\n\n .service(update)\n\n .service(delete)\n\n .service(get_account)\n\n .service(list_assets)\n\n .service(get_asset)\n\n .service(freeze_asset)\n\n .service(list_payments)\n\n .service(get_payment)\n\n .service(unfreeze_asset)\n\n .service(list_notification_preferences)\n\n .service(list_documents)\n\n .service(upload_documents)\n\n .service(get_documents)\n", "file_path": "examples/bank-emulator/src/controllers/contacts.rs", "rank": 45, "score": 64403.782197165696 }, { "content": "pub fn scope() -> Scope {\n\n actix_web::web::scope(\"notification_preferences\")\n\n .service(list)\n\n .service(update)\n\n}\n", "file_path": "examples/bank-emulator/src/controllers/notification_preferences.rs", "rank": 46, "score": 63568.69594244873 }, { "content": "pub fn scope() -> Scope {\n\n actix_web::web::scope(\"public_key\").service(get)\n\n}\n", "file_path": "examples/bank-emulator/src/controllers/public_key.rs", "rank": 47, "score": 63568.69594244873 }, { "content": "#[async_trait::async_trait]\n\npub trait Signer: Send + Sync {\n\n /// Signs the passed message, and returns the signature.\n\n async fn sign(&self, msg: &[u8]) -> Result<Vec<u8>, SigningError>;\n\n\n\n /// Returns the public key associated with the signer\n\n fn public_key(&self) -> &[u8];\n\n\n\n /// Returns the signing algorithm used by the signer\n\n fn algorithm(&self) -> sdk::signature::Algorithm;\n\n\n\n /// Signs the payload, and returns a signature structure containing the algorithm, public-key, and signature\n\n async fn sign_payload(&self, payload: &[u8]) -> Result<sdk::Signature, SigningError> {\n\n Ok(sdk::Signature {\n\n algorithm: self.algorithm().into(),\n\n public_key: self.public_key().into(),\n\n signature: self.sign(payload).await?,\n\n })\n\n }\n\n\n\n /// Signs a [`Message`] and returns a [`SignedRequest`]\n", "file_path": "rust/signing/src/lib.rs", "rank": 48, "score": 63003.49442398848 }, { "content": "SELECT public.open_test_account(500000000, 'default-test-account', 'USD', 100000);\n\n\n", "file_path": "examples/bank-emulator/migrations/20220316102110_iron_bank_test.sql", "rank": 49, "score": 62649.948342507836 }, { "content": "// Aux functions\n\nfn proto_path(path: &str) -> PathBuf {\n\n Path::new(&protobuf_dir()).join(path)\n\n}\n\n\n", "file_path": "rust/protos/build.rs", "rank": 50, "score": 61823.665565518124 }, { "content": "pub fn init(config: &Config) {\n\n oneline_eyre::install().expect(\"Could not install logging\");\n\n let fmt_layer = tracing_subscriber::fmt::layer()\n\n .with_ansi(cfg!(debug_assertions))\n\n .with_target(false);\n\n let filter_layer = EnvFilter::new(&config.log_filter);\n\n tracing_subscriber::registry()\n\n .with(filter_layer)\n\n .with(fmt_layer)\n\n .init();\n\n}\n", "file_path": "examples/bank-emulator/src/logging.rs", "rank": 51, "score": 61823.665565518124 }, { "content": "pub fn memo(memo: &str) -> Any {\n\n Memo {\n\n plaintext: memo.to_string(),\n\n }\n\n .any()\n\n}\n\n\n", "file_path": "rust/protos/src/metadata.rs", "rank": 52, "score": 60688.242834026896 }, { "content": "class ClientStreamRequest {\n\n constructor(client, originalFunction, options = {}) {\n\n if (options == null) options = {}\n\n this.promise = new Promise((resolve, reject) => {\n\n // Deadline is advisable to be set\n\n // It should be a timestamp value in milliseconds\n\n let deadline\n\n if (options.timeout !== undefined) {\n\n deadline = Date.now() + options.timeout\n\n }\n\n this.stream = originalFunction.call(client, options.metadata, { deadline: deadline },\n\n function(error, response) {\n\n if (error) {\n\n reject(error)\n\n } else {\n\n resolve(response)\n\n }\n\n }\n\n )\n\n })\n\n }\n\n\n\n sendMessage(content = {}) {\n\n this.stream.write(content)\n\n return this\n\n }\n\n\n\n end() {\n\n this.stream.end()\n\n return this.promise\n\n }\n\n }\n\n\n\n const makeClientStreamRequest = function(client, originalFunction, options) {\n\n return function() {\n\n return new ClientStreamRequest(client, originalFunction, options)\n\n }\n\n }\n\n\n\n module.exports = makeClientStreamRequest\n", "file_path": "node/m10-sdk/lib/utils/grpc/client-stream-request.js", "rank": 53, "score": 60267.246680419965 }, { "content": "fn password(username: &str) -> String {\n\n // deterministic based on username, complies with Auth0 password policy\n\n let username_hash = digest::digest(&digest::SHA256, username.as_bytes());\n\n format!(\"{}-aA0\", &hex::encode(username_hash)[..10])\n\n}\n\n\n\nasync fn signup(email: &str) {\n\n let request = [\n\n (\"given_name\", \"integration tester\".to_string()),\n\n (\"family_name\", \"tester\".to_string()),\n\n (\"phone\", \"+15555555555\".to_string()),\n\n (\"tenant\", \"m10\".to_string()),\n\n (\"m10UserId\", uuid::Uuid::new_v4().to_string()),\n\n (\"email\", email.to_string()),\n\n (\"password\", password(email)),\n\n (\"connection\", \"Username-Password-Authentication\".to_string()),\n\n ]\n\n .into_iter()\n\n .collect::<HashMap<&'static str, String>>();\n\n let client = reqwest::Client::new();\n\n client\n\n .post(format!(\"{}/oauth/signup\", ledger_addr()))\n\n .json(&request)\n\n .send()\n\n .await\n\n .unwrap()\n\n .assert_success()\n\n .await;\n\n}\n\n\n", "file_path": "examples/bank-emulator/integration-tests/src/utils.rs", "rank": 54, "score": 60184.6761225277 }, { "content": "pub fn configure(config: &mut ServiceConfig) {\n\n config.service(\n\n actix_web::web::scope(\"/api/v1\")\n\n .service(accounts::scope())\n\n .service(assets::scope())\n\n .service(contacts::scope())\n\n .service(documents::scope())\n\n .service(fees::scope())\n\n .service(keys::scope())\n\n .service(notification_preferences::scope())\n\n .service(public_key::scope())\n\n .service(transfer_methods::scope()),\n\n );\n\n}\n", "file_path": "examples/bank-emulator/src/controllers/mod.rs", "rank": 55, "score": 57406.69909185488 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let out_dir = PathBuf::from(var(\"OUT_DIR\").expect(\"OUT_DIR environment variable not set\"));\n\n println!(\"cargo:rerun-if-changed={}\", protobuf_dir());\n\n let rerun_protobuf_dir = PathBuf::from(protobuf_dir());\n\n let mut rerun_dirs = vec![Cow::from(rerun_protobuf_dir)];\n\n while let Some(dir) = rerun_dirs.pop() {\n\n for entry_res in fs::read_dir(dir)? {\n\n let entry = entry_res?;\n\n let entry_path = entry.path();\n\n println!(\"cargo:rerun-if-changed={}\", entry_path.display());\n\n if entry.metadata()?.is_dir() {\n\n rerun_dirs.push(Cow::from(entry_path));\n\n }\n\n }\n\n }\n\n\n\n // m10.model.pb\n\n prost_build::Config::new()\n\n .file_descriptor_set_path(out_dir.join(\"m10.model.pb\"))\n\n .compile_protos(\n", "file_path": "rust/protos/build.rs", "rank": 56, "score": 56075.8023952786 }, { "content": " async getAccount() {\n\n try {\n\n this.setState({ isLoadingAccount: true, accountLoadError: null })\n\n const res = await getAccountById(this.props.id)\n\n const account = res?.data\n\n this.setState({ account, isLoadingAccount: false })\n\n } catch (e) {\n\n this.setState({ accountLoadError: e, isLoadingAccount: false })\n\n }\n", "file_path": "examples/bank-webapp/pages/admin/accounts/[id].js", "rank": 57, "score": 55403.227080953926 }, { "content": "fn ledger_id_to_instrument(ledger_id: &str) -> Result<&str, Error> {\n\n ledger_id\n\n .split('.')\n\n .next()\n\n .ok_or_else(|| Error::internal_msg(\"invalid ledger id\"))\n\n}\n", "file_path": "examples/bank-emulator/src/utils.rs", "rank": 58, "score": 54339.17018047848 }, { "content": "fn delete_operation<D>(id: PrettyId) -> sdk::Operation\n\nwhere\n\n D: Message + Pack + Default + 'static,\n\n{\n\n let id: Bytes = id.into();\n\n sdk::Operation::delete::<D>(id.to_vec())\n\n}\n", "file_path": "examples/m10_cli/src/commands/delete/mod.rs", "rank": 59, "score": 53644.03420546849 }, { "content": "/// Extension trait for [`AccountId`]\n\npub trait AccountIdExt {\n\n fn involves_account(&self, id: AccountId) -> bool;\n\n}\n\n\n\nimpl AccountIdExt for [u8] {\n\n #[inline]\n\n fn involves_account(&self, id: AccountId) -> bool {\n\n AccountId::try_from_be_slice(self)\n\n .map(|x| x.is_eq_or_descendant_of(id))\n\n .unwrap_or(false)\n\n }\n\n}\n\n\n\nimpl AccountIdExt for sdk::FinalizedTransfer {\n\n #[inline]\n\n fn involves_account(&self, id: AccountId) -> bool {\n\n self.transfer_steps.iter().any(|step| {\n\n step.from_account_id.involves_account(id) || step.to_account_id.involves_account(id)\n\n })\n\n }\n", "file_path": "rust/sdk/src/account/id/ext.rs", "rank": 60, "score": 52462.127608443865 }, { "content": "fn user_from_request(req: &actix_web::HttpRequest) -> Result<User, Error> {\n\n let jwks = req\n\n .app_data::<Data<watch::Receiver<Jwks>>>()\n\n .expect(\"missing JWKs receiver\");\n\n let validation_options = req\n\n .app_data::<Data<ValidationOptions>>()\n\n .expect(\"missing JWT validation options\");\n\n\n\n let token = req\n\n .headers()\n\n .get(\"Authorization\")\n\n .and_then(|header| header.to_str().ok())\n\n .and_then(|header| header.split_once(\"Bearer \").map(|x| x.1))\n\n .ok_or_else(Error::unauthorized)?;\n\n let jwt = validate_token(jwks, validation_options, token)?;\n\n let mut user = User::try_from(jwt)?;\n\n user.token = token.to_string();\n\n Ok(user)\n\n}\n\n\n", "file_path": "examples/bank-emulator/src/auth.rs", "rank": 61, "score": 52335.612319888714 }, { "content": "fn print_item<I>(item: I, format: Format) -> anyhow::Result<()>\n\nwhere\n\n I: Serialize,\n\n{\n\n match format {\n\n Format::Json => {\n\n let stdout = io::stdout();\n\n let handle = stdout.lock();\n\n let writer = LineWriter::new(handle);\n\n serde_json::to_writer_pretty(writer, &item)?;\n\n }\n\n Format::Yaml => {\n\n let stdout = io::stdout();\n\n let handle = stdout.lock();\n\n let writer = LineWriter::new(handle);\n\n serde_yaml::to_writer(writer, &item)?;\n\n }\n\n Format::Raw => {\n\n let pretty = PrettyConfig::new()\n\n .with_depth_limit(4)\n\n .with_separate_tuple_members(true);\n\n let s = to_string_pretty(&item, pretty)?;\n\n println!(\"{}\", s);\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/m10_cli/src/commands/get/mod.rs", "rank": 62, "score": 51941.312551195035 }, { "content": "export const getAccounts = async query => (\n\n await axios.get(routes.ACCOUNTS_API, {\n\n params: { ...query },\n\n headers: {\n\n Authorization: `${Cookies.get('access_token') ? `Bearer ${Cookies.get('access_token')}` : ''}`\n\n }\n\n })\n", "file_path": "examples/bank-webapp/lib/api/accounts.js", "rank": 63, "score": 51838.67097966861 }, { "content": "export const getAccountById = async id => (\n\n await axios.get(`${routes.ACCOUNTS_API}/${id}`, {\n\n headers: {\n\n Authorization: `${Cookies.get('access_token') ? `Bearer ${Cookies.get('access_token')}` : ''}`\n\n }\n\n })\n", "file_path": "examples/bank-webapp/lib/api/accounts.js", "rank": 64, "score": 51234.21185221622 }, { "content": "const AccountsPage = ({ windowWidth }) => {\n\n const { accountId } = useParams()\n\n const [account, setAccount] = useState([])\n\n\n\n useEffect(() => {\n\n async function getAccount() {\n\n try {\n\n setAccount(await invoke('get_account', { id: accountId }))\n\n } catch (e) {\n\n toast.error(e.toString())\n\n }\n\n }\n\n getAccount()\n\n }, [accountId])\n\n\n\n return (\n\n <Page\n\n withGlobalNav\n\n windowWidth={windowWidth}\n\n includeBackButton\n\n >\n\n <AccountInfoCard account={account} accountId={accountId} />\n\n </Page>\n\n )\n", "file_path": "examples/cbdc-admin/src/pages/accounts-page.js", "rank": 65, "score": 51234.21185221622 }, { "content": "class AccountPage extends Component {\n\n state = {\n\n // accounts\n\n isLoadingAccount: false,\n\n accountLoadError: null,\n\n account: {},\n\n\n\n // payments\n\n payments: [],\n\n isLoadingPayments: false,\n\n hasLoadedPayments: false,\n\n paymentsLoadError: null,\n\n }\n\n\n\n async getAccount() {\n\n try {\n\n this.setState({ isLoadingAccount: true, accountLoadError: null })\n\n const res = await getAccountById(this.props.id)\n\n const account = res?.data\n\n this.setState({ account, isLoadingAccount: false })\n\n } catch (e) {\n\n this.setState({ accountLoadError: e, isLoadingAccount: false })\n\n }\n\n }\n\n\n\n getPayments = async() => {\n\n this.setState({ isLoadingPayments: true, paymentsLoadError: null })\n\n try {\n\n const paymentsRes = await getPaymentsByAccountIdAndAsset(this.props.id, DEFAULT_ASSET)\n\n const payments = parsePaymentsApi(paymentsRes?.data?.data || [])\n\n this.setState({ payments, isLoadingPayments: false, hasLoadedPayments: true })\n\n } catch (e) {\n\n this.setState({ paymentsLoadError: e, isLoadingPayments: false })\n\n }\n\n }\n\n\n\n async componentDidMount() {\n\n await this.getAccount()\n\n await this.getPayments()\n\n this.timerId = setInterval(() => this.getPayments(), 3000)\n\n }\n\n\n\n componentWillUnmount() {\n\n clearInterval(this.timerId)\n\n }\n\n\n\n render() {\n\n const { windowWidth, id } = this.props\n\n const {\n\n isLoadingAccount,\n\n accountLoadError,\n\n account,\n\n isLoadingPayments,\n\n hasLoadedPayments,\n\n payments,\n\n } = this.state\n\n return (\n\n <Page\n\n withSidebar\n\n withGlobalNav\n\n loadError={accountLoadError}\n\n isLoading={isLoadingAccount}\n\n windowWidth={windowWidth}\n\n >\n\n <AccountCard account={account} />\n\n <TablePayments\n\n payments={payments}\n\n isLoading={!hasLoadedPayments && isLoadingPayments}\n\n noPagination\n\n headerTheme={TABLE_HEADER_THEME_CARD}\n\n windowWidth={windowWidth}\n\n accountId={id}\n\n />\n\n </Page>\n\n )\n\n }\n", "file_path": "examples/bank-webapp/pages/admin/accounts/[id].js", "rank": 66, "score": 51234.21185221622 }, { "content": "class ClientStreamRequest {\n\n constructor(client, originalFunction, options = {}) {\n\n if (options == null) options = {}\n\n this.promise = new Promise((resolve, reject) => {\n\n // Deadline is advisable to be set\n\n // It should be a timestamp value in milliseconds\n\n let deadline\n\n if (options.timeout !== undefined) {\n\n deadline = Date.now() + options.timeout\n\n }\n\n this.stream = originalFunction.call(client, options.metadata, { deadline: deadline },\n\n function(error, response) {\n\n if (error) {\n\n reject(error)\n\n } else {\n\n resolve(response)\n\n }\n\n }\n\n )\n\n })\n\n }\n\n\n\n sendMessage(content = {}) {\n\n this.stream.write(content)\n\n return this\n\n }\n\n\n\n end() {\n\n this.stream.end()\n\n return this.promise\n\n }\n", "file_path": "node/m10-sdk/lib/utils/grpc/client-stream-request.js", "rank": 67, "score": 50847.61241373966 }, { "content": " render() {\n\n const { windowWidth, id } = this.props\n\n const {\n\n isLoadingAccount,\n\n accountLoadError,\n\n account,\n\n isLoadingPayments,\n\n hasLoadedPayments,\n\n payments,\n\n } = this.state\n\n return (\n\n <Page\n\n withSidebar\n\n withGlobalNav\n\n loadError={accountLoadError}\n\n isLoading={isLoadingAccount}\n\n windowWidth={windowWidth}\n\n >\n\n <AccountCard account={account} />\n\n <TablePayments\n\n payments={payments}\n\n isLoading={!hasLoadedPayments && isLoadingPayments}\n\n noPagination\n\n headerTheme={TABLE_HEADER_THEME_CARD}\n\n windowWidth={windowWidth}\n\n accountId={id}\n\n />\n\n </Page>\n\n )\n", "file_path": "examples/bank-webapp/pages/admin/accounts/[id].js", "rank": 68, "score": 50643.68670757585 }, { "content": "fn print_doc(item: impl Serialize, format: Format) -> anyhow::Result<()> {\n\n match format {\n\n Format::Json => {\n\n let stdout = io::stdout();\n\n let handle = stdout.lock();\n\n let writer = LineWriter::new(handle);\n\n serde_json::to_writer_pretty(writer, &item)?;\n\n }\n\n Format::Yaml => {\n\n let stdout = io::stdout();\n\n let handle = stdout.lock();\n\n let writer = LineWriter::new(handle);\n\n serde_yaml::to_writer(writer, &item)?;\n\n }\n\n Format::Raw => {\n\n let pretty = PrettyConfig::new()\n\n .with_depth_limit(4)\n\n .with_separate_tuple_members(true);\n\n let s = to_string_pretty(&item, pretty)?;\n\n println!(\"{}\", s);\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/m10_cli/src/commands/observe/mod.rs", "rank": 69, "score": 50574.78148680127 }, { "content": " const makeClientStreamRequest = function(client, originalFunction, options) {\n\n return function() {\n\n return new ClientStreamRequest(client, originalFunction, options)\n\n }\n", "file_path": "node/m10-sdk/lib/utils/grpc/client-stream-request.js", "rank": 70, "score": 50281.130993264764 }, { "content": " end() {\n\n this.stream.end()\n\n return this.promise\n", "file_path": "node/m10-sdk/lib/utils/grpc/client-stream-request.js", "rank": 71, "score": 50281.130993264764 }, { "content": " constructor(client, originalFunction, options = {}) {\n\n if (options == null) options = {}\n\n this.promise = new Promise((resolve, reject) => {\n\n // Deadline is advisable to be set\n\n // It should be a timestamp value in milliseconds\n\n let deadline\n\n if (options.timeout !== undefined) {\n\n deadline = Date.now() + options.timeout\n\n }\n\n this.stream = originalFunction.call(client, options.metadata, { deadline: deadline },\n\n function(error, response) {\n\n if (error) {\n\n reject(error)\n\n } else {\n\n resolve(response)\n\n }\n\n }\n\n )\n\n })\n", "file_path": "node/m10-sdk/lib/utils/grpc/client-stream-request.js", "rank": 72, "score": 50281.130993264764 }, { "content": " async componentDidMount() {\n\n await this.getAccount()\n\n await this.getPayments()\n\n this.timerId = setInterval(() => this.getPayments(), 3000)\n", "file_path": "examples/bank-webapp/pages/admin/accounts/[id].js", "rank": 73, "score": 50066.619226848896 }, { "content": "fn print_document<D, I>(document: D, format: Format) -> anyhow::Result<()>\n\nwhere\n\n D: Message + Pack + Default,\n\n I: TryFrom<D, Error = anyhow::Error> + Serialize,\n\n{\n\n let printable = I::try_from(document)?;\n\n print_item(printable, format)?;\n\n Ok(())\n\n}\n", "file_path": "examples/m10_cli/src/commands/get/mod.rs", "rank": 74, "score": 49653.60077788579 }, { "content": " componentWillUnmount() {\n\n clearInterval(this.timerId)\n", "file_path": "examples/bank-webapp/pages/admin/accounts/[id].js", "rank": 75, "score": 49502.55455650963 }, { "content": " &self,\n\n transfer_step: &sdk::TransferStep,\n\n signer: &impl Signer,\n\n ) -> Result<EnhancedTransferStep, Status> {\n\n let from = async {\n\n Result::<_, Status>::Ok(\n\n self.query_client\n\n .clone()\n\n .get_account_info(Request::new(\n\n signer\n\n .sign_request(sdk::GetAccountRequest {\n\n id: transfer_step.from_account_id.clone(),\n\n })\n\n .await\n\n .map_err(|err| Status::internal(err.to_string()))?\n\n .into(),\n\n ))\n\n .await\n\n .map(|res| res.into_inner())\n\n .ok(),\n", "file_path": "rust/sdk/src/client.rs", "rank": 76, "score": 48487.10362095211 }, { "content": " self.query_client\n\n .list_roles(Request::new(request.into()))\n\n .await\n\n .map(|res| res.into_inner())\n\n }\n\n\n\n // Observations\n\n pub async fn observe_transfers(\n\n &self,\n\n request: SignedRequest<sdk::ObserveAccountsRequest>,\n\n ) -> Result<impl Stream<Item = Result<sdk::FinalizedTransactions, Status>>, Status> {\n\n self.query_client\n\n .clone()\n\n .observe_transfers(Request::new(request.into()))\n\n .await\n\n .map(tonic::Response::into_inner)\n\n }\n\n\n\n pub async fn observe_resources(\n\n &self,\n", "file_path": "rust/sdk/src/client.rs", "rank": 77, "score": 48486.92780043173 }, { "content": " )\n\n };\n\n let to = async {\n\n Result::<_, Status>::Ok(\n\n self.query_client\n\n .clone()\n\n .get_account_info(Request::new(\n\n signer\n\n .sign_request(sdk::GetAccountRequest {\n\n id: transfer_step.to_account_id.clone(),\n\n })\n\n .await\n\n .map_err(|err| Status::internal(err.to_string()))?\n\n .into(),\n\n ))\n\n .await\n\n .map(|res| res.into_inner())\n\n .ok(),\n\n )\n\n };\n", "file_path": "rust/sdk/src/client.rs", "rank": 78, "score": 48486.554705206785 }, { "content": " pub async fn observe_actions(\n\n &self,\n\n request: SignedRequest<sdk::ObserveActionsRequest>,\n\n ) -> Result<impl Stream<Item = Result<sdk::FinalizedTransactions, Status>>, Status> {\n\n self.query_client\n\n .clone()\n\n .observe_actions(Request::new(request.into()))\n\n .await\n\n .map(tonic::Response::into_inner)\n\n }\n\n\n\n pub async fn enhance_transfers(\n\n &self,\n\n transfers: Vec<sdk::FinalizedTransfer>,\n\n signer: &impl Signer,\n\n ) -> Result<Vec<EnhancedTransfer>, Status> {\n\n futures_util::future::try_join_all(\n\n transfers\n\n .into_iter()\n\n .map(|transfer| self.enhance_transfer(transfer, signer)),\n", "file_path": "rust/sdk/src/client.rs", "rank": 79, "score": 48486.51260718342 }, { "content": " )\n\n .await\n\n }\n\n\n\n pub async fn enhance_transfer(\n\n &self,\n\n transfer: sdk::FinalizedTransfer,\n\n signer: &impl Signer,\n\n ) -> Result<EnhancedTransfer, Status> {\n\n let mut enhanced_steps = Vec::default();\n\n for transfer_step in &transfer.transfer_steps {\n\n enhanced_steps.push(self.enhance_transfer_step(transfer_step, signer).await?);\n\n }\n\n Ok(EnhancedTransfer {\n\n enhanced_steps,\n\n transfer,\n\n })\n\n }\n\n\n\n async fn enhance_transfer_step(\n", "file_path": "rust/sdk/src/client.rs", "rank": 80, "score": 48485.053157256465 }, { "content": "use futures_core::Stream;\n\nuse m10_protos::sdk::AccountInfo;\n\nuse m10_protos::sdk::{\n\n self, m10_query_service_client::M10QueryServiceClient,\n\n m10_tx_service_client::M10TxServiceClient, transaction_data::Data, TransactionData,\n\n};\n\nuse m10_signing::SignedRequest;\n\nuse std::time::{SystemTime, UNIX_EPOCH};\n\nuse tonic::{Request, Response};\n\n\n\nuse crate::transfer_ext::EnhancedTransfer;\n\nuse crate::{EnhancedTransferStep, Signer};\n\npub use tonic::transport::{Channel, ClientTlsConfig, Endpoint, Uri};\n\n\n\n// Re-export public error\n\npub use tonic::Status;\n\n\n\n#[derive(Clone)]\n\n/// A client for the M10 Ledger.\n\n///\n", "file_path": "rust/sdk/src/client.rs", "rank": 81, "score": 48484.632734394254 }, { "content": "\n\n // Indexed Accounts\n\n pub async fn get_indexed_account(\n\n &mut self,\n\n request: SignedRequest<sdk::GetAccountRequest>,\n\n ) -> Result<sdk::IndexedAccount, Status> {\n\n self.query_client\n\n .get_indexed_account(Request::new(request.into()))\n\n .await\n\n .map(|res| res.into_inner())\n\n }\n\n\n\n // AccountSets\n\n pub async fn get_account_set(\n\n &mut self,\n\n request: SignedRequest<sdk::GetAccountSetRequest>,\n\n ) -> Result<sdk::AccountSet, Status> {\n\n self.query_client\n\n .get_account_set(Request::new(request.into()))\n\n .await\n", "file_path": "rust/sdk/src/client.rs", "rank": 82, "score": 48484.38924466121 }, { "content": " pub async fn list_transfers(\n\n &mut self,\n\n request: SignedRequest<sdk::ListTransferRequest>,\n\n ) -> Result<sdk::FinalizedTransfers, Status> {\n\n self.query_client\n\n .list_transfers(Request::new(request.into()))\n\n .await\n\n .map(|res| res.into_inner())\n\n }\n\n\n\n // Actions\n\n pub async fn get_action(\n\n &mut self,\n\n request: SignedRequest<sdk::GetActionRequest>,\n\n ) -> Result<sdk::Action, Status> {\n\n self.query_client\n\n .get_action(Request::new(request.into()))\n\n .await\n\n .map(|res| res.into_inner())\n\n }\n", "file_path": "rust/sdk/src/client.rs", "rank": 83, "score": 48484.37406966164 }, { "content": " .await\n\n .map(|res| res.into_inner())\n\n }\n\n\n\n pub async fn get_account_info(\n\n &mut self,\n\n request: SignedRequest<sdk::GetAccountRequest>,\n\n ) -> Result<sdk::AccountInfo, Status> {\n\n self.query_client\n\n .get_account_info(Request::new(request.into()))\n\n .await\n\n .map(|res| res.into_inner())\n\n }\n\n\n\n pub async fn list_accounts(\n\n &mut self,\n\n request: SignedRequest<sdk::ListAccountsRequest>,\n\n ) -> Result<sdk::ListAccountsResponse, Status> {\n\n self.query_client\n\n .list_accounts(Request::new(request.into()))\n", "file_path": "rust/sdk/src/client.rs", "rank": 84, "score": 48484.37055870405 }, { "content": " &mut self,\n\n payload: SignedRequest<sdk::TransactionRequestPayload>,\n\n ) -> Result<sdk::TransactionResponse, Status> {\n\n self.tx_client\n\n .create_transaction(Request::new(payload.into()))\n\n .await\n\n .map(|res| res.into_inner())\n\n }\n\n\n\n // Transfers\n\n pub async fn get_transfer(\n\n &mut self,\n\n request: SignedRequest<sdk::GetTransferRequest>,\n\n ) -> Result<sdk::FinalizedTransfer, Status> {\n\n self.query_client\n\n .get_transfer(Request::new(request.into()))\n\n .await\n\n .map(|res| res.into_inner())\n\n }\n\n\n", "file_path": "rust/sdk/src/client.rs", "rank": 85, "score": 48483.72997321583 }, { "content": " .map(|res| res.into_inner())\n\n }\n\n\n\n pub async fn list_account_sets(\n\n &mut self,\n\n request: SignedRequest<sdk::ListAccountSetsRequest>,\n\n ) -> Result<sdk::ListAccountSetsResponse, Status> {\n\n self.query_client\n\n .list_account_sets(Request::new(request.into()))\n\n .await\n\n .map(|res| res.into_inner())\n\n }\n\n\n\n // Accounts\n\n pub async fn get_account(\n\n &mut self,\n\n request: SignedRequest<sdk::GetAccountRequest>,\n\n ) -> Result<sdk::Account, Status> {\n\n self.query_client\n\n .get_account(Request::new(request.into()))\n", "file_path": "rust/sdk/src/client.rs", "rank": 86, "score": 48483.27275504104 }, { "content": " let (from, to) = futures_util::future::try_join(from, to).await?;\n\n let from_bank = async {\n\n if let Some(ref from) = from {\n\n if from.parent_account_id.is_empty() {\n\n Ok(None)\n\n } else {\n\n Result::<Option<AccountInfo>, Status>::Ok(\n\n self.query_client\n\n .clone()\n\n .get_account_info(Request::new(\n\n signer\n\n .sign_request(sdk::GetAccountRequest {\n\n id: from.parent_account_id.clone(),\n\n })\n\n .await\n\n .map_err(|err| Status::internal(err.to_string()))?\n\n .into(),\n\n ))\n\n .await\n\n .map(|res| res.into_inner())\n", "file_path": "rust/sdk/src/client.rs", "rank": 87, "score": 48482.857919770555 }, { "content": "/// This client allows you to query and transact on the M10 ledger.\n\n///\n\n/// # Example\n\n/// ```no_run\n\n/// #[tokio::main(flavor = \"current_thread\")]\n\n/// async fn main() {\n\n/// let ledger_url = \"https://test.m10.net\".to_string();\n\n/// let mut client = m10_sdk::LedgerClient::new(\n\n/// tonic::transport::Endpoint::from_shared(ledger_url)\n\n/// .unwrap()\n\n/// .connect_lazy()\n\n/// .unwrap()\n\n/// );\n\n///\n\n/// let block_height = client.block_height().await;\n\n/// }\n\n/// ```\n\npub struct LedgerClient {\n\n tx_client: M10TxServiceClient<Channel>,\n\n query_client: M10QueryServiceClient<Channel>,\n", "file_path": "rust/sdk/src/client.rs", "rank": 88, "score": 48482.64214300173 }, { "content": " request: SignedRequest<sdk::ObserveResourcesRequest>,\n\n ) -> Result<impl Stream<Item = Result<sdk::FinalizedTransactions, Status>>, Status> {\n\n self.query_client\n\n .clone()\n\n .observe_resources(Request::new(request.into()))\n\n .await\n\n .map(tonic::Response::into_inner)\n\n }\n\n\n\n pub async fn observe_accounts(\n\n &self,\n\n request: SignedRequest<sdk::ObserveAccountsRequest>,\n\n ) -> Result<impl Stream<Item = Result<sdk::FinalizedTransactions, Status>>, Status> {\n\n self.query_client\n\n .clone()\n\n .observe_accounts(Request::new(request.into()))\n\n .await\n\n .map(tonic::Response::into_inner)\n\n }\n\n\n", "file_path": "rust/sdk/src/client.rs", "rank": 89, "score": 48482.01281781407 }, { "content": "\n\n pub async fn list_actions(\n\n &mut self,\n\n request: SignedRequest<sdk::ListActionsRequest>,\n\n ) -> Result<sdk::Actions, Status> {\n\n self.query_client\n\n .list_actions(Request::new(request.into()))\n\n .await\n\n .map(|res| res.into_inner())\n\n }\n\n\n\n pub async fn get_transaction(\n\n &mut self,\n\n request: SignedRequest<sdk::GetTransactionRequest>,\n\n ) -> Result<sdk::FinalizedTransaction, Status> {\n\n self.query_client\n\n .get_transaction(Request::new(request.into()))\n\n .await\n\n .map(Response::into_inner)\n\n }\n", "file_path": "rust/sdk/src/client.rs", "rank": 90, "score": 48479.30290995398 }, { "content": "\n\n pub async fn list_transactions(\n\n &mut self,\n\n request: SignedRequest<sdk::ListTransactionsRequest>,\n\n ) -> Result<sdk::FinalizedTransactions, Status> {\n\n self.query_client\n\n .list_transactions(Request::new(request.into()))\n\n .await\n\n .map(Response::into_inner)\n\n }\n\n\n\n pub async fn group_transactions(\n\n &mut self,\n\n request: SignedRequest<sdk::GroupTransactionsRequest>,\n\n ) -> Result<sdk::GroupedFinalizedTransactions, Status> {\n\n self.query_client\n\n .group_transactions(Request::new(request.into()))\n\n .await\n\n .map(Response::into_inner)\n\n }\n", "file_path": "rust/sdk/src/client.rs", "rank": 91, "score": 48479.24104864712 }, { "content": " .ok(),\n\n )\n\n }\n\n } else {\n\n Ok(None)\n\n }\n\n };\n\n let to_bank = async {\n\n if let Some(ref to) = to {\n\n if to.parent_account_id.is_empty() {\n\n Ok(None)\n\n } else {\n\n Result::<Option<AccountInfo>, Status>::Ok(\n\n self.query_client\n\n .clone()\n\n .get_account_info(Request::new(\n\n signer\n\n .sign_request(sdk::GetAccountRequest {\n\n id: to.parent_account_id.clone(),\n\n })\n", "file_path": "rust/sdk/src/client.rs", "rank": 92, "score": 48479.13675738223 }, { "content": " .await\n\n .map(|res| res.into_inner())\n\n }\n\n\n\n // Role Bindings\n\n pub async fn get_role_binding(\n\n &mut self,\n\n request: SignedRequest<sdk::GetRoleBindingRequest>,\n\n ) -> Result<sdk::RoleBinding, Status> {\n\n self.query_client\n\n .get_role_binding(Request::new(request.into()))\n\n .await\n\n .map(|res| res.into_inner())\n\n }\n\n\n\n pub async fn list_role_bindings(\n\n &mut self,\n\n request: SignedRequest<sdk::ListRoleBindingsRequest>,\n\n ) -> Result<sdk::ListRoleBindingsResponse, Status> {\n\n self.query_client\n", "file_path": "rust/sdk/src/client.rs", "rank": 93, "score": 48478.94073275106 }, { "content": "}\n\n\n\nimpl LedgerClient {\n\n pub fn new(grpc_channel: Channel) -> Self {\n\n let tx_client = M10TxServiceClient::new(grpc_channel.clone());\n\n let query_client = M10QueryServiceClient::new(grpc_channel);\n\n Self {\n\n tx_client,\n\n query_client,\n\n }\n\n }\n\n\n\n pub fn transaction_request(\n\n data: impl Into<Data>,\n\n context_id: Vec<u8>,\n\n ) -> sdk::TransactionRequestPayload {\n\n let timestamp = SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .unwrap()\n\n .as_micros() as u64;\n", "file_path": "rust/sdk/src/client.rs", "rank": 94, "score": 48478.68292324143 }, { "content": " .list_role_bindings(Request::new(request.into()))\n\n .await\n\n .map(|res| res.into_inner())\n\n }\n\n\n\n // Roles\n\n pub async fn get_role(\n\n &mut self,\n\n request: SignedRequest<sdk::GetRoleRequest>,\n\n ) -> Result<sdk::Role, Status> {\n\n self.query_client\n\n .get_role(Request::new(request.into()))\n\n .await\n\n .map(|res| res.into_inner())\n\n }\n\n\n\n pub async fn list_roles(\n\n &mut self,\n\n request: SignedRequest<sdk::ListRolesRequest>,\n\n ) -> Result<sdk::ListRolesResponse, Status> {\n", "file_path": "rust/sdk/src/client.rs", "rank": 95, "score": 48478.0263613097 }, { "content": " sdk::TransactionRequestPayload {\n\n nonce: fastrand::u64(..),\n\n timestamp,\n\n context_id,\n\n data: Some(TransactionData {\n\n data: Some(data.into()),\n\n }),\n\n }\n\n }\n\n\n\n pub async fn block_height(&mut self) -> Result<u64, tonic::Status> {\n\n let chain_info = self\n\n .query_client\n\n .get_chain_info(Request::new(()))\n\n .await?\n\n .into_inner();\n\n Ok(chain_info.block_height)\n\n }\n\n\n\n pub async fn create_transaction(\n", "file_path": "rust/sdk/src/client.rs", "rank": 96, "score": 48477.88598712991 }, { "content": " .await\n\n .map_err(|err| Status::internal(err.to_string()))?\n\n .into(),\n\n ))\n\n .await\n\n .map(|res| res.into_inner())\n\n .ok(),\n\n )\n\n }\n\n } else {\n\n Ok(None)\n\n }\n\n };\n\n let (from_bank, to_bank) = futures_util::future::try_join(from_bank, to_bank).await?;\n\n\n\n Ok(EnhancedTransferStep {\n\n from,\n\n to,\n\n from_bank,\n\n to_bank,\n\n })\n\n }\n\n}\n", "file_path": "rust/sdk/src/client.rs", "rank": 97, "score": 48475.76810148945 }, { "content": "use m10_protos::sdk;\n\n\n\npub struct EnhancedTransfer {\n\n pub transfer: sdk::FinalizedTransfer,\n\n pub enhanced_steps: Vec<EnhancedTransferStep>,\n\n}\n\n\n\npub struct EnhancedTransferStep {\n\n pub from: Option<sdk::AccountInfo>,\n\n pub from_bank: Option<sdk::AccountInfo>,\n\n pub to: Option<sdk::AccountInfo>,\n\n pub to_bank: Option<sdk::AccountInfo>,\n\n}\n", "file_path": "rust/sdk/src/transfer_ext.rs", "rank": 98, "score": 47507.54974858041 }, { "content": "fn parse_key_vals(s: &str) -> Result<HashMap<String, String>, serde_json::Error> {\n\n serde_json::from_str(s)\n\n}\n\n\n\nimpl super::BuildFromOptions for CreateRoleBindingOptions {\n\n type Document = sdk::RoleBinding;\n\n fn build_from_options(&self, default_owner: Vec<u8>) -> Result<Self::Document, anyhow::Error> {\n\n let id = self.id.unwrap_or_else(Uuid::new_v4).as_bytes().to_vec();\n\n let owner = self\n\n .owner\n\n .as_ref()\n\n .map_or::<Result<Vec<u8>, _>, _>(Ok(default_owner), base64::decode)?;\n\n let subjects = self\n\n .subjects\n\n .iter()\n\n .map(base64::decode)\n\n .collect::<Result<Vec<Vec<u8>>, _>>()?\n\n .into_iter()\n\n .map(bytes::Bytes::from)\n\n .collect();\n", "file_path": "examples/m10_cli/src/commands/create/role_bindings.rs", "rank": 99, "score": 47182.65997222344 } ]
Rust
src/bin/mpc.rs
ys-nuem/rust-gurobi-example
a94bf00d1452a4753c0dc0fd6ea83d67fb0d735f
#![allow(non_snake_case)] #![allow(dead_code)] extern crate gurobi; #[macro_use] extern crate itertools; use gurobi::*; use itertools::*; struct MPCModel(gurobi::Model); impl std::ops::Deref for MPCModel { type Target = gurobi::Model; fn deref(&self) -> &gurobi::Model { &self.0 } } impl std::ops::DerefMut for MPCModel { fn deref_mut(&mut self) -> &mut gurobi::Model { &mut self.0 } } impl MPCModel { fn new(modelname: &str, env: &Env) -> Result<MPCModel> { Model::new(modelname, &env).map(|model| MPCModel(model)) } fn add_var_series(&mut self, name: &str, len: usize, start: isize) -> Result<Vec<Var>> { let mut vars = Vec::with_capacity(len); for i in start..((len as isize) - start) { let v = try!(self.add_var(&format!("{}_{}", name, i), Continuous, 0.0, -INFINITY, INFINITY, &[], &[])); vars.push(v); } Ok(vars) } } fn main() { let mut env = Env::new("receding_horizon.log").unwrap(); env.set(param::OutputFlag, 0).unwrap(); let env = env; struct Solution { status: Status, u: Vec<f64>, x: Vec<f64>, } let solve_mpc = |x_t: f64, t: usize| -> Result<Solution> { let horizon = 10; let q = 100.0; let r = 0.42; let s = 0.01; let mut model = try!(MPCModel::new(&format!("mpc_{}", t), &env)); let u = try!(model.add_var_series("u", horizon, 0)); let x = try!(model.add_var_series("x", horizon + 2, -1)); try!(model.update()); for u in u.iter() { try!(u.set(&mut model, attr::LB, -1.0)); try!(u.set(&mut model, attr::UB, 1.0)); } try!(model.add_constr("initial", 1.0 * &x[0], Equal, x_t)); for (k, (u_k, x_k, x_k1)) in Zip::new((u.iter(), x.iter(), x.iter().skip(1))).enumerate() { try!(model.add_constr(&format!("ss_{}", k), x_k1 + (-0.9 * x_k) + (-1.0 * u_k), Equal, 0.0)); } let expr = Zip::new((x.iter().skip(1), u.iter())).fold(QuadExpr::new(), |expr, (x, u)| { expr + (x * x) * q + (u * u) * r }) + try!(x.last().map(|x_T| (x_T * x_T) * s).ok_or(Error::InconsitentDims)); try!(model.set_objective(expr, Minimize)); try!(model.optimize()); match try!(model.status()) { Status::Optimal => (), status => { return Ok(Solution { status: status, u: vec![], x: vec![], }) } } let mut sol_u = Vec::with_capacity(u.len()); for u in u.into_iter() { let u = try!(u.get(&model, attr::X)); sol_u.push(u); } let mut sol_x = Vec::with_capacity(x.len()); for x in x.into_iter() { let x = try!(x.get(&model, attr::X)); sol_x.push(x); } Ok(Solution { status: Status::Optimal, u: sol_u, x: sol_x, }) }; let n_times = 100; let n_rt = 10; let x_0 = 1.0; let mut state = Vec::with_capacity(n_times * n_rt); let mut input = Vec::with_capacity(n_times * n_rt); state.push(x_0); for t in 0..(n_times * n_rt) { let x_t = state.last().cloned().unwrap(); if t % n_rt == 0 { let sol = solve_mpc(x_t, t).unwrap(); let u = match sol.status { Status::Optimal => *sol.u.get(0).unwrap(), _ => { println!("step {}: cannot retrieve an optimal MIP solution", t); 0.0 } }; input.push(u); } let u_t = input.last().cloned().unwrap(); let x_t = 0.99 * x_t + u_t + 0.01; state.push(x_t); } println!("input = {:?}", input); }
#![allow(non_snake_case)] #![allow(dead_code)] extern crate gurobi; #[macro_use] extern crate itertools; use gurobi::*; use itertools::*; struct MPCModel(gurobi::Model); impl std::ops::Deref for MPCModel { type Target = gurobi::Model; fn deref(&self) -> &gurobi::Model { &self.0 } } impl std::ops::DerefMut for MPCModel { fn deref_mut(&mut self) -> &mut gurobi::Model { &mut self.0 } } impl MPCModel { fn new(modelname: &str, env: &Env) -> Result<MPCModel> { Model::new(modelname, &env).map(|model| MPCModel(model)) } fn add_var_series(&mut self, name: &str, len: usize, start: isize) -> Result<Vec<Var>> { let mut vars = Vec::with_capacity(len); for i in start..((len as isize) - start) { let v = try!(self.add_var(&format!("{}_{}", name, i), Continuous, 0.0, -INFINITY, INFINITY, &[], &[])); vars.push(v); } Ok(vars) } } fn main() { let mut env = Env::new("receding_horizon.log").unwrap(); env.set(param::OutputFlag, 0).unwrap(); let env = env; struct Solution { status: Status, u: Vec<f64>, x: Vec<f64>, } let solve_mpc = |x_t: f64, t: usize| -> Result<Solution> { let horizon = 10; let q = 100.0; let r = 0.42; let s = 0.01; let mut model = try!(MPCModel::new(&format!("mpc_{}", t), &env)); let u = try!(model.add_var_series("u", horizon, 0)); let x = try!(model.add_var_series("x", horizon + 2, -1)); try!(model.update()); for u in u.iter() { try!(u.set(&mut model, attr::LB, -1.0)); try!(u.set(&mut model, attr::UB, 1.0)); } try!(model.add_constr("initial", 1.0 * &x[0], Equal, x_t)); for (k, (u_k, x_k, x_k1)) in Zip::new((u.iter(), x.iter(), x.iter().skip(1))).enumerate() { try!(model.add_constr(&format!("ss_{}", k), x_k1 + (-0.9 * x_k) + (-1.0 * u_k), Equal, 0.0)); } let expr = Zip::new((x.iter().skip(1), u.iter())).fold(QuadExpr::new(), |expr, (x, u)| { expr + (x * x) * q + (u * u) * r }) + try!(x.last().map(|x_T| (x_T * x_T) * s).ok_or(Error::InconsitentDims)); try!(model.set_objective(expr, Minimize)); try!(model.optimize()); match try!(model.status()) { Status::Optimal => (), status => { return Ok(Solution { status: status, u: vec![], x: vec![], }) } } let mut sol_u = Vec::with_capacity(u.len()); for u in u.into_iter() { let u = try!(u.get(&model, attr::X)); sol_u.push(u); } let mut sol_x = Vec::with_capacity(x.len()); for x in x.into_iter() { let x = try!(x.get(&model, attr::X)); sol_x.push(x); } Ok(Solution { status: Status::Optimal, u: sol_u, x: sol_x, }) }; let n_times = 100; let n_rt = 10; let x_0 = 1.0; let mut state = Vec::with_capacity(n_times * n_rt); let mut input = Vec::with_capacity(n_times * n_rt); state.push(x_0); for t in 0..(n_times * n_rt) { let x_t = state.last().cloned().unwrap(); if t % n_rt == 0 { let sol = solve_mpc(x_t, t).unwrap(); let u =
; input.push(u); } let u_t = input.last().cloned().unwrap(); let x_t = 0.99 * x_t + u_t + 0.01; state.push(x_t); } println!("input = {:?}", input); }
match sol.status { Status::Optimal => *sol.u.get(0).unwrap(), _ => { println!("step {}: cannot retrieve an optimal MIP solution", t); 0.0 } }
if_condition
[ { "content": "# rust-gurobi-example", "file_path": "README.md", "rank": 0, "score": 2911.5059799016285 } ]
Rust
nft_api/src/ingest/main.rs
jarry-xiao/merkle-wallet
079c4c7b51ee9f90ad98c13a660ff97727db7127
extern crate core; use hyper::{Body, Client, Request, Response, Server, StatusCode}; use futures_util::future::join3; use redis::streams::{StreamId, StreamKey, StreamReadOptions, StreamReadReply}; use redis::{Commands, Value}; use routerify::prelude::*; use routerify::{Middleware, Router, RouterService}; use anchor_client::anchor_lang::prelude::Pubkey; use routerify_json_response::{json_failed_resp_with_message, json_success_resp}; use std::{net::SocketAddr, thread}; use gummyroll::state::change_log::{ChangeLogEvent, PathNode}; use nft_api_lib::error::*; use nft_api_lib::events::handle_event; use sqlx; use sqlx::postgres::PgPoolOptions; use sqlx::{Pool, Postgres}; use tokio::task; #[derive(Default)] struct AppEvent { op: String, message: String, leaf: String, owner: String, tree_id: String, authority: String, } const SET_APPSQL: &str = r#"INSERT INTO app_specific (msg, leaf, owner, tree_id, revision) VALUES ($1,$2,$3,$4,$5) ON CONFLICT (msg) DO UPDATE SET leaf = excluded.leaf, owner = excluded.owner, tree_id = excluded.tree_id, revision = excluded.revision"#; const SET_OWNERSHIP_APPSQL: &str = r#"INSERT INTO app_specific_ownership (tree_id, authority) VALUES ($1,$2) ON CONFLICT (tree_id) DO UPDATE SET authority = excluded.authority"#; const GET_APPSQL: &str = "SELECT revision FROM app_specific WHERE msg = $1 AND tree_id = $2"; const DEL_APPSQL: &str = "DELETE FROM app_specific WHERE leaf = $1 AND tree_id = $2"; const SET_CLSQL_ITEM: &str = "INSERT INTO cl_items (tree, seq, level, hash, node_idx) VALUES ($1,$2,$3,$4,$5)"; #[derive(sqlx::FromRow, Clone, Debug)] struct AppSpecificRev { revision: i64, } pub async fn cl_service(ids: &Vec<StreamId>, pool: &Pool<Postgres>) -> String { let mut last_id = "".to_string(); for StreamId { id, map } in ids { println!("\tCL STREAM ID {}", id); let pid = id.replace("-", "").parse::<i64>().unwrap(); let data = map.get("data"); if data.is_none() { println!("\tNo Data"); continue; } if let Value::Data(bytes) = data.unwrap().to_owned() { let raw_str = String::from_utf8(bytes); if !raw_str.is_ok() { continue; } let change_log_res = raw_str .map_err(|_serr| ApiError::ChangeLogEventMalformed) .and_then(|o| { let d: Result<ChangeLogEvent, ApiError> = handle_event(o); d }); if change_log_res.is_err() { println!("\tBad Data"); continue; } let change_log = change_log_res.unwrap(); println!("\tCL tree {:?}", change_log.id); let txnb = pool.begin().await; match txnb { Ok(txn) => { let mut i: i64 = 0; for p in change_log.path.into_iter() { println!("level {}, node {:?}", i, p.node.inner); let tree_id = change_log.id.as_ref(); let f = sqlx::query(SET_CLSQL_ITEM) .bind(&tree_id) .bind(&pid + i) .bind(&i) .bind(&p.node.inner.as_ref()) .bind(&(p.index as i64)) .execute(pool) .await; if f.is_err() { println!("Error {:?}", f.err().unwrap()); } i += 1; } match txn.commit().await { Ok(_r) => { println!("Saved CL"); } Err(e) => { eprintln!("{}", e.to_string()) } } } Err(e) => { eprintln!("{}", e.to_string()) } } } last_id = id.clone(); } last_id } pub async fn structured_program_event_service( ids: &Vec<StreamId>, pool: &Pool<Postgres>, ) -> String { let mut last_id = "".to_string(); for StreamId { id, map } in ids { let mut app_event = AppEvent::default(); for (k, v) in map.to_owned() { if let Value::Data(bytes) = v { let raw_str = String::from_utf8(bytes); if raw_str.is_ok() { if k == "op" { app_event.op = raw_str.unwrap(); } else if k == "tree_id" { app_event.tree_id = raw_str.unwrap(); } else if k == "msg" { app_event.message = raw_str.unwrap(); } else if k == "leaf" { app_event.leaf = raw_str.unwrap(); } else if k == "owner" { app_event.owner = raw_str.unwrap(); } else if k == "authority" { app_event.authority = raw_str.unwrap(); } } } } let pid = id.replace("-", "").parse::<i64>().unwrap(); let new_owner = map.get("new_owner").and_then(|x| { if let Value::Data(bytes) = x.to_owned() { String::from_utf8(bytes).ok() } else { None } }); println!("Op: {:?}", app_event.op); println!("leaf: {:?}", &app_event.leaf); println!("owner: {:?}", &app_event.owner); println!("tree_id: {:?}", &app_event.tree_id); println!("new_owner: {:?}", new_owner); if app_event.op == "add" || app_event.op == "tran" || app_event.op == "create" { let row = sqlx::query_as::<_, AppSpecificRev>(GET_APPSQL) .bind(&un_jank_message(&app_event.message)) .bind(&bs58::decode(&app_event.tree_id).into_vec().unwrap()) .fetch_one(pool) .await; if row.is_ok() { let res = row.unwrap(); if pid < res.revision as i64 { continue; } } } if app_event.op == "add" { sqlx::query(SET_APPSQL) .bind(&un_jank_message(&app_event.message)) .bind(&bs58::decode(&app_event.leaf).into_vec().unwrap()) .bind(&bs58::decode(&app_event.owner).into_vec().unwrap()) .bind(&bs58::decode(&app_event.tree_id).into_vec().unwrap()) .bind(&pid) .execute(pool) .await .unwrap(); } else if app_event.op == "tran" { match new_owner { Some(x) => { sqlx::query(SET_APPSQL) .bind(&un_jank_message(&app_event.message)) .bind(&bs58::decode(&app_event.leaf).into_vec().unwrap()) .bind(&bs58::decode(&x).into_vec().unwrap()) .bind(&bs58::decode(&app_event.tree_id).into_vec().unwrap()) .bind(&pid) .execute(pool) .await .unwrap(); } None => { println!("Received Transfer op with no new_owner"); continue; } }; } else if app_event.op == "rm" { sqlx::query(DEL_APPSQL) .bind(&bs58::decode(&app_event.leaf).into_vec().unwrap()) .bind(&bs58::decode(&app_event.tree_id).into_vec().unwrap()) .execute(pool) .await .unwrap(); } else if app_event.op == "create" { sqlx::query(SET_OWNERSHIP_APPSQL) .bind(&bs58::decode(&app_event.tree_id).into_vec().unwrap()) .bind(&bs58::decode(&app_event.authority).into_vec().unwrap()) .bind(&pid) .execute(pool) .await .unwrap(); } last_id = id.clone(); } last_id } fn un_jank_message(hex_str: &String) -> String { String::from_utf8(hex::decode(hex_str).unwrap()).unwrap() } #[tokio::main] async fn main() { let client = redis::Client::open("redis://redis/").unwrap(); let pool = PgPoolOptions::new() .max_connections(5) .connect("postgres://solana:solana@db/solana") .await .unwrap(); let mut cl_last_id: String = ">".to_string(); let mut gm_last_id: String = ">".to_string(); let conn_res = client.get_connection(); let mut conn = conn_res.unwrap(); let streams = vec!["GM_CL", "GMC_OP"]; let group_name = "ingester"; for key in &streams { let created: Result<(), _> = conn.xgroup_create_mkstream(*key, group_name, "$"); if let Err(e) = created { println!("Group already exists: {:?}", e) } } loop { let opts = StreamReadOptions::default() .block(1000) .count(100000) .group(group_name, "lelelelle"); let srr: StreamReadReply = conn .xread_options(streams.as_slice(), &[&cl_last_id, &gm_last_id], &opts) .unwrap(); for StreamKey { key, ids } in srr.keys { println!("{}", key); if key == "GM_CL" { cl_service(&ids, &pool).await; } else if key == "GMC_OP" { structured_program_event_service(&ids, &pool).await; } } } }
extern crate core; use hyper::{Body, Client, Request, Response, Server, StatusCode}; use futures_util::future::join3; use redis::streams::{StreamId, StreamKey, StreamReadOptions, StreamReadReply}; use redis::{Commands, Value}; use routerify::prelude::*; use routerify::{Middleware, Router, RouterService}; use anchor_client::anchor_lang::prelude::Pubkey; use routerify_json_response::{json_failed_resp_with_message, json_success_resp}; use std::{net::SocketAddr, thread}; use gummyroll::state::change_log::{ChangeLogEvent, PathNode}; use nft_api_lib::error::*; use nft_api_lib::events::handle_event; use sqlx; use sqlx::postgres::PgPoolOptions; use sqlx::{Pool, Postgres}; use tokio::task; #[derive(Default)] struct AppEvent { op: String, message: String, leaf: String, owner: String, tree_id: String, authority: String, } const SET_APPSQL: &str = r#"INSERT INTO app_specific (msg, leaf, owner, tree_id, revision) VALUES ($1,$2,$3,$4,$5) ON CONFLICT (msg) DO UPDATE SET leaf = excluded.leaf, owner = excluded.owner, tree_id = excluded.tree_id, revision = excluded.revision"#; const SET_OWNERSHIP_APPSQL: &str = r#"INSERT INTO app_specific_ownership (tree_id, authority) VALUES ($1,$2) ON CONFLICT (tree_id) DO UPDATE SET authority = excluded.authority"#; const GET_APPSQL: &str = "SELECT revision FROM app_specific WHERE msg = $1 AND tree_id = $2"; const DEL_APPSQL: &str = "DELETE FROM app_specific WHERE leaf = $1 AND tree_id = $2"; const SET_CLSQL_ITEM: &str = "INSERT INTO cl_items (tree, seq, level, hash, node_idx) VALUES ($1,$2,$3,$4,$5)"; #[derive(sqlx::FromRow, Clone, Debug)] struct AppSpecificRev { revision: i64, } pub async fn cl_service(ids: &Vec<StreamId>, pool: &Pool<Postgres>) -> String { let mut last_id = "".to_string(); for StreamId { id, map } in ids { println!("\tCL STREAM ID {}", id); let pid = id.replace("-", "").parse::<i64>().unwrap(); let data = map.get("data"); if data.is_none() { println!("\tNo Data"); continue; } if let Value::Data(bytes) = data.unwrap().to_owned() { let raw_str = String::from_utf8(bytes); if !raw_str.is_ok() { continue; } let change_log_res = raw_str .map_err(|_serr| ApiError::ChangeLogEventMalformed) .and_then(|o| { let d: Result<ChangeLogEvent, ApiError> = handle_event(o); d }); if change_log_res.is_err() { println!("\tBad Data"); continue; } let change_log = change_log_res.unwrap(); println!("\tCL tree {:?}", change_log.id); let txnb = pool.begin().await; match txnb { Ok(txn) => { let mut i: i64 = 0; for p in change_log.path.into_iter() { println!("level {}, node {:?}", i, p.node.inner); let tree_id = change_log.id.as_ref(); let f = sqlx::query(SET_CLSQL_ITEM) .bind(&tree_id) .bind(&pid + i) .bind(&i) .bind(&p.node.inner.as_ref()) .bind(&(p.index as i64)) .execute(pool) .
let mut conn = conn_res.unwrap(); let streams = vec!["GM_CL", "GMC_OP"]; let group_name = "ingester"; for key in &streams { let created: Result<(), _> = conn.xgroup_create_mkstream(*key, group_name, "$"); if let Err(e) = created { println!("Group already exists: {:?}", e) } } loop { let opts = StreamReadOptions::default() .block(1000) .count(100000) .group(group_name, "lelelelle"); let srr: StreamReadReply = conn .xread_options(streams.as_slice(), &[&cl_last_id, &gm_last_id], &opts) .unwrap(); for StreamKey { key, ids } in srr.keys { println!("{}", key); if key == "GM_CL" { cl_service(&ids, &pool).await; } else if key == "GMC_OP" { structured_program_event_service(&ids, &pool).await; } } } }
await; if f.is_err() { println!("Error {:?}", f.err().unwrap()); } i += 1; } match txn.commit().await { Ok(_r) => { println!("Saved CL"); } Err(e) => { eprintln!("{}", e.to_string()) } } } Err(e) => { eprintln!("{}", e.to_string()) } } } last_id = id.clone(); } last_id } pub async fn structured_program_event_service( ids: &Vec<StreamId>, pool: &Pool<Postgres>, ) -> String { let mut last_id = "".to_string(); for StreamId { id, map } in ids { let mut app_event = AppEvent::default(); for (k, v) in map.to_owned() { if let Value::Data(bytes) = v { let raw_str = String::from_utf8(bytes); if raw_str.is_ok() { if k == "op" { app_event.op = raw_str.unwrap(); } else if k == "tree_id" { app_event.tree_id = raw_str.unwrap(); } else if k == "msg" { app_event.message = raw_str.unwrap(); } else if k == "leaf" { app_event.leaf = raw_str.unwrap(); } else if k == "owner" { app_event.owner = raw_str.unwrap(); } else if k == "authority" { app_event.authority = raw_str.unwrap(); } } } } let pid = id.replace("-", "").parse::<i64>().unwrap(); let new_owner = map.get("new_owner").and_then(|x| { if let Value::Data(bytes) = x.to_owned() { String::from_utf8(bytes).ok() } else { None } }); println!("Op: {:?}", app_event.op); println!("leaf: {:?}", &app_event.leaf); println!("owner: {:?}", &app_event.owner); println!("tree_id: {:?}", &app_event.tree_id); println!("new_owner: {:?}", new_owner); if app_event.op == "add" || app_event.op == "tran" || app_event.op == "create" { let row = sqlx::query_as::<_, AppSpecificRev>(GET_APPSQL) .bind(&un_jank_message(&app_event.message)) .bind(&bs58::decode(&app_event.tree_id).into_vec().unwrap()) .fetch_one(pool) .await; if row.is_ok() { let res = row.unwrap(); if pid < res.revision as i64 { continue; } } } if app_event.op == "add" { sqlx::query(SET_APPSQL) .bind(&un_jank_message(&app_event.message)) .bind(&bs58::decode(&app_event.leaf).into_vec().unwrap()) .bind(&bs58::decode(&app_event.owner).into_vec().unwrap()) .bind(&bs58::decode(&app_event.tree_id).into_vec().unwrap()) .bind(&pid) .execute(pool) .await .unwrap(); } else if app_event.op == "tran" { match new_owner { Some(x) => { sqlx::query(SET_APPSQL) .bind(&un_jank_message(&app_event.message)) .bind(&bs58::decode(&app_event.leaf).into_vec().unwrap()) .bind(&bs58::decode(&x).into_vec().unwrap()) .bind(&bs58::decode(&app_event.tree_id).into_vec().unwrap()) .bind(&pid) .execute(pool) .await .unwrap(); } None => { println!("Received Transfer op with no new_owner"); continue; } }; } else if app_event.op == "rm" { sqlx::query(DEL_APPSQL) .bind(&bs58::decode(&app_event.leaf).into_vec().unwrap()) .bind(&bs58::decode(&app_event.tree_id).into_vec().unwrap()) .execute(pool) .await .unwrap(); } else if app_event.op == "create" { sqlx::query(SET_OWNERSHIP_APPSQL) .bind(&bs58::decode(&app_event.tree_id).into_vec().unwrap()) .bind(&bs58::decode(&app_event.authority).into_vec().unwrap()) .bind(&pid) .execute(pool) .await .unwrap(); } last_id = id.clone(); } last_id } fn un_jank_message(hex_str: &String) -> String { String::from_utf8(hex::decode(hex_str).unwrap()).unwrap() } #[tokio::main] async fn main() { let client = redis::Client::open("redis://redis/").unwrap(); let pool = PgPoolOptions::new() .max_connections(5) .connect("postgres://solana:solana@db/solana") .await .unwrap(); let mut cl_last_id: String = ">".to_string(); let mut gm_last_id: String = ">".to_string(); let conn_res = client.get_connection();
random
[ { "content": "/// Recomputes root of the Merkle tree from Node & proof\n\npub fn recompute(mut leaf: Node, proof: &[Node], index: u32) -> Node {\n\n for (i, s) in proof.iter().enumerate() {\n\n if index >> i & 1 == 0 {\n\n let res = hashv(&[&leaf, s.as_ref()]);\n\n leaf.copy_from_slice(res.as_ref());\n\n } else {\n\n let res = hashv(&[s.as_ref(), &leaf]);\n\n leaf.copy_from_slice(res.as_ref());\n\n }\n\n }\n\n leaf\n\n}\n\n\n\n// Off-chain implentation to keep track of nodes\n\npub struct MerkleTree {\n\n pub leaf_nodes: Vec<Rc<RefCell<TreeNode>>>,\n\n pub root: Node,\n\n}\n\n\n\nimpl MerkleTree {\n", "file_path": "archive/merkle-accumulator/src/merkle.rs", "rank": 0, "score": 217939.8674728256 }, { "content": "/// Calculates hash of empty nodes up to level i\n\npub fn empty_node(level: u32) -> Node {\n\n let mut data = EMPTY;\n\n if level != 0 {\n\n let lower_empty = empty_node(level - 1);\n\n let hash = hashv(&[lower_empty.as_ref(), lower_empty.as_ref()]);\n\n data.copy_from_slice(hash.as_ref());\n\n }\n\n data\n\n}\n\n\n", "file_path": "programs/gummyroll/src/utils.rs", "rank": 1, "score": 198217.7887318426 }, { "content": "pub fn get_message_hash(owner: &AccountInfo, message: &Vec<u8>) -> keccak::Hash {\n\n keccak::hashv(&[&owner.key().to_bytes(), message.as_slice()])\n\n}\n", "file_path": "programs/gummyroll_crud/src/lib.rs", "rank": 2, "score": 197143.48847385764 }, { "content": "/// Calculates hash of empty nodes up to level i\n\n/// TODO: cache this\n\npub fn empty_node(level: u32) -> Node {\n\n let mut data = EMPTY;\n\n if level != 0 {\n\n let lower_empty = empty_node(level - 1);\n\n let hash = hashv(&[&lower_empty, &lower_empty]);\n\n data.copy_from_slice(hash.as_ref());\n\n }\n\n data\n\n}\n", "file_path": "archive/merkle-accumulator/src/merkle.rs", "rank": 3, "score": 194961.9502495146 }, { "content": "/// Recomputes root of the Merkle tree from Node & proof\n\npub fn recompute(leaf: Node, proof: &[Node], index: u32) -> Node {\n\n let mut current_node = leaf;\n\n for (depth, sibling_leaf) in proof.iter().enumerate() {\n\n if index >> depth & 1 == 0 {\n\n let res = hashv(&[current_node.as_ref(), sibling_leaf.as_ref()]);\n\n current_node.copy_from_slice(res.as_ref());\n\n } else {\n\n let res = hashv(&[sibling_leaf.as_ref(), current_node.as_ref()]);\n\n current_node.copy_from_slice(res.as_ref());\n\n }\n\n }\n\n\n\n current_node\n\n}\n\n\n", "file_path": "programs/gummyroll/src/utils.rs", "rank": 4, "score": 188376.4505381807 }, { "content": "fn node_idx_to_leaf_idx(index: i64, tree_height: u32) -> i64 {\n\n index - 2i64.pow(tree_height)\n\n}\n\n\n\n/// Takes in an index from leaf-space\n\nasync fn handle_get_asset(\n\n req: Request<Body>,\n\n) -> Result<Response<Body>, routerify_json_response::Error> {\n\n let db: &Pool<Postgres> = req.data::<Pool<Postgres>>().unwrap();\n\n let tree_id = decode_b58_param(req.param(\"tree_id\").unwrap()).unwrap();\n\n let leaf_idx = req.param(\"index\").unwrap().parse::<i64>().unwrap();\n\n\n\n let tree_height = get_height(db, &tree_id).await.unwrap();\n\n let node_idx = leaf_idx_to_node_idx(leaf_idx, tree_height);\n\n let result = get_asset(db, &tree_id, node_idx).await;\n\n if result.is_err() {\n\n return json_failed_resp_with_message(\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n result.err().unwrap().to_string(),\n\n );\n", "file_path": "nft_api/src/main.rs", "rank": 5, "score": 187574.9607162937 }, { "content": "fn leaf_idx_to_node_idx(index: i64, tree_height: u32) -> i64 {\n\n index + 2i64.pow(tree_height)\n\n}\n\n\n", "file_path": "nft_api/src/main.rs", "rank": 6, "score": 187574.9607162937 }, { "content": "pub fn fill_in_proof<const MAX_DEPTH: usize>(proof_vec: Vec<Node>, full_proof: &mut [Node; MAX_DEPTH]) {\n\n msg!(\"Attempting to fill in proof\");\n\n if proof_vec.len() > 0 {\n\n full_proof[..proof_vec.len()].copy_from_slice(&proof_vec);\n\n }\n\n\n\n for i in proof_vec.len()..MAX_DEPTH {\n\n full_proof[i] = empty_node(i as u32);\n\n }\n\n}\n\n\n", "file_path": "programs/gummyroll/src/utils.rs", "rank": 7, "score": 183479.37599803225 }, { "content": "select distinct on (c.node_idx) * from cl_items as c, node as n where tree = decode('6128F38A464BD1B1D60BD116CAE0E6A13A9913E0DA6F1FBA78EBCE4D85F999E5', 'hex') AND c.level > n.level order by c.node_idx, c.seq, c.level desc\n\n\n\n\n\n\n", "file_path": "archive/tree.sql", "rank": 8, "score": 176633.36201999683 }, { "content": "pub fn insert_or_append_leaf<'info>(\n\n seed: &Pubkey,\n\n bump: u8,\n\n gummyroll_program: &AccountInfo<'info>,\n\n authority: &AccountInfo<'info>,\n\n merkle_roll: &AccountInfo<'info>,\n\n remaining_accounts: &[AccountInfo<'info>],\n\n root_node: Node,\n\n leaf: Node,\n\n index: u32,\n\n) -> Result<()> {\n\n let seeds = &[seed.as_ref(), &[bump]];\n\n let authority_pda_signer = &[&seeds[..]];\n\n let cpi_ctx = CpiContext::new_with_signer(\n\n gummyroll_program.clone(),\n\n gummyroll::cpi::accounts::Modify {\n\n authority: authority.clone(),\n\n merkle_roll: merkle_roll.clone(),\n\n },\n\n authority_pda_signer,\n\n )\n\n .with_remaining_accounts(remaining_accounts.to_vec());\n\n gummyroll::cpi::insert_or_append(\n\n cpi_ctx,\n\n root_node,\n\n leaf,\n\n index,\n\n )\n\n}", "file_path": "programs/bubblegum/src/utils.rs", "rank": 10, "score": 167713.61528037788 }, { "content": "fn router(db: Pool<Postgres>) -> Router<Body, routerify_json_response::Error> {\n\n Router::builder()\n\n .middleware(Middleware::pre(logger))\n\n .middleware(Middleware::post(|mut res| async move {\n\n let headers = res.headers_mut();\n\n headers.insert(\n\n header::ACCESS_CONTROL_ALLOW_ORIGIN,\n\n HeaderValue::from_static(\"*\"),\n\n );\n\n headers.insert(\n\n header::ACCESS_CONTROL_ALLOW_METHODS,\n\n HeaderValue::from_static(\"*\"),\n\n );\n\n headers.insert(\n\n header::ACCESS_CONTROL_ALLOW_HEADERS,\n\n HeaderValue::from_static(\"*\"),\n\n );\n\n headers.insert(\n\n header::ACCESS_CONTROL_EXPOSE_HEADERS,\n\n HeaderValue::from_static(\"*\"),\n", "file_path": "nft_api/src/main.rs", "rank": 11, "score": 167619.78226158448 }, { "content": "#[inline(always)]\n\npub fn assert_with_msg(v: bool, err: ProgramError, msg: &str) -> ProgramResult {\n\n if !v {\n\n let caller = std::panic::Location::caller();\n\n msg!(\"{}. \\n{}\", msg, caller);\n\n Err(err)\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n\nconst EMPTY: [u8; 32] = [0; 32];\n\n#[account]\n\n#[derive(Default)]\n\npub struct MerkleWallet {\n\n root: [u8; 32],\n\n counter: u128,\n\n bump: u8,\n\n}\n\n\n\n#[program]\n", "file_path": "archive/merkle_wallet/src/lib.rs", "rank": 12, "score": 163134.36837158154 }, { "content": "fn make_empty_node(lvl: i64, node_index: i64) -> NodeDAO {\n\n NodeDAO {\n\n node_idx: node_index,\n\n level: lvl,\n\n hash: empty_node(lvl as u32).inner.to_vec(),\n\n seq: 0,\n\n }\n\n}\n\n\n", "file_path": "nft_api/src/main.rs", "rank": 13, "score": 147257.79364129668 }, { "content": "pub fn error_msg<T>(data_len: usize) -> impl Fn(PodCastError) -> ProgramError {\n\n move |_: PodCastError| -> ProgramError {\n\n msg!(\n\n \"Failed to load {}. Size is {}, expected {}\",\n\n type_name::<T>(),\n\n data_len,\n\n size_of::<T>(),\n\n );\n\n ProgramError::InvalidAccountData\n\n }\n\n}\n\n\n", "file_path": "programs/gummyroll/src/utils.rs", "rank": 14, "score": 142285.44729459804 }, { "content": "fn get_required_nodes_for_proof(index: i64) -> Vec<i64> {\n\n let mut indexes = vec![];\n\n let mut idx = index;\n\n while idx > 1 {\n\n if idx % 2 == 0 {\n\n indexes.push(idx + 1)\n\n } else {\n\n indexes.push(idx - 1)\n\n }\n\n idx >>= 1\n\n }\n\n println!(\"nodes {:?}\", indexes);\n\n return indexes;\n\n}\n\n\n", "file_path": "nft_api/src/main.rs", "rank": 15, "score": 139086.126929731 }, { "content": "pub fn append_leaf<'info>(\n\n seed: &Pubkey,\n\n bump: u8,\n\n gummyroll_program: &AccountInfo<'info>,\n\n authority: &AccountInfo<'info>,\n\n append_authority: &AccountInfo<'info>,\n\n merkle_roll: &AccountInfo<'info>,\n\n leaf_node: Node,\n\n) -> Result<()> {\n\n let seeds = &[seed.as_ref(), &[bump]];\n\n let authority_pda_signer = &[&seeds[..]];\n\n let cpi_ctx = CpiContext::new_with_signer(\n\n gummyroll_program.clone(),\n\n gummyroll::cpi::accounts::Append {\n\n authority: authority.clone(),\n\n append_authority: append_authority.clone(),\n\n merkle_roll: merkle_roll.clone(),\n\n },\n\n authority_pda_signer,\n\n );\n\n gummyroll::cpi::append(cpi_ctx, leaf_node)\n\n}\n\n\n", "file_path": "programs/bubblegum/src/utils.rs", "rank": 16, "score": 138245.28190440004 }, { "content": "pub fn replace_leaf<'info>(\n\n seed: &Pubkey,\n\n bump: u8,\n\n gummyroll_program: &AccountInfo<'info>,\n\n authority: &AccountInfo<'info>,\n\n merkle_roll: &AccountInfo<'info>,\n\n remaining_accounts: &[AccountInfo<'info>],\n\n root_node: Node,\n\n previous_leaf: Node,\n\n new_leaf: Node,\n\n index: u32,\n\n) -> Result<()> {\n\n let seeds = &[seed.as_ref(), &[bump]];\n\n let authority_pda_signer = &[&seeds[..]];\n\n let cpi_ctx = CpiContext::new_with_signer(\n\n gummyroll_program.clone(),\n\n gummyroll::cpi::accounts::Modify {\n\n authority: authority.clone(),\n\n merkle_roll: merkle_roll.clone(),\n\n },\n", "file_path": "programs/bubblegum/src/utils.rs", "rank": 17, "score": 138245.28190440004 }, { "content": "CREATE INDEX cl_items_uniq_operation_idx ON cl_items (tree, level, seq);\n", "file_path": "init.sql", "rank": 18, "score": 128918.61007989108 }, { "content": "#[derive(sqlx::FromRow)]\n\nstruct Level {\n\n pub level: i64,\n\n}\n\n\n", "file_path": "nft_api/src/main.rs", "rank": 19, "score": 126056.02903191096 }, { "content": "CREATE INDEX cl_items__tree_node ON cl_items (tree, node_idx);\n\n\n", "file_path": "init.sql", "rank": 20, "score": 109721.41170925749 }, { "content": "pub fn handle_change_log_event(\n\n transaction: &ReplicaTransactionInfo,\n\n) -> Result<Vec<String>, GeyserPluginError> {\n\n lazy_static! {\n\n static ref CLRE: Regex = Regex::new(\n\n r\"Program data: ((?:[A-Za-z\\d+/]{4})*(?:[A-Za-z\\d+/]{3}=|[A-Za-z\\d+/]{2}==)?$)\"\n\n )\n\n .unwrap();\n\n }\n\n let mut events: Vec<String> = vec![];\n\n let err = Err(GeyserPluginError::Custom(Box::new(\n\n PlerkleError::EventError {},\n\n )));\n\n match transaction.transaction_status_meta.log_messages.as_ref() {\n\n Some(lines) => {\n\n for line in lines {\n\n let captures = CLRE.captures(line);\n\n let b64raw = captures.and_then(|c| c.get(1)).map(|c| c.as_str());\n\n b64raw.map(|raw| events.push((raw).parse().unwrap()));\n\n }\n", "file_path": "plerkle/src/programs/gummy_roll.rs", "rank": 21, "score": 107154.65738646785 }, { "content": "fn generate_leaf_node<'info>(seeds: &[&[u8]]) -> Result<[u8; 32]> {\n\n let mut leaf = EMPTY;\n\n for seed in seeds.iter() {\n\n let hash = hashv(&[leaf.as_ref(), seed]);\n\n leaf.copy_from_slice(hash.as_ref());\n\n }\n\n Ok(leaf)\n\n}\n\n\n", "file_path": "archive/merkle_wallet/src/lib.rs", "rank": 22, "score": 99273.18171139552 }, { "content": "CREATE INDEX app_specific_idx_tree_id ON app_specific (tree_id);\n\n\n", "file_path": "init.sql", "rank": 23, "score": 96853.94158441822 }, { "content": "#[derive(sqlx::FromRow, Clone, Debug)]\n\nstruct NodeDAO {\n\n pub hash: Vec<u8>,\n\n pub level: i64,\n\n pub node_idx: i64,\n\n pub seq: i64,\n\n}\n\n\n", "file_path": "nft_api/src/main.rs", "rank": 24, "score": 96797.35012908153 }, { "content": "#[derive(Serialize, Default, Clone, PartialEq)]\n\nstruct NodeView {\n\n pub hash: String,\n\n pub level: i64,\n\n pub index: i64,\n\n}\n\n\n", "file_path": "nft_api/src/main.rs", "rank": 25, "score": 96789.326744776 }, { "content": "export function hash(left: Buffer, right: Buffer): Buffer {\n\n return Buffer.from(keccak_256.digest(Buffer.concat([left, right])));\n", "file_path": "tests/merkle-tree.ts", "rank": 26, "score": 96115.25323422352 }, { "content": "fn decode_b58_param(param: &String) -> Result<Vec<u8>, ApiError> {\n\n let pub_key = Pubkey::from_str(&*param).map_err(|e| {\n\n println!(\"{}\", e.to_string());\n\n ApiError::ParameterInvalid\n\n })?;\n\n Ok(pub_key.to_bytes().to_vec())\n\n}\n\n\n", "file_path": "nft_api/src/main.rs", "rank": 27, "score": 96097.44892667934 }, { "content": "fn node_to_view(r: NodeDAO) -> NodeView {\n\n NodeView {\n\n hash: bs58::encode(r.hash).into_string(),\n\n level: r.level,\n\n index: r.node_idx,\n\n }\n\n}\n\n\n", "file_path": "nft_api/src/main.rs", "rank": 28, "score": 94453.34852426236 }, { "content": "const generateLeafNode = (seeds) => {\n\n let leaf = Buffer.alloc(32);\n\n for (const seed of seeds) {\n\n leaf = Buffer.from(keccak_256.digest([...leaf, ...seed]));\n\n }\n\n return leaf;\n", "file_path": "tests/merkle-tree.ts", "rank": 29, "score": 90747.8937954636 }, { "content": "export async function getProofOfLeafFromServer(endpoint: string, treeId: PublicKey, i: number): Promise<NodeView[]> {\n\n const url = `${endpoint}/proof/${treeId.toString()}/${i}`;\n\n let object = await fetch(\n\n url,\n\n {\n\n method: \"GET\",\n\n }\n\n ).then(resp => resp.json());\n\n\n\n return object.data as NodeView[];\n", "file_path": "tests/merkle-tree.ts", "rank": 30, "score": 88538.46972054942 }, { "content": "fn node_list_to_view(items: Vec<NodeDAO>) -> Vec<NodeView> {\n\n let mut view = vec![];\n\n for r in items {\n\n view.push(node_to_view(r))\n\n }\n\n view\n\n}\n\n\n", "file_path": "nft_api/src/main.rs", "rank": 32, "score": 85820.33627700637 }, { "content": "pub fn get_instruction_type(full_bytes: &Vec<u8>) -> InstructionName {\n\n let disc: [u8; 8] = {\n\n let mut disc = [0; 8];\n\n disc.copy_from_slice(&full_bytes[..8]);\n\n disc\n\n };\n\n match disc {\n\n [165, 83, 136, 142, 89, 202, 47, 220] => InstructionName::CreateTree,\n\n [163, 52, 200, 231, 140, 3, 69, 186] => InstructionName::Transfer,\n\n [199, 186, 9, 79, 96, 129, 24, 106] => InstructionName::Remove,\n\n [41, 249, 249, 146, 197, 111, 56, 181] => InstructionName::Add,\n\n _ => InstructionName::Unknown\n\n }\n\n}\n\n\n\n#[program]\n\npub mod gummyroll_crud {\n\n\n\n use super::*;\n\n\n", "file_path": "programs/gummyroll_crud/src/lib.rs", "rank": 33, "score": 83254.18581857516 }, { "content": "pub fn handle_event<T: anchor_lang::Event + anchor_lang::AnchorDeserialize>(\n\n data: String,\n\n) -> Result<T, ApiError> {\n\n let borsh_bytes = match base64::decode(&data) {\n\n Ok(borsh_bytes) => borsh_bytes,\n\n _ => {\n\n return Err(ApiError::ChangeLogEventMalformed);\n\n }\n\n };\n\n\n\n let mut slice: &[u8] = &borsh_bytes[..];\n\n let disc: [u8; 8] = {\n\n let mut disc = [0; 8];\n\n disc.copy_from_slice(&borsh_bytes[..8]);\n\n slice = &slice[8..];\n\n disc\n\n };\n\n if disc != T::discriminator() {\n\n return Err(ApiError::ChangeLogEventMalformed);\n\n }\n\n\n\n let e: T = anchor_lang::AnchorDeserialize::deserialize(&mut slice)\n\n .map_err(|_| ApiError::ChangeLogEventMalformed)?;\n\n Ok(e)\n\n}\n", "file_path": "nft_api/src/events/mod.rs", "rank": 34, "score": 79574.80767773665 }, { "content": "fn recompute(mut start: [u8; 32], path: &[[u8; 32]], address: u32) -> [u8; 32] {\n\n for (ix, s) in path.iter().enumerate() {\n\n if address >> ix & 1 == 1 {\n\n let res = hashv(&[&start, s.as_ref()]);\n\n start.copy_from_slice(res.as_ref());\n\n } else {\n\n let res = hashv(&[s.as_ref(), &start]);\n\n start.copy_from_slice(res.as_ref());\n\n }\n\n }\n\n start\n\n}\n\n\n\n#[derive(Accounts)]\n\npub struct InitializeMerkleWallet<'info> {\n\n #[account(\n\n init,\n\n seeds = [\n\n MERKLE_PREFIX.as_ref(),\n\n payer.key().as_ref(),\n", "file_path": "archive/merkle_wallet/src/lib.rs", "rank": 35, "score": 74858.4309660687 }, { "content": "export default function useHashImage(data: string): DataURL {\n\n const cacheEntry = cache[data];\n\n if (cacheEntry === undefined) {\n\n const promise = new Promise<void>(async (resolve) => {\n\n const url = (await hashprint({ data })) as DataURL;\n\n cache[data] = { __type: \"result\", url };\n\n resolve();\n\n });\n\n cache[data] = { __type: \"promise\", promise };\n\n throw promise;\n\n } else if (cacheEntry.__type === \"promise\") {\n\n throw cacheEntry.promise;\n\n }\n\n return cacheEntry.url;\n", "file_path": "gummyroll-crud-ui/lib/hooks/useHashImage.ts", "rank": 36, "score": 74557.16370060622 }, { "content": "export default async function getTreesForAuthority(\n\n authority: string\n\n): Promise<TreePayload[]> {\n\n const endpointOrCluster: string | anchor.web3.Cluster =\n\n process.env.NEXT_PUBLIC_RPC_ENDPOINT_OR_CLUSTER!;\n\n let endpoint: string;\n\n try {\n\n endpoint = anchor.web3.clusterApiUrl(\n\n endpointOrCluster as anchor.web3.Cluster,\n\n true /* tls */\n\n );\n\n } catch {\n\n endpoint = endpointOrCluster as string;\n\n }\n\n const result = await new anchor.web3.Connection(\n\n endpoint\n\n ).getParsedProgramAccounts(GummyrollProgramId, \"confirmed\");\n\n return result.map((result) => ({\n\n account: result.pubkey.toBase58(),\n\n authority: authority,\n\n }));\n", "file_path": "gummyroll-crud-ui/lib/loaders/getTreesForAuthority.ts", "rank": 37, "score": 74123.63159281561 }, { "content": "export default class TreeServerNotConfiguredError extends Error {}\n", "file_path": "gummyroll-crud-ui/lib/loaders/TreeServerNotConfiguredError.ts", "rank": 38, "score": 71693.14699141974 }, { "content": "function emptyTreeNode(level: number, id: number): TreeNode {\n\n return {\n\n node: emptyNode(level),\n\n left: undefined,\n\n right: undefined,\n\n parent: undefined,\n\n level: level,\n\n id\n\n }\n", "file_path": "tests/merkle-tree.ts", "rank": 39, "score": 70375.45530706352 }, { "content": "const MOCK_DATA_BY_OWNER: Record<string, ReadonlyArray<AssetPayload>> = {\n\n C2jDL4pcwpE2pP5EryTGn842JJUJTcurPGZUquQjySxK: allAssets,\n", "file_path": "gummyroll-crud-ui/lib/loaders/getAssetsForOwner.ts", "rank": 40, "score": 64162.01919423786 }, { "content": "#[derive(sqlx::FromRow)]\n\nstruct Root {\n\n pub hash: Vec<u8>,\n\n}\n\n\n", "file_path": "nft_api/src/main.rs", "rank": 41, "score": 62932.07109577941 }, { "content": "export function hashMessages(messages: OwnedMessage[]): Buffer[] {\n\n return messages.map((ownedMessage) => {\n\n return hashOwnedMessage(ownedMessage)\n\n });\n", "file_path": "cli/helpers/utils.ts", "rank": 42, "score": 62611.62788371437 }, { "content": "export function hashLeaves(leaves: Buffer[]): Buffer {\n\n let nodes = leaves;\n\n let level = 0;\n\n while (level < MAX_DEPTH) {\n\n let next_nodes = [];\n\n\n\n if (nodes.length == 0) {\n\n nodes = [emptyNode(level), emptyNode(level)];\n\n }\n\n\n\n while (nodes.length > 0) {\n\n let left = nodes.pop();\n\n let right: Buffer;\n\n\n\n if (nodes.length > 0) {\n\n right = nodes.pop();\n\n } else {\n\n right = emptyNode(level);\n\n }\n\n next_nodes.push(hash(left, right));\n\n }\n\n\n\n level++;\n\n nodes = next_nodes\n\n }\n\n return nodes[0];\n", "file_path": "tests/merkle-tree.ts", "rank": 43, "score": 62181.61811562555 }, { "content": "export function emptyNode(level: number): Buffer {\n\n if (CACHE_EMPTY_NODE.has(level)) {\n\n return CACHE_EMPTY_NODE.get(level);\n\n }\n\n if (level == 0) {\n\n return Buffer.alloc(32)\n\n }\n\n\n\n let result = hash(emptyNode(level - 1), emptyNode(level - 1));\n\n CACHE_EMPTY_NODE.set(level, result);\n\n return result;\n", "file_path": "tests/merkle-tree.ts", "rank": 44, "score": 62022.430564540584 }, { "content": "#[derive(sqlx::FromRow, Clone, Debug)]\n\nstruct AssetDAO {\n\n pub data: String,\n\n pub index: i64,\n\n pub owner: Vec<u8>,\n\n pub tree: Vec<u8>,\n\n pub admin: Vec<u8>,\n\n pub hash: Vec<u8>,\n\n pub level: i64\n\n}\n\n\n", "file_path": "nft_api/src/main.rs", "rank": 45, "score": 61810.89834384339 }, { "content": "#[derive(Default)]\n\nstruct AppEvent {\n\n op: String,\n\n message: String,\n\n leaf: String,\n\n owner: String,\n\n tree_id: String,\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n let main_pool = PgPoolOptions::new()\n\n .max_connections(5)\n\n .connect(\"postgres://solana:solana@db/solana\")\n\n .await\n\n .unwrap();\n\n let router = router(main_pool);\n\n // Create a Service from the router above to handle incoming requests.\n\n let service = RouterService::new(router).unwrap();\n\n // The address on which the server will be listening.\n\n let addr = SocketAddr::from(([0, 0, 0, 0], 9090));\n\n // Create a server by passing the created service to `.serve` method.\n\n let server = Server::bind(&addr).serve(service);\n\n\n\n println!(\"App is running on: {}\", addr);\n\n if let Err(err) = server.await {\n\n eprintln!(\"Server error: {}\", err);\n\n }\n\n}\n", "file_path": "nft_api/src/main.rs", "rank": 46, "score": 61799.031155880395 }, { "content": "#[derive(Serialize)]\n\nstruct AssetView {\n\n pub data: String,\n\n pub index: i64,\n\n pub owner: String,\n\n pub treeAccount: String,\n\n pub treeAdmin: String,\n\n pub hash: String,\n\n}\n\n\n", "file_path": "nft_api/src/main.rs", "rank": 47, "score": 61799.031155880395 }, { "content": "#[derive(Serialize)]\n\nstruct AssetProof {\n\n pub root: String,\n\n pub hash: String,\n\n pub proof: Vec<String>,\n\n}\n\n\n", "file_path": "nft_api/src/main.rs", "rank": 48, "score": 61799.031155880395 }, { "content": "fn main() {\n\n let G = constants::ED25519_BASEPOINT_POINT;\n\n let mut v = vec![];\n\n let mut rng = thread_rng();\n\n let mut A = Accumulator::new();\n\n for i in 0..4096 {\n\n let sk = Scalar::random(&mut rng);\n\n let pk = G * sk;\n\n let msg = format!(\"Hello {}\", i);\n\n let elem = Scalar::hash_from_bytes::<Sha512>(msg.as_bytes());\n\n v.push((sk, pk, msg));\n\n A.add(elem + sk);\n\n }\n\n\n\n for _ in 0..1024 {\n\n let (sk, _pk, msg) = v.choose(&mut rng).unwrap();\n\n let elem = Scalar::hash_from_bytes::<Sha512>(msg.as_bytes()) + sk;\n\n let head = A.get();\n\n match A.remove(elem, head * elem.invert(), head) {\n\n Some(_) => println!(\"Removed message: {}\", msg),\n\n None => {}\n\n }\n\n }\n\n\n\n println!(\"{:?}\", A.get());\n\n}\n", "file_path": "archive/ecc-accumulator/src/main.rs", "rank": 49, "score": 60970.819872400956 }, { "content": "fn main() {}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::{merkle::*, MerkleAccumulator};\n\n use rand::prelude::SliceRandom;\n\n use rand::{self, Rng};\n\n use rand::{rngs::ThreadRng, thread_rng};\n\n\n\n /// Initializes off-chain Merkle Tree & creates on-chain tree\n\n #[inline]\n\n fn setup() -> (MerkleAccumulator, MerkleTree) {\n\n // Setup\n\n let mut leaves = vec![];\n\n // on-chain merkle change-record\n\n let merkle = MerkleAccumulator::new();\n\n\n\n // Init off-chain Merkle tree with leaves\n\n for _ in 0..(1 << MAX_DEPTH) {\n\n let leaf = EMPTY;\n", "file_path": "archive/merkle-accumulator/src/main.rs", "rank": 50, "score": 60970.819872400956 }, { "content": "function hashOwnedMessage(ownedMessage: OwnedMessage): Buffer {\n\n return hash(new PublicKey(ownedMessage.owner).toBuffer(), Buffer.from(ownedMessage.message));\n", "file_path": "cli/helpers/utils.ts", "rank": 51, "score": 60926.5557007101 }, { "content": "export async function getRootFromServer(endpoint: string, treeId: PublicKey): Promise<string> {\n\n const url = `${endpoint}/root/${treeId.toString()}`;\n\n let object = await fetch(url, { method: \"GET\", }).then(resp => resp.json());\n\n return object.data as string;\n", "file_path": "tests/merkle-tree.ts", "rank": 53, "score": 60526.63495602289 }, { "content": "export function getProofOfLeaf(tree: Tree, idx: number, minimizeProofHeight: boolean = false, treeHeight: number = -1, verbose = false): TreeNode[] {\n\n let proof: TreeNode[] = [];\n\n\n\n let node = tree.leaves[idx];\n\n\n\n let height = 0;\n\n while (typeof node.parent !== 'undefined') {\n\n if (minimizeProofHeight && height >= treeHeight) {\n\n break;\n\n }\n\n if (verbose) {\n\n console.log(`${node.level}: ${Uint8Array.from(node.node)}`);\n\n }\n\n let parent = node.parent;\n\n if (parent.left.id === node.id) {\n\n proof.push(parent.right);\n\n\n\n const hashed = hash(node.node, parent.right.node);\n\n if (!hashed.equals(parent.node)) {\n\n console.log(hashed);\n\n console.log(parent.node);\n\n throw new Error(\"Invariant broken when hashing left node\")\n\n }\n\n } else {\n\n proof.push(parent.left);\n\n\n\n const hashed = hash(parent.left.node, node.node);\n\n if (!hashed.equals(parent.node)) {\n\n console.log(hashed);\n\n console.log(parent.node);\n\n throw new Error(\"Invariant broken when hashing right node\")\n\n }\n\n }\n\n node = parent;\n\n height++;\n\n }\n\n\n\n return proof;\n", "file_path": "tests/merkle-tree.ts", "rank": 54, "score": 60476.87409805698 }, { "content": "let CACHE_EMPTY_NODE = new Map<number, Buffer>();\n", "file_path": "tests/merkle-tree.ts", "rank": 55, "score": 60353.21550019621 }, { "content": "const generateLeafNode = (seeds) => {\n\n let leaf = Buffer.alloc(32);\n\n for (const seed of seeds) {\n\n leaf = Buffer.from(keccak_256.digest([...leaf, ...seed]));\n\n }\n\n return leaf;\n", "file_path": "archive/merkle_wallet-test.ts", "rank": 56, "score": 59083.05558214922 }, { "content": "CREATE INDEX cl_items_level ON cl_items (level);\n", "file_path": "init.sql", "rank": 57, "score": 59014.54096556793 }, { "content": "const TREE_RPC_HOST = HOST;\n", "file_path": "tests/continuous_gummyroll-test.ts", "rank": 58, "score": 58951.46504907649 }, { "content": "const TREE_RPC_PORT = \"9090\";\n", "file_path": "tests/continuous_gummyroll-test.ts", "rank": 59, "score": 58951.46504907649 }, { "content": "export async function getProofOfAssetFromServer(endpoint: string, treeId: PublicKey, i: number): Promise<AssetProof> {\n\n const url = `${endpoint}/assets/${treeId.toString()}/${i}/proof`;\n\n let object = await fetch(\n\n url,\n\n {\n\n method: \"GET\",\n\n }\n\n ).then(resp => resp.json())\n\n .catch((e) => console.log(e));\n\n\n\n return object.data as AssetProof;\n", "file_path": "tests/merkle-tree.ts", "rank": 60, "score": 58940.36771696685 }, { "content": "pub trait Messenger {\n\n fn new() -> Result<Self>\n\n where\n\n Self: Sized;\n\n\n\n fn send_account(&self, account: &ReplicaAccountInfo, slot: u64, is_startup: bool)\n\n -> Result<()>;\n\n fn send_slot_status(&self, slot: u64, parent: Option<u64>, status: SlotStatus) -> Result<()>;\n\n fn send_transaction(\n\n &mut self,\n\n transaction_info: &ReplicaTransactionInfo,\n\n slot: u64,\n\n ) -> Result<()>;\n\n fn send_block(&mut self, block_info: &ReplicaBlockInfo) -> Result<()>;\n\n fn recv_account(&self) -> Result<()>;\n\n fn recv_slot_status(&self) -> Result<()>;\n\n fn recv_transaction(&self) -> Result<()>;\n\n fn recv_block(&self) -> Result<()>;\n\n}\n", "file_path": "messenger/src/lib.rs", "rank": 61, "score": 58773.292010410034 }, { "content": "CREATE INDEX app_specific_idx_owner ON app_specific (owner);\n", "file_path": "init.sql", "rank": 62, "score": 58234.89688383504 }, { "content": "CREATE INDEX cl_items_hash_idx ON cl_items (hash);\n", "file_path": "init.sql", "rank": 63, "score": 58153.23478823157 }, { "content": "-- Index All the things space is cheap\n\nCREATE INDEX cl_items_tree_idx ON cl_items (tree);\n", "file_path": "init.sql", "rank": 64, "score": 57663.23079023625 }, { "content": "CREATE INDEX cl_items_node_idx ON cl_items (node_idx);\n", "file_path": "init.sql", "rank": 65, "score": 57214.39640176902 }, { "content": "import type { NextApiRequest, NextApiResponse } from \"next\";\n\nimport getTreesForAuthority from \"../../../../lib/loaders/getTreesForAuthority\";\n\n\n\nexport default async function handler(\n\n req: NextApiRequest,\n\n res: NextApiResponse\n\n) {\n\n res.status(200).json({\n\n data: await getTreesForAuthority(req.query[\"ownerPubkey\"] as string),\n\n status: 200,\n\n success: true,\n\n });\n\n}\n", "file_path": "gummyroll-crud-ui/pages/api/owner/[ownerPubkey]/trees.ts", "rank": 66, "score": 57125.80360235222 }, { "content": "const hashprint = require(\"hashprintjs\");\n", "file_path": "gummyroll-crud-ui/lib/hooks/useHashImage.ts", "rank": 67, "score": 56385.483242730756 }, { "content": "const cache: Record<\n\n string,\n\n | void\n\n | { __type: \"promise\"; promise: Promise<void> }\n\n | { __type: \"result\"; url: DataURL }\n", "file_path": "gummyroll-crud-ui/lib/hooks/useHashImage.ts", "rank": 68, "score": 56385.483242730756 }, { "content": "pub trait ProgramParser {\n\n fn id(&self) -> Pubkey;\n\n}\n\n\n", "file_path": "plerkle/src/program_parser/mod.rs", "rank": 69, "score": 55595.664136196996 }, { "content": "pub trait ProgramInstructionParser {\n\n \n\n fn parse_instructions(&self, Vec<Inst>) {\n\n \n\n }\n\n\n\n}\n\n\n\n\n\npub struct GummyRollCrudParser {\n\n id: Pubkey\n\n}\n\n\n\nimpl ProgramParser for GummyRollCrudParser {\n\n fn id(&self) -> Pubkey{\n\n self.id\n\n }\n\n}\n\n\n\nimpl ProgramInstructionParser for GummyRollCrudParser {\n\n \n\n}", "file_path": "plerkle/src/program_parser/mod.rs", "rank": 70, "score": 54656.96797221234 }, { "content": "pub trait ProgramLogParser {\n\n\n\n}\n\n\n", "file_path": "plerkle/src/program_parser/mod.rs", "rank": 71, "score": 54656.96797221234 }, { "content": "let url: URL | null | undefined;\n", "file_path": "gummyroll-crud-ui/lib/loaders/getTreeServerAPIURL.ts", "rank": 72, "score": 54644.07228884421 }, { "content": "import { Keypair, PublicKey } from '@solana/web3.js';\n\nimport { program } from 'commander';\n\nimport log from 'loglevel';\n\nimport { buildTree } from '../tests/merkle-tree';\n\nimport { writeHashes, loadMessages, hashMessages, loadLeaves, writeTree, writeMetadata } from './helpers/utils';\n\n\n\nprogram.version('0.0.1');\n\nlog.setLevel('DEBUG');\n\n\n\nprogram\n\n .command('createNodes')\n\n .option(\n\n '-f, --input-file <string>',\n\n 'CSV file containing leaves',\n\n )\n\n .option(\n\n '-o, --out-file <string>',\n\n 'Output CSV file, ready to be uploaded to arweave',\n\n 'outfile.csv'\n\n )\n\n .option(\n\n '-d, --max-depth <number>',\n\n 'Max depth of the tree to be supported',\n\n '14'\n\n )\n\n .action(async (directory, cmd) => {\n\n const { inputFile, outFile, maxDepth } = cmd.opts();\n\n\n\n log.info(\"Received input file:\", inputFile);\n\n log.info(\"Writing to file:\", outFile);\n\n log.info(\"depth is:\", maxDepth);\n\n log.info('\\n');\n\n\n\n // Load in leaves, up to max depth\n\n const leaves = loadLeaves(inputFile, maxDepth);\n\n\n\n // Create tree in memory\n\n const tree = buildTree(leaves);\n\n\n\n // BFS search of tree && write leaves to CSV in 'GM CL' schema\n\n writeTree(tree, outFile);\n\n });\n\n\n\nprogram.command('hashMessages')\n\n .option(\n\n '-f, --input-file <string>',\n\n 'CSV file containing owner,message columns',\n\n )\n\n .option(\n\n '-o, --out-file <string>',\n\n 'Output CSV to be used in batchMint',\n\n 'test-input.csv'\n\n )\n\n .action(async (directory, cmd) => {\n\n const { inputFile, outFile } = cmd.opts();\n\n const messages = loadMessages(inputFile);\n\n const hashes = hashMessages(messages);\n\n writeHashes(hashes, outFile);\n\n });\n\n\n\nprogram.command('prepareMetadata')\n\n .option(\n\n '-f, --input-file <string>',\n\n 'CSV file containing owner,message columns',\n\n )\n\n .option(\n\n '-o, --out-file <string>',\n\n 'Output CSV to be used in batchMint',\n\n 'metadata.csv'\n\n )\n\n .action(async (directory, cmd) => {\n\n const { inputFile, outFile, pubkey } = cmd.opts();\n\n\n\n let treeId: PublicKey;\n\n if (pubkey == undefined) {\n\n treeId = Keypair.generate().publicKey;\n\n } else {\n\n treeId = new PublicKey(pubkey);\n\n }\n\n\n\n const messages = loadMessages(inputFile);\n\n writeMetadata(messages, outFile);\n\n });\n\n\n\n\n\nprogram.parse(process.argv);\n", "file_path": "cli/hash-cli.ts", "rank": 73, "score": 53615.21774645704 }, { "content": "pub trait ZeroCopy: Pod {\n\n fn load_mut_bytes<'a>(data: &'a mut [u8]) -> Result<&'a mut Self> {\n\n let size = size_of::<Self>();\n\n let data_len = data.len();\n\n\n\n Ok(bytemuck::try_from_bytes_mut(&mut data[..size])\n\n .map_err(error_msg::<Self>(data_len))\n\n .unwrap())\n\n }\n\n}\n\n\n", "file_path": "programs/gummyroll/src/utils.rs", "rank": 74, "score": 52770.343913169345 }, { "content": "fn asset_to_view(r: AssetDAO) -> AssetView {\n\n AssetView {\n\n index: node_idx_to_leaf_idx(r.index, r.level as u32),\n\n treeAccount: bs58::encode(r.tree).into_string(),\n\n owner: bs58::encode(r.owner).into_string().to_string(),\n\n treeAdmin: bs58::encode(r.admin).into_string().to_string(),\n\n hash: bs58::encode(r.hash).into_string().to_string(),\n\n data: r.data,\n\n }\n\n}\n\n\n", "file_path": "nft_api/src/main.rs", "rank": 75, "score": 48420.780890302565 }, { "content": "import { useEffect, useState } from \"react\";\n\n\n\nconst hashprint = require(\"hashprintjs\");\n\n\n\ntype DataURL = string;\n\n\n\nconst cache: Record<\n\n string,\n\n | void\n\n | { __type: \"promise\"; promise: Promise<void> }\n\n | { __type: \"result\"; url: DataURL }\n\n> = {};\n\n\n\nexport default function useHashImage(data: string): DataURL {\n\n const cacheEntry = cache[data];\n\n if (cacheEntry === undefined) {\n\n const promise = new Promise<void>(async (resolve) => {\n\n const url = (await hashprint({ data })) as DataURL;\n\n cache[data] = { __type: \"result\", url };\n\n resolve();\n\n });\n\n cache[data] = { __type: \"promise\", promise };\n\n throw promise;\n\n } else if (cacheEntry.__type === \"promise\") {\n\n throw cacheEntry.promise;\n\n }\n\n return cacheEntry.url;\n\n}\n", "file_path": "gummyroll-crud-ui/lib/hooks/useHashImage.ts", "rank": 76, "score": 48015.541153692524 }, { "content": "import * as anchor from \"@project-serum/anchor\";\n\nimport GummyrollProgramId from \"../anchor_programs/GummyrollProgramId\";\n\n\n\nexport type TreePayload = Readonly<{\n\n account: string;\n\n authority: string;\n\n}>;\n\n\n\nexport default async function getTreesForAuthority(\n\n authority: string\n\n): Promise<TreePayload[]> {\n\n const endpointOrCluster: string | anchor.web3.Cluster =\n\n process.env.NEXT_PUBLIC_RPC_ENDPOINT_OR_CLUSTER!;\n\n let endpoint: string;\n\n try {\n\n endpoint = anchor.web3.clusterApiUrl(\n\n endpointOrCluster as anchor.web3.Cluster,\n\n true /* tls */\n\n );\n\n } catch {\n\n endpoint = endpointOrCluster as string;\n\n }\n\n const result = await new anchor.web3.Connection(\n\n endpoint\n\n ).getParsedProgramAccounts(GummyrollProgramId, \"confirmed\");\n\n return result.map((result) => ({\n\n account: result.pubkey.toBase58(),\n\n authority: authority,\n\n }));\n\n}\n", "file_path": "gummyroll-crud-ui/lib/loaders/getTreesForAuthority.ts", "rank": 77, "score": 47736.34224469623 }, { "content": "create index app_specific_shizzle_mynizzle On app_specific_ownership (authority);", "file_path": "init.sql", "rank": 78, "score": 47461.02661592726 }, { "content": "export default class TreeServerNotConfiguredError extends Error {}\n", "file_path": "gummyroll-crud-ui/lib/loaders/TreeServerNotConfiguredError.ts", "rank": 79, "score": 46701.06648294117 }, { "content": "let url: URL | null | undefined;\n\nexport default function getTreeServerAPIURL() {\n\n if (url === undefined) {\n\n const config = process.env.NEXT_PUBLIC_TREE_SERVER_API_ENDPOINT;\n\n if (!config) {\n\n url = null;\n\n } else {\n\n url = new URL(config);\n\n }\n\n }\n\n return url;\n\n}\n", "file_path": "gummyroll-crud-ui/lib/loaders/getTreeServerAPIURL.ts", "rank": 80, "score": 46701.06648294117 }, { "content": "import getTreeServerAPIURL from \"./getTreeServerAPIURL\";\n\nimport TreeServerNotConfiguredError from \"./TreeServerNotConfiguredError\";\n\n\n\nexport default async function getTreeServerAPIMethod<TResponse>(path: string) {\n\n const treeServerURL = getTreeServerAPIURL();\n\n if (!treeServerURL) {\n\n throw new TreeServerNotConfiguredError();\n\n }\n\n const url = new URL(path, treeServerURL);\n\n console.log(url)\n\n const response = await fetch(url.toString());\n\n if (response.ok) {\n\n const json = (await response.json()) as { data: TResponse };\n\n return json.data;\n\n } else {\n\n throw new Error(response.statusText);\n\n }\n\n}\n", "file_path": "gummyroll-crud-ui/lib/loaders/getTreeServerAPIMethod.ts", "rank": 81, "score": 45751.02062326615 }, { "content": "fn asset_list_to_view(items: Vec<AssetDAO>) -> Vec<AssetView> {\n\n let mut view = vec![];\n\n for r in items {\n\n view.push(asset_to_view(r))\n\n }\n\n view\n\n}\n\n\n", "file_path": "nft_api/src/main.rs", "rank": 82, "score": 42169.1521574802 }, { "content": "export function buildTree(leaves: Buffer[]): Tree {\n\n let [nodes, finalLeaves] = buildLeaves(leaves);\n\n let seqNum = leaves.length;\n\n while (nodes.size() > 1) {\n\n let left = nodes.dequeue();\n\n const level = left.level;\n\n\n\n let right: TreeNode;\n\n if (level != nodes.peek().level) {\n\n right = emptyTreeNode(level, seqNum);\n\n seqNum++;\n\n } else {\n\n right = nodes.dequeue();\n\n }\n\n\n\n let parent: TreeNode = {\n\n node: hash(left.node, right.node),\n\n left: left,\n\n right: right,\n\n parent: undefined,\n\n level: level + 1,\n\n id: seqNum\n\n }\n\n left.parent = parent;\n\n right.parent = parent;\n\n nodes.enqueue(parent);\n\n seqNum++;\n\n }\n\n\n\n return {\n\n root: nodes.peek().node,\n\n leaves: finalLeaves,\n\n }\n", "file_path": "tests/merkle-tree.ts", "rank": 83, "score": 40833.345697883124 }, { "content": "with tree as (\n\n select\n\n R.level,\n\n R.tree,\n\n R.node_idx,\n\n max(R.seq) as seq\n\n from\n\n cl_items as R\n\n group by\n\n R.level,\n\n R.tree,\n\n R.node_idx\n\n),\n\n merkle as (\n\n select\n\n T.level,\n\n T.tree,\n\n T.node_idx,\n\n R.hash,\n\n R.seq\n", "file_path": "archive/tree.sql", "rank": 84, "score": 39710.16924321418 }, { "content": " from\n\n tree as T\n\n inner join cl_items R on R.node_idx = T.node_idx\n\n and R.seq = T.seq\n\n )\n\n\n\n select distinct on (node_idx) * from cl_items order by node_idx, seq, level desc ;\n\n-- select *\n\n-- from merkle\n\n\n\n\n\nwith node as (select level, node_idx from cl_items where node_idx = 16385 order by seq desc limit 1)\n", "file_path": "archive/tree.sql", "rank": 85, "score": 39706.188966512724 }, { "content": "const OwnerassetsList: NextPage = () => {\n\n const router = useRouter();\n\n const { publicKey } = useWallet();\n\n const ownerPubkey = router.query.ownerPubkey;\n\n const { data: assets } = useSWRImmutable<\n\n Awaited<ReturnType<typeof getAssetsForOwner>>\n\n >([\"owner\", ownerPubkey, \"assets\"]);\n\n if (!assets || assets.length === 0) {\n\n return (\n\n <div style={{ margin: \"20px\" }}>\n\n <h1>No assets</h1>\n\n </div>\n\n );\n\n }\n\n return (\n\n <div style={{ margin: \"20px\" }}>\n\n <h1>{ownerPubkey}&apos;s assets</h1>\n\n <ImageList cols={4} gap={16}>\n\n {assets.map((asset) => (\n\n <ImageListItem key={`${asset.treeAccount}:${asset.index}`}>\n\n <OwnerAsset {...asset} />\n\n </ImageListItem>\n\n ))}\n\n </ImageList>\n\n </div>\n\n );\n", "file_path": "gummyroll-crud-ui/pages/owner/[ownerPubkey]/assets.tsx", "rank": 86, "score": 37895.51554456813 }, { "content": "export default async function getAssetsForOwner(\n\n ownerPubkey: string\n\n): Promise<ReadonlyArray<AssetPayload> | undefined> {\n\n try {\n\n const assets = await getTreeServerAPIMethod<AssetPayload[]>(\n\n `/owner/${ownerPubkey}/assets`\n\n );\n\n console.debug(`API /owner/${ownerPubkey}/assets`, assets);\n\n return assets;\n\n } catch (e) {\n\n if (e instanceof TreeServerNotConfiguredError) {\n\n return MOCK_DATA_BY_OWNER[ownerPubkey];\n\n }\n\n throw e;\n\n }\n", "file_path": "gummyroll-crud-ui/lib/loaders/getAssetsForOwner.ts", "rank": 87, "score": 37271.60477712288 }, { "content": "use anchor_lang::prelude::*;\n\nuse std::ops::Deref;\n\nuse std::ops::DerefMut;\n\n\n\npub const EMPTY: Node = Node {\n\n inner: [0 as u8; 32],\n\n};\n\n\n\n#[derive(Debug, Copy, Clone, AnchorDeserialize, AnchorSerialize, Default, PartialEq)]\n\npub struct Node {\n\n pub inner: [u8; 32],\n\n}\n\n\n\nimpl Node {\n\n pub fn new(inner: [u8; 32]) -> Self {\n\n Self { inner }\n\n }\n\n}\n\n\n\nimpl Deref for Node {\n", "file_path": "programs/gummyroll/src/state/node.rs", "rank": 88, "score": 36131.311555228654 }, { "content": " type Target = [u8; 32];\n\n fn deref(&self) -> &Self::Target {\n\n &self.inner\n\n }\n\n}\n\n\n\nimpl DerefMut for Node {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.inner\n\n }\n\n}\n\n\n\nimpl AsRef<[u8; 32]> for Node {\n\n fn as_ref(&self) -> &[u8; 32] {\n\n &self.inner\n\n }\n\n}\n\n\n\nimpl From<[u8; 32]> for Node {\n\n fn from(inner: [u8; 32]) -> Self {\n\n Self { inner }\n\n }\n\n}", "file_path": "programs/gummyroll/src/state/node.rs", "rank": 89, "score": 36117.530539102874 }, { "content": "use anchor_lang::{prelude::*, solana_program::keccak};\n\nuse gummyroll::state::node::Node;\n\n\n\n#[event]\n\npub struct LeafSchemaEvent {\n\n pub owner: Pubkey,\n\n pub delegate: Pubkey, // Defaults to owner\n\n pub nonce: u128,\n\n pub data_hash: [u8; 32],\n\n}\n\n\n\n#[derive(AnchorDeserialize, AnchorSerialize, Clone, Copy, Default, Debug)]\n\npub struct LeafSchema {\n\n pub owner: Pubkey,\n\n pub delegate: Pubkey, // Defaults to owner\n\n pub nonce: u128,\n\n pub data_hash: [u8; 32],\n\n}\n\n\n\nimpl LeafSchema {\n", "file_path": "programs/bubblegum/src/state/leaf_schema.rs", "rank": 90, "score": 35148.12049111675 }, { "content": " pub fn new(owner: Pubkey, delegate: Pubkey, nonce: u128, data_hash: [u8; 32]) -> Self {\n\n Self {\n\n owner,\n\n delegate,\n\n nonce,\n\n data_hash,\n\n }\n\n }\n\n\n\n pub fn to_event(&self) -> LeafSchemaEvent {\n\n LeafSchemaEvent {\n\n owner: self.owner,\n\n delegate: self.delegate,\n\n nonce: self.nonce,\n\n data_hash: self.data_hash,\n\n }\n\n }\n\n\n\n pub fn to_node(&self) -> Node {\n\n let hashed_leaf = keccak::hashv(&[\n\n self.owner.as_ref(),\n\n self.delegate.as_ref(),\n\n self.nonce.to_le_bytes().as_ref(),\n\n self.data_hash.as_ref(),\n\n ])\n\n .to_bytes();\n\n Node::new(hashed_leaf)\n\n }\n\n}", "file_path": "programs/bubblegum/src/state/leaf_schema.rs", "rank": 91, "score": 35143.88292803193 }, { "content": "import type { NextApiRequest, NextApiResponse } from \"next\";\n\nimport getAssetsForOwner from \"../../../../lib/loaders/getAssetsForOwner\";\n\n\n\nexport default async function handler(\n\n req: NextApiRequest,\n\n res: NextApiResponse\n\n) {\n\n res.status(200).json({\n\n data: await getAssetsForOwner(req.query[\"ownerPubkey\"] as string),\n\n status: 200,\n\n success: true,\n\n });\n\n}\n", "file_path": "gummyroll-crud-ui/pages/api/owner/[ownerPubkey]/assets.ts", "rank": 92, "score": 33920.779835148096 }, { "content": "export function bfs<T>(tree: Tree, iterFunc: (node: TreeNode, nodeIdx: number) => T): T[] {\n\n let toExplore = [getRoot(tree)];\n\n const results = []\n\n let idx = 0;\n\n while (toExplore.length) {\n\n const nextLevel = [];\n\n for (let i = 0; i < toExplore.length; i++) {\n\n const node = toExplore[i];\n\n if (node.left) {\n\n nextLevel.push(node.left);\n\n }\n\n if (node.right) {\n\n nextLevel.push(node.right);\n\n }\n\n results.push(iterFunc(node, idx));\n\n idx++;\n\n }\n\n toExplore = nextLevel;\n\n }\n\n return results;\n", "file_path": "tests/merkle-tree.ts", "rank": 93, "score": 31785.629628921997 }, { "content": "export function loadMessages(inputFile: string): OwnedMessage[] {\n\n const messages = parse(fs.readFileSync(inputFile).toString(), {\n\n columns: true,\n\n skipEmptyLines: true,\n\n });\n\n return messages;\n", "file_path": "cli/helpers/utils.ts", "rank": 94, "score": 31340.967115246825 }, { "content": "const PROGRAM_ID = \"GRoLLMza82AiYN7W9S9KCCtCyyPRAQP2ifBy4v4D5RMD\";\n", "file_path": "tests/txLength.ts", "rank": 95, "score": 31317.540562746155 }, { "content": "const HOST = \"<undefined>\";\n", "file_path": "tests/continuous_gummyroll-test.ts", "rank": 96, "score": 31305.836856459642 }, { "content": "function chunk<T>(arr: T[], size: number): T[][] {\n\n return Array.from({ length: Math.ceil(arr.length / size) }, (_: any, i: number) =>\n\n arr.slice(i * size, i * size + size)\n\n );\n", "file_path": "tests/continuous_gummyroll-test.ts", "rank": 97, "score": 31305.836856459642 }, { "content": "let Gummyroll;\n", "file_path": "tests/continuous_gummyroll-test.ts", "rank": 98, "score": 31305.836856459642 }, { "content": "export function getRoot(tree: Tree): TreeNode {\n\n let node = tree.leaves[0];\n\n while (node.parent) {\n\n node = node.parent;\n\n }\n\n return node;\n", "file_path": "tests/merkle-tree.ts", "rank": 99, "score": 30906.52068059935 } ]
Rust
tapdance-rust-logic/src/util.rs
refraction-networking/tapdance
39262efa194b520f3187ad98cab8efb953eccf39
extern crate libc; use std::fs::File; use std::io::prelude::*; use std::io::BufReader; use mio::Ready; use mio::unix::UnixReady; use pnet::packet::Packet; use pnet::packet::tcp::{TcpOptionNumbers, TcpPacket}; pub fn all_unix_events() -> UnixReady { UnixReady::from(Ready::readable() | Ready::writable()) | UnixReady::hup() | UnixReady::error() } pub fn all_but_writable() -> Ready { Ready::from(UnixReady::from(Ready::readable()) | UnixReady::hup() | UnixReady::error()) } pub fn all_but_readable() -> UnixReady { UnixReady::from(Ready::writable()) | UnixReady::hup() | UnixReady::error() } pub fn hup_and_error() -> Ready { Ready::from(UnixReady::hup() | UnixReady::error()) } #[inline] pub fn inet_htoa(ip: u32) -> String { format!("{}.{}.{}.{}", (ip >> 24) & 0xff, (ip >> 16) & 0xff, (ip >> 8) & 0xff, (ip) & 0xff) } #[inline] pub fn deser_be_u32_slice(arr: &[u8]) -> u32 { if arr.len() != 4 { error!("deser_be_u32_slice given bad slice. length: {}", arr.len()); return 0; } (arr[0] as u32) << 24 | (arr[1] as u32) << 16 | (arr[2] as u32) << 8 | (arr[3] as u32) } #[inline] pub fn deser_be_u32(arr: &[u8; 4]) -> u32 { (arr[0] as u32) << 24 | (arr[1] as u32) << 16 | (arr[2] as u32) << 8 | (arr[3] as u32) } pub fn get_tcp_timestamps(tcp_pkt: &TcpPacket) -> (u32, u32) { match tcp_pkt.get_options_iter() .find(|x| x.get_number() == TcpOptionNumbers::TIMESTAMPS) { Some(p) => (deser_be_u32_slice(&p.payload()[0..4]), deser_be_u32_slice(&p.payload()[4..8])), None => (0, 0), } } pub fn tcp_seq_is_wrapped(s1: u32, s2: u32) -> bool { ((s1 as i64) - (s2 as i64)).abs() > 2147483648 } pub fn tcp_seq_lte(a: u32, b: u32) -> bool { if a == b { true } else { let res = a < b; if tcp_seq_is_wrapped(a, b) { !res } else { res } } } pub fn tcp_seq_lt(a: u32, b: u32) -> bool { if a == b { false } else { let res = a < b; if tcp_seq_is_wrapped(a, b) { !res } else { res } } } pub fn mem_used_kb() -> u64 { let my_pid: i32 = unsafe { libc::getpid() }; let f = match File::open(format!("/proc/{}/status", my_pid)) { Ok(f) => f, Err(e) => { error!("Failed to open /proc/{}/status: {:?}", my_pid, e); return 0; } }; let buf_f = BufReader::new(f); for l in buf_f.lines() { if let Ok(line) = l { if line.contains("VmRSS") { let (_, vmrss_gone) = line.split_at(6); let starts_at_number = vmrss_gone.trim_left(); if let Some(kb_ind) = starts_at_number.find("kB") { let (kb_gone, _) = starts_at_number.split_at(kb_ind); let just_number = kb_gone.trim_right(); if let Ok(as_u64) = just_number.parse::<u64>() { return as_u64; } } } } else { error!("Error reading /proc/{}/status", my_pid); return 0; } } error!("Failed to parse a VmRSS value out of /proc/{}/status!", my_pid); return 0; } #[cfg(test)] mod tests { use util; #[test] fn mem_used_kb_parses_something() { assert!(util::mem_used_kb() > 0); } }
extern crate libc; use std::fs::File; use std::io::prelude::*; use std::io::BufReader; use mio::Ready; use mio::unix::UnixReady; use pnet::packet::Packet; use pnet::packet::tcp::{TcpOptionNumbers, TcpPacket}; pub fn all_unix_events() -> UnixReady { UnixReady::from(Ready::readable() | Ready::writable()) | UnixReady::hup() | UnixReady::error() } pub fn all_but_writable() -> Ready { Ready::from(UnixReady::from(Ready::readable()) | UnixReady::hup() | UnixReady::error()) } pub fn all_but_readable() -> UnixReady { UnixReady::from(Ready::writable()) | UnixReady::hup() | UnixReady::error() } pub fn hup_and_error() -> Ready { Ready::from(UnixReady::hup() | UnixReady::error()) } #[inline] pub fn inet_htoa(ip: u32) -> String { format!("{}.{}.{}.{}", (ip >> 24) & 0xff, (ip >> 16) & 0xff, (ip >> 8) & 0xff, (ip) & 0xff) } #[inline] pub fn deser_be_u32_slice(arr: &[u8]) -> u32 { if arr.len() != 4 { error!("deser_be_u32_slice given bad slice. length: {}", arr.len()); return 0; } (arr[0] as u32) << 24 | (arr[1] as u32) << 16 | (arr[2] as u32) << 8 | (arr[3] as u32) } #[inline] pub fn deser_be_u32(arr: &[u8; 4]) -> u32 { (arr[0] as u32) << 24 | (arr[1] as u32) << 16 | (arr[2] as u32) << 8 | (arr[3] as u32) } pub fn get_tcp_timestamps(tcp_pkt: &TcpPacket) -> (u32, u32) { match tcp_pkt.get_options_iter() .find(|x| x.get_number() == TcpOptionNumbers::TIMESTAMPS) { Some(p) => (deser_be_u32_slice(&p.payload()[0..4]), deser_be_u32_slice(&p.payload()[4..8])), None => (0, 0), } } pub fn tcp_seq_is_wrapped(s1: u32, s2: u32) -> bool { ((s1 as i64) - (s2 as i64)).abs() > 2147483648 } pub fn tcp_seq_lte(a: u32, b: u32) -> bool { if a == b { true } else { let res = a < b; if tcp_seq_is_wrapped(a, b) { !res } else { res } } }
pub fn mem_used_kb() -> u64 { let my_pid: i32 = unsafe { libc::getpid() }; let f = match File::open(format!("/proc/{}/status", my_pid)) { Ok(f) => f, Err(e) => { error!("Failed to open /proc/{}/status: {:?}", my_pid, e); return 0; } }; let buf_f = BufReader::new(f); for l in buf_f.lines() { if let Ok(line) = l { if line.contains("VmRSS") { let (_, vmrss_gone) = line.split_at(6); let starts_at_number = vmrss_gone.trim_left(); if let Some(kb_ind) = starts_at_number.find("kB") { let (kb_gone, _) = starts_at_number.split_at(kb_ind); let just_number = kb_gone.trim_right(); if let Ok(as_u64) = just_number.parse::<u64>() { return as_u64; } } } } else { error!("Error reading /proc/{}/status", my_pid); return 0; } } error!("Failed to parse a VmRSS value out of /proc/{}/status!", my_pid); return 0; } #[cfg(test)] mod tests { use util; #[test] fn mem_used_kb_parses_something() { assert!(util::mem_used_kb() > 0); } }
pub fn tcp_seq_lt(a: u32, b: u32) -> bool { if a == b { false } else { let res = a < b; if tcp_seq_is_wrapped(a, b) { !res } else { res } } }
function_block-full_function
[ { "content": "// Pass in 1st, 2nd bytes of the TL (big-endian). Returns is_proto, len.\n\n// The len returned might be 0, meaning extended TL.\n\npub fn parse_typelen(byte1: u8, byte2: u8) -> (bool, usize)\n\n{\n\n let tl = unsafe { mem::transmute::<[u8;2], i16>([byte2, byte1]) };\n\n if tl >= 0 {\n\n (true, tl.clone() as usize)\n\n } else {\n\n let len: usize = { if tl == -32768i16 { 32768 as usize }\n\n else { (-tl.clone()) as usize }};\n\n (false, len)\n\n }\n\n}\n\n\n", "file_path": "tapdance-rust-logic/src/protocol_outer_framing.rs", "rank": 6, "score": 331949.80608282407 }, { "content": "pub fn c_make_forged_tls(local_ip: u32, local_port: u16,\n\n remote_ip: u32, remote_port: u16,\n\n tcp_seq: u32, tcp_ack: u32,\n\n cli_tcp_win: u16, cli_advertised_wscale: u8,\n\n tcp_mss: u16, tcp_ts: u32, tcp_ts_ecr: u32,\n\n master_secret: &[u8], cipher_suite: u16,\n\n client_random: &[u8], server_random: &[u8],\n\n app_data: &[u8], forged_fd_out: *mut i32)\n\n-> *mut c_void\n\n{panic!(\"YOU ARE TESTING AND THIS FUNCTION IS NOT MOCKED YET!\");}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 7, "score": 306379.0032093463 }, { "content": "pub fn c_get_cpu_time() -> (i64, i64, i64, i64)\n\n{panic!(\"YOU ARE TESTING AND THIS FUNCTION IS NOT MOCKED YET!\");}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 8, "score": 295128.16945964965 }, { "content": "pub fn c_tcp_send_rst_pkt(saddr: u32, daddr: u32,\n\n sport: u16, dport: u16, seq: u32)\n\n{panic!(\"c_tcp_send_rst_pkt({}) called\", seq);}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 11, "score": 268571.4656773394 }, { "content": "//out: &mut [u8; STEGO_DATA_LEN]) -> i32\n\npub fn extract_telex_tag(secret_key: &[u8], tls_record: &[u8]) -> Vec<u8>\n\n{\n\n if tls_record.len() < 272 // (conservatively) smaller than minimum request\n\n {\n\n return vec![];\n\n }\n\n // This fn indexes a lot of slices with computed offsets; panics possible!\n\n if let Ok(out_vec) = panic::catch_unwind(||\n\n {\n\n // TLS record: 1 byte of 'content type', 2 of 'version', 2 of 'length',\n\n // and then [length] bytes of 'payload'\n\n //======================================================================\n\n //let content_type = tls_record[0];\n\n //let tls_version = u8u8_to_u16(tls_record[1], tls_record[2]);\n\n\n\n let tls_payload = &tls_record[5..tls_record.len()];\n\n //======================================================================\n\n // Starting from 252 byte from the end of the TLS payload extract\n\n // stego'd data from each block of 4 bytes (if the payload length isn't\n\n // a multiple of 4, just ignore the tail). Continue until we have run\n", "file_path": "tapdance-rust-logic/src/elligator.rs", "rank": 12, "score": 267487.32017620973 }, { "content": "pub fn c_write_reporter(msg: String)\n\n{panic!(\"YOU ARE TESTING AND THIS FUNCTION IS NOT MOCKED YET!\");}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 13, "score": 264965.0457877526 }, { "content": "pub fn c_open_reporter(fname: String)\n\n{panic!(\"YOU ARE TESTING AND THIS FUNCTION IS NOT MOCKED YET!\");}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 14, "score": 264965.0457877526 }, { "content": "pub fn c_add_decoy_failure(failed: &String)\n\n{panic!(\"YOU ARE TESTING AND THIS FUNCTION IS NOT MOCKED YET!\");}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 15, "score": 262923.19427185075 }, { "content": "pub fn c_get_payload_from_tag(station_privkey: &[u8],\n\n stego_payload: &mut [u8],\n\n out: &mut [u8], out_len: size_t) -> size_t\n\n{panic!(\"YOU ARE TESTING AND THIS FUNCTION IS NOT MOCKED YET!\");}\n\n\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 16, "score": 260938.05865611293 }, { "content": "pub fn make_session_close_msg() -> Vec<u8>\n\n{\n\n let body = make_simple_proto(S2C_Transition::S2C_SESSION_CLOSE);\n\n let mut frame = empty_proto_frame_with_len(body.compute_size() as usize);\n\n body.write_to_vec(&mut frame)\n\n .unwrap_or_else(|e|{error!(\"writing close proto body failed: {}\", e);});\n\n frame\n\n}\n\n\n", "file_path": "tapdance-rust-logic/src/protocol_sta2cli.rs", "rank": 17, "score": 258975.48375551007 }, { "content": "pub fn make_confirm_reconnect_msg() -> Vec<u8>\n\n{\n\n let body = make_simple_proto(S2C_Transition::S2C_CONFIRM_RECONNECT);\n\n let mut frame = empty_proto_frame_with_len(body.compute_size() as usize);\n\n body.write_to_vec(&mut frame)\n\n .unwrap_or_else(|e|{error!(\"writing recon proto body failed: {}\", e);});\n\n frame\n\n}\n\n\n", "file_path": "tapdance-rust-logic/src/protocol_sta2cli.rs", "rank": 18, "score": 258975.48375551007 }, { "content": "pub fn make_session_init_msg() -> Vec<u8>\n\n{\n\n let body = make_simple_proto(S2C_Transition::S2C_SESSION_INIT);\n\n let mut frame = empty_proto_frame_with_len(body.compute_size() as usize);\n\n body.write_to_vec(&mut frame)\n\n .unwrap_or_else(|e|{error!(\"writing init proto body failed: {}\", e);});\n\n frame\n\n}\n\n\n", "file_path": "tapdance-rust-logic/src/protocol_sta2cli.rs", "rank": 19, "score": 258975.48375551007 }, { "content": "fn fill_tcpkt(buf: &mut [u8], seq_start: u32, payload: &[u8])\n\n{\n\n let mut pkt_mut1 = MutableTcpPacket::new(buf).unwrap();\n\n pkt_mut1.set_sequence(seq_start);\n\n pkt_mut1.set_data_offset(5);\n\n pkt_mut1.set_payload(payload);\n\n}\n\n\n\n//TODO FIN tests\n\n\n", "file_path": "tapdance-rust-logic/src/evented_ssl_eavesdropper.rs", "rank": 22, "score": 253303.67575221814 }, { "content": "fn make_simple_proto(ver: u32, gen: u32, pad_len: usize) -> Vec<u8>\n\n{\n\n let mut msg = ClientToStation::new();\n\n msg.set_protocol_version(ver);\n\n msg.set_decoy_list_generation(gen);\n\n msg.set_padding({\n\n let mut padding: Vec<u8> = Vec::with_capacity(pad_len);\n\n for i in 0..pad_len {\n\n padding.push((i % 256) as u8);\n\n }\n\n padding\n\n });\n\n let mut ret_vec: Vec<u8> = Vec::new();\n\n msg.write_to_vec(&mut ret_vec);\n\n ret_vec\n\n}\n\n\n\n// This test assumes it's running on a little-endian machine. (Which is good,\n\n// because the actual code does, too).\n", "file_path": "tapdance-rust-logic/src/protocol_outer_framing.rs", "rank": 25, "score": 246203.67846536302 }, { "content": "pub fn make_err_msg(why: SessionError) -> Vec<u8>\n\n{\n\n let mut body = make_simple_proto(S2C_Transition::S2C_ERROR);\n\n body.set_err_reason(why.to_s2c_proto_enum());\n\n let mut frame = empty_proto_frame_with_len(body.compute_size() as usize);\n\n body.write_to_vec(&mut frame)\n\n .unwrap_or_else(|e|{error!(\"writing 'err' proto body failed: {}\", e);});\n\n frame\n\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", "file_path": "tapdance-rust-logic/src/protocol_sta2cli.rs", "rank": 26, "score": 243290.82196882655 }, { "content": "// Allocates space for a framed protobuf message, with the protobuf itself being\n\n// the given size. Writes the type+len value into the first 2 (or 6) bytes.\n\n// (Returned Vec has capacity for the whole message, but len() only 2 (or 6)).\n\npub fn empty_proto_frame_with_len(len: usize) -> Vec<u8>\n\n{\n\n if len <= 32767 {\n\n let mut frame: Vec<u8> = Vec::with_capacity(2 + len);\n\n let net_order_len: i16 = (len as i16).to_be();\n\n let len_bytes = unsafe { mem::transmute::<i16, [u8;2]>(net_order_len) };\n\n frame.push(len_bytes[0]);\n\n frame.push(len_bytes[1]);\n\n frame\n\n }\n\n else { // we need \"16-bit type+len = 0, real len is next 4 bytes\"\n\n let mut frame: Vec<u8> = Vec::with_capacity(6 + len);\n\n frame.push(0);\n\n frame.push(0);\n\n let net_order_len: u32 = (len as u32).to_be();\n\n let len_bytes = unsafe { mem::transmute::<u32, [u8;4]>(net_order_len) };\n\n frame.push(len_bytes[0]);\n\n frame.push(len_bytes[1]);\n\n frame.push(len_bytes[2]);\n\n frame.push(len_bytes[3]);\n\n frame\n\n }\n\n}\n\n\n", "file_path": "tapdance-rust-logic/src/protocol_sta2cli.rs", "rank": 27, "score": 241413.163022591 }, { "content": "pub fn c_SSL_write(ssl: *mut c_void, input: &[u8])\n\n-> Result<usize, i32>\n\n{panic!(\"YOU ARE TESTING AND THIS FUNCTION IS NOT MOCKED YET!\");}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 28, "score": 240452.3649459658 }, { "content": "pub fn c_make_forged_memory_tls(master_secret: &[u8], cipher_suite: u16,\n\n client_random: &[u8], server_random: &[u8], app_data: &[u8],\n\n from_cli_membio: *mut c_void, unused_to_cli_membio: *mut c_void)\n\n-> *mut c_void\n\n{panic!(\"YOU ARE TESTING AND THIS FUNCTION IS NOT MOCKED YET!\");}\n\n\n\n\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 29, "score": 239551.35779260568 }, { "content": "pub fn get_global_failure_map_rawptr() -> *mut HashMap<String, usize>\n\n{panic!(\"YOU ARE TESTING AND THIS FUNCTION IS NOT MOCKED YET!\");}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 30, "score": 239535.21178647163 }, { "content": "pub fn c_BIO_write(bio: *mut c_void, data: &[u8]) -> i32\n\n{panic!(\"YOU ARE TESTING AND THIS FUNCTION IS NOT MOCKED YET!\");}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 31, "score": 234650.94007605646 }, { "content": "pub fn c_SSL_read(ssl: *mut c_void, output: &mut [u8])\n\n-> Result<usize, i32>\n\n{panic!(\"YOU ARE TESTING AND THIS FUNCTION IS NOT MOCKED YET!\");}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 32, "score": 234650.94007605646 }, { "content": "// These next two functions are the writing half of the station's\n\n// implementation of the outer framing protocol. They should never, ever need to\n\n// change! If they do, all clients will have to change as well.\n\n//\n\n// Writes the type+len i16 into the first two bytes of header_target. (The len\n\n// info for the type+len i16 is taken from the data_len argument).\n\npub fn write_data_tl_hdr(header_target: &mut [u8], data_len: usize)\n\n{\n\n let net_order_neg_len: i16 =\n\n if data_len < 32768 {\n\n (-(data_len as i16)).to_be()\n\n }\n\n else if data_len == 32768 { // special case: +32768 would overflow i16\n\n (-32768i16).to_be()\n\n }\n\n else {\n\n panic!(\"Attempted to send a data chunk of size {}. Max is 32768.\",\n\n data_len);\n\n };\n\n let len_bytes = unsafe { mem::transmute::<i16, [u8;2]>(net_order_neg_len) };\n\n header_target[0] = len_bytes[0];\n\n header_target[1] = len_bytes[1];\n\n}\n", "file_path": "tapdance-rust-logic/src/protocol_sta2cli.rs", "rank": 33, "score": 231188.2880022875 }, { "content": "pub fn make_simple_proto(msg_type: S2C_Transition) -> StationToClient\n\n{\n\n let mut msg = StationToClient::new();\n\n msg.set_protocol_version(TAPDANCE_PROTOCOL_VERSION);\n\n msg.set_state_transition(msg_type);\n\n msg.set_padding({\n\n let pad_len = thread_rng().gen_range(1, 200);\n\n let mut padding: Vec<u8> = Vec::with_capacity(pad_len);\n\n for i in 0..pad_len {\n\n padding.push((i % 256) as u8);\n\n }\n\n padding\n\n });\n\n msg\n\n}\n\n\n", "file_path": "tapdance-rust-logic/src/protocol_sta2cli.rs", "rank": 34, "score": 226254.35705341518 }, { "content": "pub fn c_new_membio()\n\n{panic!(\"YOU ARE TESTING AND THIS FUNCTION IS NOT MOCKED YET!\");}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 35, "score": 196342.39167423616 }, { "content": "// local_ip, local_port, remote_ip, remote_port should all be net-order.\n\n// The rest are host-order.\n\npub fn c_make_forged_tls(\n\n local_ip: u32, local_port: u16, remote_ip: u32, remote_port: u16,\n\n tcp_seq: u32, tcp_ack: u32, cli_tcp_win: u16,\n\n cli_advertised_wscale: u8, tcp_mss: u16, tcp_ts: u32, tcp_ts_ecr: u32,\n\n master_secret: &[u8], cipher_suite: u16,\n\n client_random: &[u8], server_random: &[u8],\n\n app_data: &[u8], forged_fd_out: *mut i32)\n\n-> *mut c_void\n\n{\n\n\tunsafe {\n\n\t\tmake_forged_tls(\n\n local_ip, local_port, remote_ip, remote_port, tcp_seq, tcp_ack,\n\n cli_tcp_win, cli_advertised_wscale, tcp_mss, tcp_ts, tcp_ts_ecr,\n\n master_secret.as_ptr(), master_secret.len(), cipher_suite,\n\n client_random.as_ptr(), server_random.as_ptr(),\n\n app_data.as_ptr(), app_data.len(), forged_fd_out)\n\n\t}\n\n}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 36, "score": 194931.65186043692 }, { "content": "pub fn c_ugh_ssl_err()\n\n{panic!(\"YOU ARE TESTING AND THIS FUNCTION IS NOT MOCKED YET!\");}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 37, "score": 194924.90124225258 }, { "content": "pub fn c_ERR_clear_error()\n\n{panic!(\"YOU ARE TESTING AND THIS FUNCTION IS NOT MOCKED YET!\");}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 38, "score": 194924.90124225258 }, { "content": "fn get_rounded_time() -> i64\n\n{\n\n let timespec = get_time();\n\n if timespec.nsec >= 500000000 { timespec.sec + 1 }\n\n else { timespec.sec }\n\n}\n\n\n\n#[repr(C)]\n\npub struct RustGlobalsStruct\n\n{\n\n global: *mut PerCoreGlobal,\n\n cli_conf: *mut ClientConf,\n\n fail_map: *mut HashMap<String, usize>,\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn rust_init(lcore_id: i32, ckey: *const u8)\n\n-> RustGlobalsStruct\n\n{\n\n let key = *array_ref!(unsafe{std::slice::from_raw_parts(ckey, 32 as usize)},\n", "file_path": "tapdance-rust-logic/src/rust_tapdance.rs", "rank": 39, "score": 193561.9918531631 }, { "content": "pub fn c_reset_global_cli_download_count()\n\n{panic!(\"YOU ARE TESTING AND THIS FUNCTION IS NOT MOCKED YET!\");}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 40, "score": 192174.72887133848 }, { "content": "fn is_tls_app_pkt(tcp_pkt: &TcpPacket) -> bool\n\n{\n\n let payload = tcp_pkt.payload();\n\n payload.len() > 5 && payload[0] == TLS_TYPE_APPLICATION_DATA\n\n}\n\n\n\nimpl PerCoreGlobal\n\n{\n\n // frame_len is supposed to be the length of the whole Ethernet frame. We're\n\n // only passing it here for plumbing reasons, and just for stat reporting.\n\n fn process_ipv4_packet(&mut self, ip_pkt: Ipv4Packet, frame_len: usize)\n\n {\n\n // Ignore packets that aren't TCP\n\n if ip_pkt.get_next_level_protocol() != IpNextHeaderProtocols::Tcp {\n\n return;\n\n }\n\n let tcp_pkt = match TcpPacket::new(ip_pkt.payload()) {\n\n Some(pkt) => pkt,\n\n None => return,\n\n };\n", "file_path": "tapdance-rust-logic/src/process_packet.rs", "rank": 41, "score": 191445.6151670508 }, { "content": "// Extracts 3 stego'd bytes in_bufto 'out_buf', from the 4 bytes of AES\n\n// ciphertext at 'in_buf'.\n\nfn extract_stego_bytes(in_buf: &[u8], out_buf: &mut [u8])\n\n{\n\n assert!(in_buf.len() == 4);\n\n assert!(out_buf.len() == 3);\n\n\n\n let x = ((in_buf[0] & 0x3f) as u32) * (64*64*64) +\n\n ((in_buf[1] & 0x3f) as u32) * (64*64) +\n\n ((in_buf[2] & 0x3f) as u32) * (64) +\n\n ((in_buf[3] & 0x3f) as u32);\n\n\n\n out_buf[0] = ((x >> 16) & 0xff) as u8;\n\n out_buf[1] = ((x >> 8 ) & 0xff) as u8;\n\n out_buf[2] = ((x ) & 0xff) as u8;\n\n}\n\n\n", "file_path": "tapdance-rust-logic/src/elligator.rs", "rank": 42, "score": 188574.5142290237 }, { "content": "// Extracts up to out_buf_len stego'd bytes in_bufto 'out_buf', from the 4 bytes\n\n// of AES ciphertext at 'in_buf'.\n\n// Returns number of bytes written in_bufto 'out_buf'.\n\nfn extract_stego_bytes_chopped(in_buf: &[u8], out_buf: &mut [u8])\n\n{\n\n assert!(in_buf.len() == 4);\n\n\n\n let x = ((in_buf[0] & 0x3f) as u32) * (64*64*64) + \n\n ((in_buf[1] & 0x3f) as u32) * (64*64) + \n\n ((in_buf[2] & 0x3f) as u32) * (64) + \n\n ((in_buf[3] & 0x3f) as u32);\n\n let x_bytes = unsafe { mem::transmute::<u32, [u8; 4]>(x) };\n\n\n\n out_buf[0] = x_bytes[1];\n\n if out_buf.len() == 2\n\n {\n\n out_buf[1] = x_bytes[2];\n\n }\n\n else if out_buf.len() > 2\n\n {\n\n out_buf[1] = x_bytes[2];\n\n out_buf[2] = x_bytes[3];\n\n }\n\n}\n\n\n", "file_path": "tapdance-rust-logic/src/elligator.rs", "rank": 43, "score": 187468.78803316038 }, { "content": "fn rust_get_payload_from_tag(station_privkey: &[u8], stego_payload: &mut [u8],\n\n out: &mut [u8]) -> size_t\n\n{\n\n // First 32 bytes of stego_payload may be elligator-encoded point\n\n stego_payload[31] &= !(0xc0);\n\n client_pubkey = decode(&stego_payload[0..32]\n\n\n\n\n\n\n\n}\n\n*/\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n/* Uses a function from an external library; run separately from other tests.\n\n#[cfg(test)]\n\nmod tests {\n\nuse elligator;\n", "file_path": "tapdance-rust-logic/src/elligator.rs", "rank": 44, "score": 185294.70172003692 }, { "content": "fn test_default_syn() -> Vec<u8>\n\n{\n\n vec!(0xe3, 0x2c, // src port\n\n 0x01, 0xbb, // dst port 443\n\n 0x43, 0xb0, 0x9f, 0x78, // seq# (1135648632)\n\n 0, 0, 0, 0, // ACK 0\n\n 160, // 50 byte header = offset 10, 10 << 4 = 160\n\n 2, // SYN flag\n\n 0xaa, 0xaa, // window\n\n 0x5a, 0x0e, // checksum\n\n 0, 0, // urgent pointer\n\n // 20 bytes of options, from the SYN of `iperf -c localhost -p 443`:\n\n // [mss 65495,sackOK,TS val 885507 ecr 0,nop,wscale 7]\n\n 0x02, 0x04, 0xff, 0xd7, // mss 65495\n\n 0x04, 0x02, 0x08, 0x0a, 0x00, 0x0d, 0x83, 0x03,\n\n 0x00, 0x00, 0x00, 0x00, 0x01, 0x03, 0x03, 0x07)\n\n}\n\n\n", "file_path": "tapdance-rust-logic/src/flow_tracker.rs", "rank": 45, "score": 182270.23490250472 }, { "content": "pub fn c_get_global_cli_download_count() -> u64\n\n{panic!(\"YOU ARE TESTING AND THIS FUNCTION IS NOT MOCKED YET!\");}\n\n//HACKY_CFG_YES_TEST_END*/\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 46, "score": 180922.23764399023 }, { "content": "#[test]\n\nfn format_failed_decoys()\n\n{\n\n let session_id = SessionId::new(&[255,255,255,255,255,255,255,255,\n\n 255,255,255,255,255,255,255,255]);\n\n let mut msg1 = ClientToStation::new();\n\n msg1.set_protocol_version(123);\n\n assert_eq!(0, msg1.get_failed_decoys().len());\n\n\n\n let mut msg2 = ClientToStation::new();\n\n msg2.set_protocol_version(123);\n\n *msg2.mut_failed_decoys().push_default() = \"1.2.3.4\".to_string();\n\n let would_report2 = format!(\"decoysfailed {} {}\", session_id,\n\n msg2.get_failed_decoys().join(\" \"));\n\n assert_eq!(\"decoysfailed ffffffffffffffffffffffffffffffff 1.2.3.4\",\n\n would_report2);\n\n\n\n let mut msg3 = ClientToStation::new();\n\n msg3.set_protocol_version(123);\n\n *msg3.mut_failed_decoys().push_default() = \"5.6.7.8\".to_string();\n\n *msg3.mut_failed_decoys().push_default() = \"9.10.11.12\".to_string();\n", "file_path": "tapdance-rust-logic/src/tapdance_session.rs", "rank": 47, "score": 174020.7041907439 }, { "content": "// Assumes you will only call it after checking\n\n// if tag_payload.len() >= TAG_FLAGS_LEN + TAG_M_KEY_LEN + TAG_SRV_RND_LEN +\n\n// TAG_CLI_RND_LEN + TAG_CON_ID_LEN\n\nfn parse_tag_payload(tag_payload: &Vec<u8>)\n\n-> (u8, &[u8], &[u8], &[u8], SessionId)\n\n{\n\n let mut offset = 0;\n\n\n\n let flags = tag_payload[offset];\n\n offset += TAG_FLAGS_LEN;\n\n\n\n let master_key = &tag_payload[offset..offset+TAG_M_KEY_LEN];\n\n offset += TAG_M_KEY_LEN;\n\n\n\n let server_random = &tag_payload[offset..offset+TAG_SRV_RND_LEN];\n\n offset += TAG_SRV_RND_LEN;\n\n\n\n let client_random = &tag_payload[offset..offset+TAG_CLI_RND_LEN];\n\n offset += TAG_CLI_RND_LEN;\n\n\n\n let session_id_slice = &tag_payload[offset..offset+TAG_CON_ID_LEN];\n\n // (do `offset += TAG_CON_ID_LEN` here if you need to read further)\n\n let session_id = SessionId::new(\n\n array_ref![session_id_slice,0,TAG_CON_ID_LEN]);\n\n\n\n (flags, master_key, server_random, client_random, session_id)\n\n}\n", "file_path": "tapdance-rust-logic/src/process_packet.rs", "rank": 48, "score": 172647.60358407546 }, { "content": "pub fn c_ugh_ssl_err() { unsafe { ugh_ssl_err(); }}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 49, "score": 172634.7039595702 }, { "content": "pub fn c_ERR_clear_error() { unsafe { ERR_clear_error(); }}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 50, "score": 172634.7039595702 }, { "content": "pub fn c_get_global_cli_conf() -> *const ClientConf\n\n{panic!(\"YOU ARE TESTING AND THIS FUNCTION IS NOT MOCKED YET!\");}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 51, "score": 172634.7039595702 }, { "content": "pub fn c_add_to_global_cli_download_count(input: u64)\n\n{panic!(\"YOU ARE TESTING AND THIS FUNCTION IS NOT MOCKED YET!\");}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 52, "score": 172634.7039595702 }, { "content": "#[test]\n\nfn begin_tracking_uses_whole_4tuple()\n\n{\n\n let mut ft = FlowTracker::new();\n\n assert_eq!(0, ft.tracked_flows.len());\n\n ft.begin_tracking_flow(&flow1, test_default_syn());\n\n assert_eq!(1, ft.tracked_flows.len());\n\n ft.begin_tracking_flow(&flow1_diff_srcip, test_default_syn());\n\n assert_eq!(2, ft.tracked_flows.len());\n\n ft.begin_tracking_flow(&flow1_diff_dstip, test_default_syn());\n\n assert_eq!(3, ft.tracked_flows.len());\n\n ft.begin_tracking_flow(&flow1_diff_sport, test_default_syn());\n\n assert_eq!(4, ft.tracked_flows.len());\n\n ft.begin_tracking_flow(&flow1_diff_dport, test_default_syn());\n\n assert_eq!(5, ft.tracked_flows.len());\n\n}\n\n\n", "file_path": "tapdance-rust-logic/src/flow_tracker.rs", "rank": 54, "score": 171261.50994817296 }, { "content": "#[test]\n\nfn flow_equality_uses_whole_4tuple()\n\n{\n\n assert_eq!(flow1, flow1_clone);\n\n assert!(flow1 != flow1_diff_srcip);\n\n assert!(flow1 != flow1_diff_dstip);\n\n assert!(flow1 != flow1_diff_sport);\n\n assert!(flow1 != flow1_diff_dport);\n\n}\n\n\n", "file_path": "tapdance-rust-logic/src/flow_tracker.rs", "rank": 55, "score": 171261.50994817296 }, { "content": "// Returns Ok(true) if the shutdown is complete, Ok(false) if in progress,\n\n// Err(SSLerr i32) for various reasons (including WOULDBLOCKs, i.e. in progress)\n\npub fn c_SSL_shutdown(ssl: *mut c_void)\n\n-> Result<bool, i32>\n\n{\n\n let res = unsafe {\n\n ERR_clear_error();\n\n SSL_shutdown(ssl)\n\n };\n\n if res == 1 { Ok(true) }\n\n else if res == 0 { Ok(false) }\n\n else { Err(unsafe{SSL_get_error(ssl, res)}) }\n\n}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 56, "score": 170109.49811517063 }, { "content": "pub fn c_BIO_free_all(bio: *mut c_void)\n\n{panic!(\"YOU ARE TESTING AND THIS FUNCTION IS NOT MOCKED YET!\");}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 57, "score": 170099.24450808007 }, { "content": "pub fn c_SSL_free(ssl: *mut c_void)\n\n{panic!(\"YOU ARE TESTING AND THIS FUNCTION IS NOT MOCKED YET!\");}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 58, "score": 170099.24450808007 }, { "content": "fn proto_parse(raw_bytes: &[u8]) -> Option<ClientToStation>\n\n{\n\n match protobuf::parse_from_bytes::<ClientToStation>(raw_bytes)\n\n {\n\n Ok(p) => Some(p),\n\n Err(what) => { error!(\"Parsing what was supposedly a full protobuf \\\n\n blob failed with: {:?}. Session is bad.\", what);\n\n None\n\n }\n\n }\n\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", "file_path": "tapdance-rust-logic/src/protocol_outer_framing.rs", "rank": 59, "score": 164927.66996336402 }, { "content": "pub fn init(log_level: LogLevel, core_id: i32)\n\n{\n\n log::set_logger(|max_log_level| {\n\n max_log_level.set(log_level.to_log_level_filter());\n\n Box::new(SimpleLogger{log_level: log_level, lcore_id: core_id})\n\n }).unwrap_or_else(|e|{error!(\"failed to init logging: {}\", e);});\n\n}\n\n\n\n//HACKY_CFG_NO_TEST_BEGIN\n\n#[macro_export]\n\nmacro_rules! report {\n\n ($($arg:tt)*) => {{\n\n let s = format!(\"{}\\n\", format_args!($($arg)*));\n\n debug!(\"{}\", s);\n\n $crate::c_api::c_write_reporter(s);\n\n }};\n\n}\n\n//HACKY_CFG_NO_TEST_END*/\n\n/*//HACKY_CFG_YES_TEST_BEGIN\n\n#[macro_export]\n\nmacro_rules! report {\n\n ($($arg:tt)*) => {{\n\n let s = format!(\"{}\\n\", format_args!($($arg)*));\n\n debug!(\"{}\", s);\n\n }};\n\n}\n\n//HACKY_CFG_YES_TEST_END*/\n", "file_path": "tapdance-rust-logic/src/logging.rs", "rank": 60, "score": 162179.22005839378 }, { "content": "pub fn c_new_membio() -> *mut c_void { unsafe { BIO_new(BIO_s_mem()) }}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 61, "score": 157745.83157088864 }, { "content": "pub fn c_SSL_free(ssl: *mut c_void) { unsafe { SSL_free(ssl) }}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 62, "score": 155161.49978984558 }, { "content": "pub fn c_BIO_free_all(bio: *mut c_void) { unsafe { BIO_free_all(bio); } }\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 63, "score": 155161.49978984558 }, { "content": "// Returns # of bytes read into buf. Error, WouldBlock, Shutdown all return 0.\n\nfn read_from_stream(reader: &mut StreamReceiver, buf: &mut [u8]) -> usize\n\n{\n\n if let ReadStat::GotData(n) = reader.read(buf) { n }\n\n else {0}\n\n}\n\n\n", "file_path": "tapdance-rust-logic/src/protocol_outer_framing.rs", "rank": 64, "score": 152734.23087664598 }, { "content": "fn SSL_write(ssl: *mut c_void, input: *const u8, in_len: i32) -> i32\n\n{panic!(\"YOU ARE TESTING AND THIS FUNCTION IS NOT MOCKED YET!\");}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 65, "score": 149985.74639098597 }, { "content": "fn SSL_read(ssl: *mut c_void, output: *mut u8, out_len: i32) -> i32\n\n{panic!(\"YOU ARE TESTING AND THIS FUNCTION IS NOT MOCKED YET!\");}\n", "file_path": "tapdance-rust-logic/src/c_api.rs", "rank": 66, "score": 149985.74639098597 }, { "content": "static /* inline */ int packet_match_ip(fast_bpf_node_t *n, fast_bpf_pkt_info_t *h) {\n\n fast_bpf_pkt_info_tuple_t *t = &h->tuple;\n\n\n\n if (n->qualifiers.header == Q_INNER) {\n\n if (ignore_inner_header) return 1;\n\n t = &h->tunneled_tuple;\n\n }\n\n\n\n switch(n->qualifiers.direction) {\n\n case Q_SRC:\n\n if((t->ip_src.v4 & n->mask) == n->ip) return 1;\n\n break;\n\n case Q_DST:\n\n if((t->ip_dst.v4 & n->mask) == n->ip) return 1;\n\n break;\n\n case Q_DEFAULT:\n\n case Q_OR:\n\n if((t->ip_src.v4 & n->mask) == n->ip ||\n\n (t->ip_dst.v4 & n->mask) == n->ip) return 1;\n\n break;\n\n case Q_AND:\n\n if((t->ip_src.v4 & n->mask) == n->ip &&\n\n (t->ip_dst.v4 & n->mask) == n->ip) return 1;\n\n break;\n\n default:\n\n DEBUG_PRINTF(\"Unexpected direction qualifier (%d)\\n\", __LINE__);\n\n }\n\n //DEBUG_PRINTF(\"%s returning false\\n\", __FUNCTION__);\n\n return 0;\n", "file_path": "pfring-framework/userland/fast_bpf/tree_match.c", "rank": 67, "score": 146244.01123661487 }, { "content": "static u_int8_t use_ipv6_l32_match = 0;\n", "file_path": "pfring-framework/userland/fast_bpf/tree_match.c", "rank": 68, "score": 145154.22130364296 }, { "content": "#define OFPC_ARP_MATCH_IP (1 << 7)\n", "file_path": "pfring-framework/userland/tcpdump-4.7.4/print-openflow-1.0.c", "rank": 69, "score": 132500.22045157812 }, { "content": "#define CFM_CCM_MD_FORMAT_NONE 1\n", "file_path": "pfring-framework/userland/tcpdump-4.7.4/print-cfm.c", "rank": 70, "score": 131263.5668913854 }, { "content": "\tconst struct ip *ip;\n", "file_path": "pfring-framework/userland/tcpdump-4.7.4/print-ip.c", "rank": 71, "score": 130692.15575566323 }, { "content": "\tconst char *s1, *s2;\n", "file_path": "pfring-framework/userland/tcpdump-4.7.4/strcasecmp.c", "rank": 72, "score": 118246.77153131271 }, { "content": "\tregister const char *s1, *s2;\n", "file_path": "pfring-framework/userland/libpcap-1.7.4/gencode.c", "rank": 73, "score": 118246.77153131271 }, { "content": " u_int32_t length;\n", "file_path": "pfring-framework/userland/lib/pfring.h", "rank": 74, "score": 118186.87073813134 }, { "content": "\t uint16_t length;\n", "file_path": "pfring-framework/userland/tcpdump-4.7.4/ospf.h", "rank": 75, "score": 118186.87073813134 }, { "content": "\tuint16_t\tlength;\n", "file_path": "pfring-framework/userland/tcpdump-4.7.4/appletalk.h", "rank": 76, "score": 118186.87073813134 }, { "content": "\tuint8_t\t\tlength;\n", "file_path": "pfring-framework/userland/tcpdump-4.7.4/print-802_11.c", "rank": 77, "score": 118186.87073813134 }, { "content": "static int\n\nl_strnstart(const char *tstr1, u_int tl1, const char *str2, u_int l2)\n\n{\n\n\n\n\tif (tl1 > l2)\n\n\t\treturn 0;\n\n\n\n\treturn (strncmp(tstr1, str2, tl1) == 0 ? 1 : 0);\n", "file_path": "pfring-framework/userland/tcpdump-4.7.4/print-beep.c", "rank": 78, "score": 27.086885826012413 }, { "content": " if !any_progress\n\n {\n\n // Avoids an infinite loop in case we have a fragment that\n\n // is ready now, but the BIO is not accepting any bytes.\n\n // (I am SO glad this got caught in the initial unit tests!)\n\n break;\n\n }\n\n },\n\n TcpSegmentUsefulness::UsefulLater => break,\n\n // Do NOT break. Chunks later in the heap may be usable.\n\n TcpSegmentUsefulness::UsefulNever => {let _ = self.tcp_buf.pop();},\n\n }\n\n }\n\n }\n\n // slice_seq_start should be the TCP seq# of the first byte of the slice.\n\n fn add_slice_to_buffer(&mut self, the_slice: &[u8], slice_seq_start: u32,\n\n is_fin: bool)\n\n {\n\n let mut chunk_vec = Vec::with_capacity(the_slice.len());\n\n chunk_vec.extend_from_slice(the_slice);\n", "file_path": "tapdance-rust-logic/src/evented_ssl_eavesdropper.rs", "rank": 80, "score": 25.318044353104625 }, { "content": "#[macro_use]\n\nextern crate arrayref;\n\nextern crate lazycell;\n\nextern crate libc;\n\n#[macro_use]\n\nextern crate log;\n\nextern crate mio;\n\nextern crate pnet;\n\nextern crate protobuf;\n\nextern crate rand;\n\nextern crate time;\n\nextern crate tuntap;\n\n\n\nuse std::cell::RefCell;\n\nuse std::collections::{HashMap, HashSet};\n\nuse std::fs::File;\n\nuse std::io::prelude::*;\n\nuse std::io::{BufReader, Read};\n\nuse std::mem::transmute;\n\nuse std::rc::Rc;\n", "file_path": "tapdance-rust-logic/src/rust_tapdance.rs", "rank": 81, "score": 25.08203419369056 }, { "content": "// msg_size_in_buf != 0 and asmbld_msg.is_some() should be mutually exclusive.\n\n// any_bytes_read is necessary because some of the logic that uses this result\n\n// is also involved in buffering.\n\npub struct ReadMsgRes\n\n{\n\n pub any_bytes_read: bool,\n\n pub msg_size_in_buf: usize,\n\n pub asmbld_msg: Option<Vec<u8>>,\n\n pub proto: Option<ClientToStation>,\n\n}\n\nimpl ReadMsgRes\n\n{\n\n pub fn with_direct(msg_size: usize) -> ReadMsgRes\n\n {\n\n ReadMsgRes { any_bytes_read: true,\n\n msg_size_in_buf: msg_size,\n\n asmbld_msg: None,\n\n proto: None }\n\n }\n\n pub fn with_assembled(data: Vec<u8>) -> ReadMsgRes\n", "file_path": "tapdance-rust-logic/src/protocol_outer_framing.rs", "rank": 82, "score": 24.41572127581613 }, { "content": "\n\n\n\nextern crate log;\n\nextern crate time;\n\n\n\nuse log::{LogRecord, LogLevel, LogMetadata};\n\n\n\npub struct SimpleLogger\n\n{\n\n log_level: LogLevel,\n\n lcore_id: i32,\n\n}\n\n\n\nimpl log::Log for SimpleLogger\n\n{\n\n fn enabled(&self, metadata: &LogMetadata) -> bool\n\n {\n\n metadata.level() <= self.log_level\n\n }\n\n\n", "file_path": "tapdance-rust-logic/src/logging.rs", "rank": 83, "score": 23.367973688635477 }, { "content": " if let Some(to_check) = self.tracked_flows.get(&flow) {\n\n match *to_check {\n\n FlowState::InTLSHandshake(_) => false,\n\n FlowState::ActiveTapDance(_) => true,\n\n FlowState::FinishingTapDance(_) => true,\n\n FlowState::PassiveTapDance(_) => true,\n\n }\n\n }\n\n else {false}\n\n }\n\n pub fn tracking_at_all(&self, flow: &Flow) -> bool\n\n {\n\n self.tracked_flows.contains_key(flow)\n\n }\n\n // Returns the wscale and mss gotten from the TCP options in the SYN. After\n\n // this function, the FlowTracker no longer stores these values.\n\n // decoy_last_ack should be a host-order u32.\n\n pub fn mark_tapdance_flow(&mut self, flow: &Flow, decoy_last_ack: u32)\n\n -> WscaleAndMSS\n\n {\n", "file_path": "tapdance-rust-logic/src/flow_tracker.rs", "rank": 84, "score": 23.331785986018353 }, { "content": "static const struct tok tag2str[] = {\n\n/* RFC1048 tags */\n\n\t{ TAG_PAD,\t\t\" PAD\" },\n\n\t{ TAG_SUBNET_MASK,\t\"iSubnet-Mask\" },\t/* subnet mask (RFC950) */\n\n\t{ TAG_TIME_OFFSET,\t\"LTime-Zone\" },\t/* seconds from UTC */\n\n\t{ TAG_GATEWAY,\t\t\"iDefault-Gateway\" },\t/* default gateway */\n\n\t{ TAG_TIME_SERVER,\t\"iTime-Server\" },\t/* time servers (RFC868) */\n\n\t{ TAG_NAME_SERVER,\t\"iIEN-Name-Server\" },\t/* IEN name servers (IEN116) */\n\n\t{ TAG_DOMAIN_SERVER,\t\"iDomain-Name-Server\" },\t/* domain name (RFC1035) */\n\n\t{ TAG_LOG_SERVER,\t\"iLOG\" },\t/* MIT log servers */\n\n\t{ TAG_COOKIE_SERVER,\t\"iCS\" },\t/* cookie servers (RFC865) */\n\n\t{ TAG_LPR_SERVER,\t\"iLPR-Server\" },\t/* lpr server (RFC1179) */\n\n\t{ TAG_IMPRESS_SERVER,\t\"iIM\" },\t/* impress servers (Imagen) */\n\n\t{ TAG_RLP_SERVER,\t\"iRL\" },\t/* resource location (RFC887) */\n\n\t{ TAG_HOSTNAME,\t\t\"aHostname\" },\t/* ascii hostname */\n\n\t{ TAG_BOOTSIZE,\t\t\"sBS\" },\t/* 512 byte blocks */\n\n\t{ TAG_END,\t\t\" END\" },\n\n/* RFC1497 tags */\n\n\t{ TAG_DUMPPATH,\t\t\"aDP\" },\n\n\t{ TAG_DOMAINNAME,\t\"aDomain-Name\" },\n\n\t{ TAG_SWAP_SERVER,\t\"iSS\" },\n\n\t{ TAG_ROOTPATH,\t\t\"aRP\" },\n\n\t{ TAG_EXTPATH,\t\t\"aEP\" },\n\n/* RFC2132 tags */\n\n\t{ TAG_IP_FORWARD,\t\"BIPF\" },\n\n\t{ TAG_NL_SRCRT,\t\t\"BSRT\" },\n\n\t{ TAG_PFILTERS,\t\t\"pPF\" },\n\n\t{ TAG_REASS_SIZE,\t\"sRSZ\" },\n\n\t{ TAG_DEF_TTL,\t\t\"bTTL\" },\n\n\t{ TAG_MTU_TIMEOUT,\t\"lMTU-Timeout\" },\n\n\t{ TAG_MTU_TABLE,\t\"sMTU-Table\" },\n\n\t{ TAG_INT_MTU,\t\t\"sMTU\" },\n\n\t{ TAG_LOCAL_SUBNETS,\t\"BLSN\" },\n\n\t{ TAG_BROAD_ADDR,\t\"iBR\" },\n\n\t{ TAG_DO_MASK_DISC,\t\"BMD\" },\n\n\t{ TAG_SUPPLY_MASK,\t\"BMS\" },\n\n\t{ TAG_DO_RDISC,\t\t\"BRouter-Discovery\" },\n\n\t{ TAG_RTR_SOL_ADDR,\t\"iRSA\" },\n\n\t{ TAG_STATIC_ROUTE,\t\"pStatic-Route\" },\n\n\t{ TAG_USE_TRAILERS,\t\"BUT\" },\n\n\t{ TAG_ARP_TIMEOUT,\t\"lAT\" },\n\n\t{ TAG_ETH_ENCAP,\t\"BIE\" },\n\n\t{ TAG_TCP_TTL,\t\t\"bTT\" },\n\n\t{ TAG_TCP_KEEPALIVE,\t\"lKI\" },\n\n\t{ TAG_KEEPALIVE_GO,\t\"BKG\" },\n\n\t{ TAG_NIS_DOMAIN,\t\"aYD\" },\n\n\t{ TAG_NIS_SERVERS,\t\"iYS\" },\n\n\t{ TAG_NTP_SERVERS,\t\"iNTP\" },\n\n\t{ TAG_VENDOR_OPTS,\t\"bVendor-Option\" },\n\n\t{ TAG_NETBIOS_NS,\t\"iNetbios-Name-Server\" },\n\n\t{ TAG_NETBIOS_DDS,\t\"iWDD\" },\n\n\t{ TAG_NETBIOS_NODE,\t\"$Netbios-Node\" },\n\n\t{ TAG_NETBIOS_SCOPE,\t\"aNetbios-Scope\" },\n\n\t{ TAG_XWIN_FS,\t\t\"iXFS\" },\n\n\t{ TAG_XWIN_DM,\t\t\"iXDM\" },\n\n\t{ TAG_NIS_P_DOMAIN,\t\"sN+D\" },\n\n\t{ TAG_NIS_P_SERVERS,\t\"iN+S\" },\n\n\t{ TAG_MOBILE_HOME,\t\"iMH\" },\n\n\t{ TAG_SMPT_SERVER,\t\"iSMTP\" },\n\n\t{ TAG_POP3_SERVER,\t\"iPOP3\" },\n\n\t{ TAG_NNTP_SERVER,\t\"iNNTP\" },\n\n\t{ TAG_WWW_SERVER,\t\"iWWW\" },\n\n\t{ TAG_FINGER_SERVER,\t\"iFG\" },\n\n\t{ TAG_IRC_SERVER,\t\"iIRC\" },\n\n\t{ TAG_STREETTALK_SRVR,\t\"iSTS\" },\n\n\t{ TAG_STREETTALK_STDA,\t\"iSTDA\" },\n\n\t{ TAG_REQUESTED_IP,\t\"iRequested-IP\" },\n\n\t{ TAG_IP_LEASE,\t\t\"lLease-Time\" },\n\n\t{ TAG_OPT_OVERLOAD,\t\"$OO\" },\n\n\t{ TAG_TFTP_SERVER,\t\"aTFTP\" },\n\n\t{ TAG_BOOTFILENAME,\t\"aBF\" },\n\n\t{ TAG_DHCP_MESSAGE,\t\" DHCP-Message\" },\n\n\t{ TAG_SERVER_ID,\t\"iServer-ID\" },\n\n\t{ TAG_PARM_REQUEST,\t\"bParameter-Request\" },\n\n\t{ TAG_MESSAGE,\t\t\"aMSG\" },\n\n\t{ TAG_MAX_MSG_SIZE,\t\"sMSZ\" },\n\n\t{ TAG_RENEWAL_TIME,\t\"lRN\" },\n\n\t{ TAG_REBIND_TIME,\t\"lRB\" },\n\n\t{ TAG_VENDOR_CLASS,\t\"aVendor-Class\" },\n\n\t{ TAG_CLIENT_ID,\t\"$Client-ID\" },\n\n/* RFC 2485 */\n\n\t{ TAG_OPEN_GROUP_UAP,\t\"aUAP\" },\n\n/* RFC 2563 */\n\n\t{ TAG_DISABLE_AUTOCONF,\t\"BNOAUTO\" },\n\n/* RFC 2610 */\n\n\t{ TAG_SLP_DA,\t\t\"bSLP-DA\" },\t/*\"b\" is a little wrong */\n\n\t{ TAG_SLP_SCOPE,\t\"bSLP-SCOPE\" },\t/*\"b\" is a little wrong */\n\n/* RFC 2937 */\n\n\t{ TAG_NS_SEARCH,\t\"sNSSEARCH\" },\t/* XXX 's' */\n\n/* RFC 3004 - The User Class Option for DHCP */\n\n\t{ TAG_USER_CLASS,\t\"$User-Class\" },\n\n/* RFC 3011 */\n\n\t{ TAG_IP4_SUBNET_SELECT, \"iSUBNET\" },\n\n/* RFC 3442 */\n\n\t{ TAG_CLASSLESS_STATIC_RT, \"$Classless-Static-Route\" },\n\n\t{ TAG_CLASSLESS_STA_RT_MS, \"$Classless-Static-Route-Microsoft\" },\n\n/* RFC 5859 - TFTP Server Address Option for DHCPv4 */\n\n\t{ TAG_TFTP_SERVER_ADDRESS, \"iTFTP-Server-Address\" },\n\n/* http://www.iana.org/assignments/bootp-dhcp-extensions/index.htm */\n\n\t{ TAG_SLP_NAMING_AUTH,\t\"aSLP-NA\" },\n\n\t{ TAG_CLIENT_FQDN,\t\"$FQDN\" },\n\n\t{ TAG_AGENT_CIRCUIT,\t\"$Agent-Information\" },\n\n\t{ TAG_AGENT_REMOTE,\t\"bARMT\" },\n\n\t{ TAG_AGENT_MASK,\t\"bAMSK\" },\n\n\t{ TAG_TZ_STRING,\t\"aTZSTR\" },\n\n\t{ TAG_FQDN_OPTION,\t\"bFQDNS\" },\t/* XXX 'b' */\n\n\t{ TAG_AUTH,\t\t\"bAUTH\" },\t/* XXX 'b' */\n\n\t{ TAG_VINES_SERVERS,\t\"iVINES\" },\n\n\t{ TAG_SERVER_RANK,\t\"sRANK\" },\n\n\t{ TAG_CLIENT_ARCH,\t\"sARCH\" },\n\n\t{ TAG_CLIENT_NDI,\t\"bNDI\" },\t/* XXX 'b' */\n\n\t{ TAG_CLIENT_GUID,\t\"bGUID\" },\t/* XXX 'b' */\n\n\t{ TAG_LDAP_URL,\t\t\"aLDAP\" },\n\n\t{ TAG_6OVER4,\t\t\"i6o4\" },\n\n\t{ TAG_PRINTER_NAME,\t\"aPRTR\" },\n\n\t{ TAG_MDHCP_SERVER,\t\"bMDHCP\" },\t/* XXX 'b' */\n\n\t{ TAG_IPX_COMPAT,\t\"bIPX\" },\t/* XXX 'b' */\n\n\t{ TAG_NETINFO_PARENT,\t\"iNI\" },\n\n\t{ TAG_NETINFO_PARENT_TAG, \"aNITAG\" },\n\n\t{ TAG_URL,\t\t\"aURL\" },\n\n\t{ TAG_FAILOVER,\t\t\"bFAIL\" },\t/* XXX 'b' */\n\n\t{ 0, NULL }\n", "file_path": "pfring-framework/userland/tcpdump-4.7.4/print-bootp.c", "rank": 85, "score": 23.237051317203026 }, { "content": "use std::cell::RefCell;\n\nuse std::collections::{HashMap, VecDeque};\n\nuse std::panic;\n\nuse std::rc::Rc;\n\nuse time::precise_time_ns;\n\n\n\nuse pnet::packet::Packet;\n\nuse pnet::packet::ipv4::Ipv4Packet;\n\nuse pnet::packet::tcp::{TcpOptionIterable, TcpOptionNumbers, TcpPacket};\n\n\n\nuse c_api;\n\nuse tapdance_session::TapdanceSession;\n\nuse util;\n\n\n\n// All members are stored in host-order, even src_ip and dst_ip.\n\n#[derive(PartialEq,Eq,Hash,Copy,Clone,Debug)]\n\npub struct Flow\n\n{\n\n pub src_ip: u32,\n\n pub dst_ip: u32,\n", "file_path": "tapdance-rust-logic/src/flow_tracker.rs", "rank": 86, "score": 22.313610326994905 }, { "content": " // Returns whether the (read, write) direction is currently operational.\n\n // (Maybe blocked, but definitely there and capable of progress).\n\n // (false, false) DOES NOT imply that the underlying stream is None: there\n\n // can be a clean shutdown process resolving. Basically, if either of these\n\n // are false, it's probably a good idea to try a clean_shutdown().\n\n //\n\n // It's necessary to track these separately in case it was we the station,\n\n // and not the remote host, who initiated a clean_shutdown: in that case,\n\n // reads_open would remain true until the remote host has responded to the\n\n // shutdown. (At the same time, writes_open==false reminds us we're closing)\n\n pub fn rw_is_open(&self) -> (bool, bool)\n\n {\n\n (self.reads_open, self.writes_open)\n\n }\n\n \n\n pub fn half_close_pending(&self) -> bool { self.half_close_pending }\n\n pub fn half_closed(&self) -> bool { self.half_closed }\n\n pub fn stream_count(&self) -> usize { self.stream_count }\n\n\n\n // Inform the struct that its underlying stream is closed to reads.\n", "file_path": "tapdance-rust-logic/src/buffered_tunnel.rs", "rank": 87, "score": 21.594036519782794 }, { "content": " // during_event_processing: same as in mio_notify_readable().\n\n // Returns whether any progress was made, i.e., any bytes accepted by BIO.\n\n fn feed_or_buffer(&mut self, chunk: &[u8], chunk_seq_start: u32,\n\n is_fin: bool, during_event_processing: bool) -> bool\n\n {\n\n let usefulness = self.segment_usefulness(chunk_seq_start,\n\n chunk.len() as u32);\n\n if let TcpSegmentUsefulness::UsefulNow(buf_ind_start) = usefulness\n\n {\n\n let fed =\n\n self.mem_ssl.feed_data_from_remote(&chunk[buf_ind_start..]);\n\n self.next_seq = self.next_seq.wrapping_add(fed);\n\n let attempted_feed_len = chunk[buf_ind_start..].len() as u32;\n\n if fed < attempted_feed_len\n\n {\n\n self.store_fed_scraps(\n\n &chunk[buf_ind_start..], fed, is_fin,\n\n chunk_seq_start.wrapping_add(buf_ind_start as u32));\n\n }\n\n if is_fin\n", "file_path": "tapdance-rust-logic/src/evented_ssl_eavesdropper.rs", "rank": 88, "score": 21.383909280330837 }, { "content": "{\n\n pub fn new(session_id: SessionId) -> BufferableSSL\n\n {\n\n BufferableSSL { c_ssl: 0 as *mut c_void,\n\n underlying_sock: None,\n\n read_wants_writable: false,\n\n write_wants_readable: false,\n\n bytes_recvd: 0,\n\n bytes_sent: 0,\n\n session_id: session_id }\n\n }\n\n\n\n // Pass all of these as host order.\n\n pub fn construct_forged_ssl(&mut self,\n\n tcp_pkt: &TcpPacket, flow: &Flow,\n\n wscale_and_mss: &WscaleAndMSS,\n\n tcp_ts: u32, tcp_ts_ecr: u32,\n\n master_key: &[u8],\n\n client_random: &[u8],\n\n server_random: &[u8]) -> bool\n", "file_path": "tapdance-rust-logic/src/bufferable_ssl.rs", "rank": 89, "score": 21.098504006642443 }, { "content": "use std::fmt;\n\n\n\n#[derive(PartialEq, Eq, Hash, Clone, Copy)]\n\npub struct SessionId([u8; 16]);\n\n\n\nimpl SessionId\n\n{\n\n pub fn new(id: &[u8]) -> SessionId\n\n {\n\n let mut x = [0; 16];\n\n if id.len() == 16 {\n\n x.clone_from_slice(id);\n\n } else {\n\n error!(\"Tried to clone a slice, supposedly containing a SessionId,\n\n that wasn't 16 bytes! Using 0 for this session's ID\n\n Contents: {:?}\", id);\n\n }\n\n SessionId(x)\n\n }\n\n}\n", "file_path": "tapdance-rust-logic/src/session_id.rs", "rank": 90, "score": 21.01560880187447 }, { "content": " membio_from_remote: c_api::c_new_membio(),\n\n membio_to_remote: c_api::c_new_membio() }\n\n }\n\n pub fn ssl_read(&mut self, output: &mut [u8]) -> Result<usize, i32>\n\n {\n\n if self.ptr != 0 as *mut c_void { c_api::c_SSL_read(self.ptr, output) }\n\n else { Err(0) }\n\n }\n\n // Returns number of bytes successfully fed. May be [0, input.len()].\n\n pub fn feed_data_from_remote(&mut self, input: &[u8]) -> u32\n\n {\n\n let raw_fed = c_api::c_BIO_write(self.membio_from_remote, input);\n\n if raw_fed < 0 { 0 } else { raw_fed as u32 }\n\n }\n\n // SSL_pending can tell you 0 when SSL_read would read. If we want this\n\n // functionality, this struct needs a buffer to hold SSL_read results, so\n\n // this function can try directly and then report the buffer size. (Rather,\n\n // this fn should become a bool; if we need to be able to say the exact\n\n // size, the buffer could get huge). So, TODO? Or just remove?\n\n// pub fn num_readbytes_ready(&self) -> i32\n", "file_path": "tapdance-rust-logic/src/mem_open_ssl.rs", "rank": 91, "score": 20.273638410326495 }, { "content": "\n\n// For only tapdance flows (TLS/proxy/etc state)\n\npub struct TapdanceSession\n\n{\n\n pub session_id: SessionId,\n\n pub cov: BufferedTunnel<BufferableTCP>,\n\n pub cli_pair: DirectionPair,\n\n sent_init_to_client: bool,\n\n sent_close_to_client: bool,\n\n pub expect_bidi_reconnect: bool,\n\n pub expect_uploader_reconnect: bool,\n\n already_errored_out: bool,\n\n pub decoy_ip: String,\n\n pub received_gen: bool, // false until our first protobuf from client (first\n\n // reconnect)\n\n pub cli2cov_bytes_tot: usize,\n\n pub cov2cli_bytes_tot: usize,\n\n}\n\nimpl TapdanceSession\n\n{\n", "file_path": "tapdance-rust-logic/src/tapdance_session.rs", "rank": 92, "score": 19.995313611495952 }, { "content": "use std::io;\n\n\n\nuse lazycell::LazyCell;\n\nuse mio::{Ready, Poll, PollOpt, Registration, SetReadiness, Token};\n\nuse mio::unix::UnixReady;\n\n\n\npub struct EventHook\n\n{\n\n mio_reg: LazyCell<Registration>,\n\n mio_notifier: LazyCell<SetReadiness>,\n\n became_readable_before_reg: LazyCell<bool>,\n\n}\n\nimpl EventHook\n\n{\n\n pub fn new() -> EventHook\n\n {\n\n EventHook { mio_reg: LazyCell::new(),\n\n mio_notifier: LazyCell::new(),\n\n became_readable_before_reg: LazyCell::new() }\n\n }\n", "file_path": "tapdance-rust-logic/src/event_hook.rs", "rank": 93, "score": 19.781044513251167 }, { "content": "#define SUNRPC_PMAPPORT\t\t((uint16_t)111)\n", "file_path": "pfring-framework/userland/tcpdump-4.7.4/print-sunrpc.c", "rank": 94, "score": 19.525092662994147 }, { "content": " {\n\n self.in_order_fin_received = true;\n\n }\n\n if fed > 0\n\n {\n\n self.mio_notify_readable(during_event_processing);\n\n return true;\n\n }\n\n }\n\n else if usefulness == TcpSegmentUsefulness::UsefulLater\n\n {\n\n self.add_slice_to_buffer(chunk, chunk_seq_start, is_fin);\n\n }\n\n // else (do nothing; the data is stale/already recvd).\n\n return false;\n\n }\n\n fn became_readable_during_processing(&self) -> bool\n\n {\n\n if let Ok(ref b) = self.became_readable_during_processing.try_borrow()\n\n {\n", "file_path": "tapdance-rust-logic/src/evented_ssl_eavesdropper.rs", "rank": 95, "score": 19.513587290071612 }, { "content": "use std::{mem, panic};\n\n\n\nuse c_api;\n\n\n\nextern crate crypto;\n\n\n\n//pub mod rust_tapdance;\n\n\n\nconst STEGO_DATA_LEN: usize = 177;\n\n// elligator2.h\n\n// Assuming curve25519; prime p = 2^255-19; curve y^2 = x^3 + A*x^2 + x;\n\n// A = 486662\n\n// Elliptic curve points represented as bytes. Each coordinate is 32 bytes.\n\n// On curve25519, always take canonical y in range 0,..,(p-1)/2.\n\n// We can ignore y-coord.\n\n\n\n\n\n// Extracts up to out_buf_len stego'd bytes in_bufto 'out_buf', from the 4 bytes\n\n// of AES ciphertext at 'in_buf'.\n\n// Returns number of bytes written in_bufto 'out_buf'.\n", "file_path": "tapdance-rust-logic/src/elligator.rs", "rank": 96, "score": 18.966695068136637 }, { "content": "use signalling::ErrorReasonS2C;\n\n\n\n#[derive(Debug)]\n\npub enum SessionError\n\n{\n\n ClientStream, // Client's TLS stream (or underlying TCP) broke\n\n CovertStream, // TCP connection to Squid broke\n\n ClientReported, // Client sent an ERROR proto\n\n ClientProtocol, // Client misbehaved, e.g. malformed proto\n\n StationInternal,// Station logic hit a bad state; probably a station bug\n\n ClientTimeout, // After cleanly closing a stream, sayig expect reconnect,\n\n // client took too long (30 seconds) to establish a new one.\n\n DecoyOverload, // The client picked an overloaded decoy for this session.\n\n}\n\n\n\nimpl SessionError\n\n{\n\n pub fn to_string(&self) -> &'static str\n\n {\n\n match self {\n", "file_path": "tapdance-rust-logic/src/session_error.rs", "rank": 97, "score": 18.966102874027605 }, { "content": "use mio::{Evented, Poll, PollOpt, Ready, Token};\n\nuse mio::unix::UnixReady;\n\nuse std::collections::VecDeque;\n\nuse std::io;\n\nuse std::io::{Error, ErrorKind};\n\n\n\nuse stream_traits::{BufferableSender, StreamReceiver,\n\n BufStat, RawWriteStat, ReadStat, ShutStat, WriteStat};\n\nuse token_map::UniqTok;\n\n\n\nconst MAX_WRITE_SIZE: usize = 16*1024;\n\n\n\n// Wraps a buffer around a stream. For when we want a single logical stream to\n\n// persist across a series of actual streams, with writes being buffered when\n\n// in-between streams (in addition to when the stream is WOULDBLOCKing).\n\npub struct BufferedTunnel<S>\n\n{\n\n buf: VecDeque<u8>,\n\n stream: Option<S>,\n\n writes_open: bool,\n", "file_path": "tapdance-rust-logic/src/buffered_tunnel.rs", "rank": 98, "score": 18.747044146095092 }, { "content": "size_t get_tag_from_payload(const unsigned char *payload, size_t payload_len,\n\n const unsigned char *station_pubkey,\n", "file_path": "libtapdance/elligator2.h", "rank": 99, "score": 18.699393131952462 } ]
Rust
src/models/withdrawal.rs
nervina-labs/compact-nft-aggregator
f5d4afebd732fdcd55c7590ddfc5af0eb30b2eda
use super::helper::{parse_cota_id_and_token_index_pairs, parse_lock_hash}; use crate::models::block::get_syncer_tip_block_number; use crate::models::helper::{generate_crc, PAGE_SIZE}; use crate::models::scripts::get_script_map_by_ids; use crate::models::{DBResult, DBTotalResult}; use crate::schema::withdraw_cota_nft_kv_pairs::dsl::withdraw_cota_nft_kv_pairs; use crate::schema::withdraw_cota_nft_kv_pairs::*; use crate::utils::error::Error; use crate::utils::helper::{diff_time, parse_bytes_n}; use crate::POOL; use chrono::prelude::*; use diesel::*; use log::error; use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Queryable, Debug, Clone)] pub struct WithdrawCotaNft { pub cota_id: String, pub token_index: u32, pub out_point: String, pub state: u8, pub configure: u8, pub characteristic: String, pub receiver_lock_script_id: i64, pub version: u8, } #[derive(Debug, Clone, Eq, PartialEq)] pub struct WithdrawDb { pub cota_id: [u8; 20], pub token_index: [u8; 4], pub out_point: [u8; 24], pub state: u8, pub configure: u8, pub characteristic: [u8; 20], pub receiver_lock_script: Vec<u8>, pub version: u8, } #[derive(Debug, Clone, Eq, PartialEq)] pub struct WithdrawNFTDb { pub cota_id: [u8; 20], pub token_index: [u8; 4], pub out_point: [u8; 24], pub state: u8, pub configure: u8, pub characteristic: [u8; 20], } pub fn get_withdrawal_cota_by_lock_hash( lock_hash_: [u8; 32], cota_id_and_token_index_pairs: Option<Vec<([u8; 20], [u8; 4])>>, ) -> DBResult<WithdrawDb> { let start_time = Local::now().timestamp_millis(); let conn = &POOL.clone().get().expect("Mysql pool connection error"); let (lock_hash_hex, lock_hash_crc_) = parse_lock_hash(lock_hash_); let mut withdraw_nfts: Vec<WithdrawCotaNft> = vec![]; match cota_id_and_token_index_pairs { Some(pairs) => { let pair_vec = parse_cota_id_and_token_index_pairs(pairs); for (cota_id_str, token_index_u32) in pair_vec.into_iter() { let cota_id_crc_ = generate_crc(cota_id_str.as_bytes()); let withdrawals: Vec<WithdrawCotaNft> = withdraw_cota_nft_kv_pairs .select(get_selection()) .filter(cota_id_crc.eq(cota_id_crc_)) .filter(token_index.eq(token_index_u32)) .filter(lock_hash_crc.eq(lock_hash_crc_)) .filter(lock_hash.eq(lock_hash_hex.clone())) .filter(cota_id.eq(cota_id_str)) .order(updated_at.desc()) .limit(1) .load::<WithdrawCotaNft>(conn) .map_err(|e| { error!("Query withdraw error: {}", e.to_string()); Error::DatabaseQueryError(e.to_string()) })?; if !withdrawals.is_empty() { let withdrawal = withdrawals.get(0).unwrap().clone(); withdraw_nfts.push(withdrawal); } } } None => { let mut page: i64 = 0; loop { let withdrawals_page: Vec<WithdrawCotaNft> = withdraw_cota_nft_kv_pairs .select(get_selection()) .filter(lock_hash_crc.eq(lock_hash_crc_)) .filter(lock_hash.eq(lock_hash_hex.clone())) .limit(PAGE_SIZE) .offset(PAGE_SIZE * page) .load::<WithdrawCotaNft>(conn) .map_err(|e| { error!("Query withdraw error: {}", e.to_string()); Error::DatabaseQueryError(e.to_string()) })?; let length = withdrawals_page.len(); withdraw_nfts.extend(withdrawals_page); if length < (PAGE_SIZE as usize) { break; } page += 1; } } }; diff_time(start_time, "SQL get_withdrawal_cota_by_lock_hash"); parse_withdraw_db(withdraw_nfts) } pub fn get_withdrawal_cota_by_cota_ids( lock_hash_: [u8; 32], cota_ids: Vec<[u8; 20]>, page: i64, page_size: i64, ) -> DBTotalResult<WithdrawDb> { let start_time = Local::now().timestamp_millis(); let conn = &POOL.clone().get().expect("Mysql pool connection error"); let (lock_hash_hex, lock_hash_crc_) = parse_lock_hash(lock_hash_); let cota_ids_: Vec<String> = cota_ids .into_iter() .map(|cota_id_| hex::encode(&cota_id_)) .collect(); let total: i64 = withdraw_cota_nft_kv_pairs .filter(lock_hash_crc.eq(lock_hash_crc_)) .filter(lock_hash.eq(lock_hash_hex.clone())) .filter(cota_id.eq_any(cota_ids_.clone())) .count() .get_result::<i64>(conn) .map_err(|e| { error!("Query withdraw error: {}", e.to_string()); Error::DatabaseQueryError(e.to_string()) })?; let withdraw_cota_nfts: Vec<WithdrawCotaNft> = withdraw_cota_nft_kv_pairs .select(get_selection()) .filter(lock_hash_crc.eq(lock_hash_crc_)) .filter(lock_hash.eq(lock_hash_hex)) .filter(cota_id.eq_any(cota_ids_)) .order(updated_at.desc()) .limit(page_size) .offset(page_size * page) .load::<WithdrawCotaNft>(conn) .map_err(|e| { error!("Query withdraw error: {}", e.to_string()); Error::DatabaseQueryError(e.to_string()) })?; let (withdrawals, block_height) = parse_withdraw_db(withdraw_cota_nfts)?; diff_time(start_time, "SQL get_withdrawal_cota_by_cota_ids"); Ok((withdrawals, total, block_height)) } pub fn get_withdrawal_cota_by_script_id( script_id: i64, cota_id_opt: Option<[u8; 20]>, ) -> DBTotalResult<WithdrawNFTDb> { let start_time = Local::now().timestamp_millis(); let conn = &POOL.clone().get().expect("Mysql pool connection error"); let total_result = match cota_id_opt { Some(cota_id_) => withdraw_cota_nft_kv_pairs .filter(receiver_lock_script_id.eq(script_id)) .filter(cota_id.eq(hex::encode(&cota_id_))) .count() .get_result::<i64>(conn), None => withdraw_cota_nft_kv_pairs .filter(receiver_lock_script_id.eq(script_id)) .count() .get_result::<i64>(conn), }; let total: i64 = total_result.map_err(|e| { error!("Query withdraw error: {}", e.to_string()); Error::DatabaseQueryError(e.to_string()) })?; let withdraw_cota_nfts_result = match cota_id_opt { Some(cota_id_) => withdraw_cota_nft_kv_pairs .select(get_selection()) .filter(receiver_lock_script_id.eq(script_id)) .filter(cota_id.eq(hex::encode(&cota_id_))) .order(updated_at.desc()) .load::<WithdrawCotaNft>(conn), None => withdraw_cota_nft_kv_pairs .select(get_selection()) .filter(receiver_lock_script_id.eq(script_id)) .order(updated_at.desc()) .load::<WithdrawCotaNft>(conn), }; let withdraw_cota_nfts: Vec<WithdrawCotaNft> = withdraw_cota_nfts_result.map_err(|e| { error!("Query withdraw error: {}", e.to_string()); Error::DatabaseQueryError(e.to_string()) })?; let withdrawals = parse_withdraw_cota_nft(withdraw_cota_nfts); let block_height = get_syncer_tip_block_number()?; diff_time(start_time, "SQL get_withdrawal_cota_by_script_id"); Ok((withdrawals, total, block_height)) } pub fn get_sender_lock_by_script_id( script_id: i64, cota_id_: [u8; 20], token_index_: [u8; 4], ) -> Result<Option<String>, Error> { let start_time = Local::now().timestamp_millis(); let conn = &POOL.clone().get().expect("Mysql pool connection error"); let cota_id_hex = hex::encode(cota_id_); let token_index_u32 = u32::from_be_bytes(token_index_); let cota_id_crc_u32 = generate_crc(cota_id_hex.as_bytes()); let lock_hashes: Vec<String> = withdraw_cota_nft_kv_pairs .select(lock_hash) .filter(cota_id_crc.eq(cota_id_crc_u32)) .filter(token_index.eq(token_index_u32)) .filter(cota_id.eq(cota_id_hex)) .filter(receiver_lock_script_id.eq(script_id)) .order(updated_at.desc()) .limit(1) .load::<String>(conn) .map_err(|e| { error!("Query withdraw error: {}", e.to_string()); Error::DatabaseQueryError(e.to_string()) })?; diff_time(start_time, "SQL get_sender_lock_by_script_id"); Ok(lock_hashes.get(0).cloned()) } fn parse_withdraw_db(withdrawals: Vec<WithdrawCotaNft>) -> DBResult<WithdrawDb> { let block_height = get_syncer_tip_block_number()?; if withdrawals.is_empty() { return Ok((vec![], block_height)); } let receiver_lock_script_ids: Vec<i64> = withdrawals .iter() .map(|withdrawal| withdrawal.receiver_lock_script_id) .collect(); let mut withdraw_db_vec: Vec<WithdrawDb> = vec![]; let script_map = get_script_map_by_ids(receiver_lock_script_ids)?; for withdrawal in withdrawals { let lock_script = script_map .get(&withdrawal.receiver_lock_script_id) .ok_or(Error::DatabaseQueryError("scripts".to_owned()))? .clone(); withdraw_db_vec.push(WithdrawDb { cota_id: parse_bytes_n::<20>(withdrawal.cota_id).unwrap(), token_index: withdrawal.token_index.to_be_bytes(), configure: withdrawal.configure, state: withdrawal.state, characteristic: parse_bytes_n::<20>(withdrawal.characteristic).unwrap(), receiver_lock_script: lock_script, out_point: parse_bytes_n::<24>(withdrawal.out_point).unwrap(), version: withdrawal.version, }) } Ok((withdraw_db_vec, block_height)) } fn parse_withdraw_cota_nft(withdrawals: Vec<WithdrawCotaNft>) -> Vec<WithdrawNFTDb> { if withdrawals.is_empty() { return vec![]; } let withdraw_db_vec: Vec<WithdrawNFTDb> = withdrawals .into_iter() .map(|withdrawal| WithdrawNFTDb { cota_id: parse_bytes_n::<20>(withdrawal.cota_id).unwrap(), token_index: withdrawal.token_index.to_be_bytes(), configure: withdrawal.configure, state: withdrawal.state, characteristic: parse_bytes_n::<20>(withdrawal.characteristic).unwrap(), out_point: parse_bytes_n::<24>(withdrawal.out_point).unwrap(), }) .collect(); withdraw_db_vec } fn get_selection() -> ( cota_id, token_index, out_point, state, configure, characteristic, receiver_lock_script_id, version, ) { ( cota_id, token_index, out_point, state, configure, characteristic, receiver_lock_script_id, version, ) }
use super::helper::{parse_cota_id_and_token_index_pairs, parse_lock_hash}; use crate::models::block::get_syncer_tip_block_number; use crate::models::helper::{generate_crc, PAGE_SIZE}; use crate::models::scripts::get_script_map_by_ids; use crate::models::{DBResult, DBTotalResult}; use crate::schema::withdraw_cota_nft_kv_pairs::dsl::withdraw_cota_nft_kv_pairs; use crate::schema::withdraw_cota_nft_kv_pairs::*; use crate::utils::error::Error; use crate::utils::helper::{diff_time, parse_bytes_n}; use crate::POOL; use chrono::prelude::*; use diesel::*; use log::error; use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Queryable, Debug, Clone)] pub struct WithdrawCotaNft { pub cota_id: String, pub token_index: u32, pub out_point: String, pub state: u8, pub configure: u8, pub characteristic: String, pub receiver_lock_script_id: i64, pub version: u8, } #[derive(Debug, Clone, Eq, PartialEq)] pub struct WithdrawDb { pub cota_id: [u8; 20], pub token_index: [u8; 4], pub out_point: [u8; 24], pub state: u8, pub configure: u8, pub characteristic: [u8; 20], pub receiver_lock_script: Vec<u8>, pub version: u8, } #[derive(Debug, Clone, Eq, PartialEq)] pub struct WithdrawNFTDb { pub cota_id: [u8; 20], pub token_index: [u8; 4], pub out_point: [u8; 24], pub state: u8, pub configure: u8, pub characteristic: [u8; 20], } pub fn get_withdrawal_cota_by_lock_hash( lock_hash_: [u8; 32], cota_id_and_token_index_pairs: Option<Vec<([u8; 20], [u8; 4])>>, ) -> DBResult<WithdrawDb> { let start_time = Local::now().timestamp_millis(); let conn = &POOL.clone().get().expect("Mysql pool connection error"); let (lock_hash_hex, lock_hash_crc_) = parse_lock_hash(lock_hash_); let mut withdraw_nfts: Vec<WithdrawCotaNft> = vec![]; match cota_id_and_token_index_pairs { Some(pairs) => { let pair_vec = parse_cota_id_and_token_index_pairs(pairs); for (cota_id_str, token_index_u32) in pair_vec.into_iter() { let cota_id_crc_ = generate_crc(cota_id_str.as_bytes()); let withdrawals: Vec<WithdrawCotaNft> = withdraw_cota_nft_kv_pairs .select(get_selection()) .filter(cota_id_crc.eq(cota_id_crc_)) .filter(token_index.eq(token_index_u32)) .filter(lock_hash_crc.eq(lock_hash_crc_)) .filter(lock_hash.eq(lock_hash_hex.clone())) .filter(cota_id.eq(cota_id_str)) .order(updated_at.desc()) .limit(1) .load::<WithdrawCotaNft>(conn) .map_err(|e| { error!("Query withdraw error: {}", e.to_string()); Error::DatabaseQueryError(e.to_string()) })?; if !withdrawals.is_empty() { let withdrawal = withdrawals.get(0).unwrap().clone(); withdraw_nfts.push(withdrawal); } } } None => { let mut page: i64 = 0; loop { let withdrawals_page: Vec<WithdrawCotaNft> = withdraw_cota_nft_kv_pairs .select(get_selection()) .filter(lock_hash_crc.eq(lock_hash_crc_)) .filter(lock_hash.eq(lock_hash_hex.clone())) .limit(PAGE_SIZE) .offset(PAGE_SIZE * page) .load::<WithdrawCotaNft>(conn) .map_err(|e| { error!("Query withdraw error: {}", e.to_string()); Error::DatabaseQueryError(e.to_string()) })?; let length = withdrawals_page.len(); withdraw_nfts.extend(withdrawals_page); if length < (PAGE_SIZE as usize) { break; } page += 1; } } }; diff_time(start_time, "SQL get_withdrawal_cota_by_lock_hash"); parse_withdraw_db(withdraw_nfts) } pub fn get_withdrawal_cota_by_cota_ids( lock_hash_: [u8; 32], cota_ids: Vec<[u8; 20]>, page: i64, page_size: i64, ) -> DBTotalResult<WithdrawDb> { let start_time = Local::now().timestamp_millis(); let conn = &POOL.clone().get().expect("Mysql pool connection error"); let (lock_hash_hex, lock_hash_crc_) = parse_lock_hash(lock_hash_); let cota_ids_: Vec<String> = cota_ids .into_iter() .map(|cota_id_| hex::encode(&cota_id_)) .collect(); let total: i64 = withdraw_cota_nft_kv_pairs .filter(lock_hash_crc.eq(lock_hash_crc_)) .filter(lock_hash.eq(lock_hash_hex.clone())) .filter(cota_id.eq_any(cota_ids_.clone())) .count() .get_result::<i64>(conn) .map_err(|e| { error!("Query withdraw error: {}", e.to_string()); Error::DatabaseQueryError(e.to_string()) })?; let withdraw_cota_nfts: Vec<WithdrawCotaNft> = withdraw_cota_nft_kv_pairs .select(get_selection()) .filter(lock_hash_crc.eq(lock_hash_crc_)) .filter(lock_hash.eq(lock_hash_hex)) .filter(cota_id.eq_any(cota_ids_)) .order(updated_at.desc()) .limit(page_size) .offset(page_size * page) .load::<WithdrawCotaNft>(conn) .map_err(|e| { error!("Query withdraw error: {}", e.to_string()); Error::DatabaseQueryError(e.to_string()) })?; let (withdrawals, block_height) = parse_withdraw_db(withdraw_cota_nfts)?; diff_time(start_time, "SQL get_withdrawal_cota_by_cota_ids"); Ok((withdrawals, total, block_height)) } pub fn get_withdrawal_cota_by_script_id( script_id: i64, cota_id_opt: Option<[u8; 20]>, ) -> DBTotalResult<WithdrawNFTDb> { let start_time = Local::now().timestamp_millis(); let conn = &POOL.clone().get().expect("Mysql pool connection error"); let total_result = match cota_id_opt { Some(cota_id_) => withdraw_cota_nft_kv_pairs .filter(receiver_lock_script_id.eq(script_id)) .filter(cota_id.eq(hex::encode(&cota_id_))) .count() .get_result::<i64>(conn), None => withdraw_cota_nft_kv_pairs .filter(receiver_lock_script_id.eq(script_id)) .count() .get_result::<i64>(conn), }; let total: i64 = total_result.map_err(|e| { error!("Query withdraw error: {}", e.to_string()); Error::DatabaseQueryError(e.to_string()) })?; let withdraw_cota_nfts_result = match cota_id_opt { Some(cota_id_) => withdraw_cota_nft_kv_pairs .select(get_selection()) .filter(receiver_lock_script_id.eq(script_id)) .filter(cota_id.eq(hex::encode(&cota_id_))) .order(updated_at.desc()) .load::<WithdrawCotaNft>(conn), None => withdraw_cota_nft_kv_pairs .select(get_selection()) .filter(receiver_lock_script_id.eq(script_id)) .order(updated_at.desc()) .load::<WithdrawCotaNft>(conn), }; let withdraw_cota_nfts: Vec<WithdrawCotaNft> = withdraw_cota_nfts_result.map_err(|e| { error!("Query withdraw error: {}", e.to_string()); Error::DatabaseQueryError(e.to_string()) })?; let withdrawals = parse_withdraw_cota_nft(withdraw_cota_nfts); let block_height = get_syncer_tip_block_number()?; diff_time(start_time, "SQL get_withdrawal_cota_by_script_id"); Ok((withdrawals, total, block_height)) }
fn parse_withdraw_db(withdrawals: Vec<WithdrawCotaNft>) -> DBResult<WithdrawDb> { let block_height = get_syncer_tip_block_number()?; if withdrawals.is_empty() { return Ok((vec![], block_height)); } let receiver_lock_script_ids: Vec<i64> = withdrawals .iter() .map(|withdrawal| withdrawal.receiver_lock_script_id) .collect(); let mut withdraw_db_vec: Vec<WithdrawDb> = vec![]; let script_map = get_script_map_by_ids(receiver_lock_script_ids)?; for withdrawal in withdrawals { let lock_script = script_map .get(&withdrawal.receiver_lock_script_id) .ok_or(Error::DatabaseQueryError("scripts".to_owned()))? .clone(); withdraw_db_vec.push(WithdrawDb { cota_id: parse_bytes_n::<20>(withdrawal.cota_id).unwrap(), token_index: withdrawal.token_index.to_be_bytes(), configure: withdrawal.configure, state: withdrawal.state, characteristic: parse_bytes_n::<20>(withdrawal.characteristic).unwrap(), receiver_lock_script: lock_script, out_point: parse_bytes_n::<24>(withdrawal.out_point).unwrap(), version: withdrawal.version, }) } Ok((withdraw_db_vec, block_height)) } fn parse_withdraw_cota_nft(withdrawals: Vec<WithdrawCotaNft>) -> Vec<WithdrawNFTDb> { if withdrawals.is_empty() { return vec![]; } let withdraw_db_vec: Vec<WithdrawNFTDb> = withdrawals .into_iter() .map(|withdrawal| WithdrawNFTDb { cota_id: parse_bytes_n::<20>(withdrawal.cota_id).unwrap(), token_index: withdrawal.token_index.to_be_bytes(), configure: withdrawal.configure, state: withdrawal.state, characteristic: parse_bytes_n::<20>(withdrawal.characteristic).unwrap(), out_point: parse_bytes_n::<24>(withdrawal.out_point).unwrap(), }) .collect(); withdraw_db_vec } fn get_selection() -> ( cota_id, token_index, out_point, state, configure, characteristic, receiver_lock_script_id, version, ) { ( cota_id, token_index, out_point, state, configure, characteristic, receiver_lock_script_id, version, ) }
pub fn get_sender_lock_by_script_id( script_id: i64, cota_id_: [u8; 20], token_index_: [u8; 4], ) -> Result<Option<String>, Error> { let start_time = Local::now().timestamp_millis(); let conn = &POOL.clone().get().expect("Mysql pool connection error"); let cota_id_hex = hex::encode(cota_id_); let token_index_u32 = u32::from_be_bytes(token_index_); let cota_id_crc_u32 = generate_crc(cota_id_hex.as_bytes()); let lock_hashes: Vec<String> = withdraw_cota_nft_kv_pairs .select(lock_hash) .filter(cota_id_crc.eq(cota_id_crc_u32)) .filter(token_index.eq(token_index_u32)) .filter(cota_id.eq(cota_id_hex)) .filter(receiver_lock_script_id.eq(script_id)) .order(updated_at.desc()) .limit(1) .load::<String>(conn) .map_err(|e| { error!("Query withdraw error: {}", e.to_string()); Error::DatabaseQueryError(e.to_string()) })?; diff_time(start_time, "SQL get_sender_lock_by_script_id"); Ok(lock_hashes.get(0).cloned()) }
function_block-full_function
[ { "content": "pub fn parse_cota_id_and_token_index_pairs(pairs: Vec<([u8; 20], [u8; 4])>) -> Vec<(String, u32)> {\n\n pairs\n\n .into_iter()\n\n .map(|pair| (hex::encode(pair.0), u32::from_be_bytes(pair.1)))\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_generate_crc() {\n\n assert_eq!(generate_crc(\"cota\".as_bytes()), 985327312u32);\n\n assert_eq!(\n\n generate_crc(\n\n &\"41a7a00cced6ecc5be4ec248c01096b705e4cd9d8b0a5ef5cdb6760a3742f5de\"\n\n .as_bytes()\n\n .to_vec()\n\n ),\n\n 2934249110\n\n )\n\n }\n\n\n\n // TODO: Add more tests\n\n}\n", "file_path": "src/models/helper.rs", "rank": 0, "score": 334894.28889322106 }, { "content": "pub fn parse_lock_hash(lock_hash: [u8; 32]) -> (String, u32) {\n\n (\n\n hex::encode(lock_hash),\n\n generate_crc(hex::encode(lock_hash).as_bytes()),\n\n )\n\n}\n\n\n", "file_path": "src/models/helper.rs", "rank": 1, "score": 296344.53271238774 }, { "content": "pub fn parse_bytes(value: String) -> Result<Vec<u8>, Error> {\n\n let v = remove_0x(&value);\n\n hex::decode(v).map_err(|_| Error::RequestParamHexInvalid(value))\n\n}\n\n\n", "file_path": "src/utils/helper.rs", "rank": 2, "score": 294984.43994683307 }, { "content": "pub fn get_owned_cota_count(lock_script: &[u8], cota_id: [u8; 20]) -> Result<(i64, u64), Error> {\n\n let lock_hash = blake2b_256(lock_script);\n\n let script_id_opt = get_script_id_by_lock_script(lock_script)?;\n\n let (withdrawal_nfts, _, block_height) = match script_id_opt {\n\n Some(script_id) => get_withdrawal_cota_by_script_id(script_id, Some(cota_id)),\n\n None => Ok((vec![], 0, 0)),\n\n }?;\n\n let withdrawal_count = withdrawal_nfts\n\n .into_iter()\n\n .filter(|withdrawal| {\n\n !is_exist_in_claim(\n\n lock_hash,\n\n withdrawal.cota_id,\n\n withdrawal.token_index,\n\n withdrawal.out_point,\n\n )\n\n })\n\n .count() as i64;\n\n\n\n let hold_count = get_hold_cota_count_by_lock_hash(lock_hash, cota_id)?;\n\n debug!(\n\n \"hold count: {} and withdrawal count: {}\",\n\n hold_count, withdrawal_count\n\n );\n\n let count = hold_count + withdrawal_count;\n\n Ok((count, block_height))\n\n}\n", "file_path": "src/models/common.rs", "rank": 3, "score": 293639.8736370482 }, { "content": "pub fn get_script_map_by_ids(script_ids: Vec<i64>) -> Result<HashMap<i64, Vec<u8>>, Error> {\n\n let start_time = Local::now().timestamp_millis();\n\n let conn = &POOL.clone().get().expect(\"Mysql pool connection error\");\n\n let mut scripts_dbs: Vec<ScriptDb> = vec![];\n\n let script_ids_subs: Vec<&[i64]> = script_ids.chunks(PAGE_SIZE as usize).collect();\n\n for script_ids_sub in script_ids_subs.into_iter() {\n\n let scripts_db = scripts\n\n .select((id, code_hash, hash_type, args))\n\n .filter(id.eq_any(script_ids_sub))\n\n .load::<Script>(conn)\n\n .map_or_else(\n\n |e| {\n\n error!(\"Query script error: {}\", e.to_string());\n\n Err(Error::DatabaseQueryError(e.to_string()))\n\n },\n\n |scripts_| Ok(parse_script(scripts_)),\n\n )?;\n\n scripts_dbs.extend(scripts_db);\n\n }\n\n let scripts_: Vec<(i64, Vec<u8>)> = scripts_dbs\n\n .iter()\n\n .map(|script_db| (script_db.id, generate_script_vec(script_db)))\n\n .collect();\n\n let script_map: HashMap<i64, Vec<u8>> = scripts_.into_iter().collect();\n\n diff_time(start_time, \"SQL get_script_map_by_ids\");\n\n Ok(script_map)\n\n}\n\n\n", "file_path": "src/models/scripts.rs", "rank": 4, "score": 290529.3573452316 }, { "content": "pub fn generate_cota_index(cota_id: [u8; 20], token_index: [u8; 4]) -> Vec<u8> {\n\n let mut cota_id_index = vec![];\n\n cota_id_index.extend(&cota_id);\n\n cota_id_index.extend(&token_index);\n\n cota_id_index\n\n}\n\n\n", "file_path": "src/entries/helper.rs", "rank": 5, "score": 286926.25283009687 }, { "content": "pub fn parse_cota_witness(witness: Vec<u8>, version: u8) -> Result<CotaMap, Error> {\n\n if version > 1 {\n\n return Err(Error::WitnessParseError(\"Version invalid\".to_string()));\n\n }\n\n let witness_args = WitnessArgs::from_slice(&witness)\n\n .map_err(|_| Error::WitnessParseError(\"Parse witness args error\".to_string()))?;\n\n if witness_args.input_type().is_none() && witness_args.output_type().is_none() {\n\n return Err(Error::WitnessParseError(\"Not cota witness\".to_string()));\n\n }\n\n let mut cota_map = Map::new();\n\n cota_map = parse_cota(witness_args.input_type(), version, cota_map)?;\n\n parse_metadata(witness_args.output_type(), cota_map)\n\n}\n\n\n\nconst CREATE: u8 = 1;\n\nconst MINT: u8 = 2;\n\nconst WITHDRAW: u8 = 3;\n\nconst CLAIM: u8 = 4;\n\nconst UPDATE: u8 = 5;\n\nconst TRANSFER: u8 = 6;\n\nconst CLAIM_UPDATE: u8 = 7;\n\nconst TRANSFER_UPDATE: u8 = 8;\n", "file_path": "src/response/witness/cota.rs", "rank": 6, "score": 282957.8775925528 }, { "content": "pub fn parse_bytes_n<const N: usize>(value: String) -> Result<[u8; N], Error> {\n\n let vec =\n\n hex::decode(value.clone()).map_err(|_| Error::RequestParamHexInvalid(value.clone()))?;\n\n if vec.len() != N {\n\n return Err(Error::RequestParamHexLenError {\n\n msg: value,\n\n got: vec.len(),\n\n expected: N,\n\n });\n\n }\n\n Ok(parse_vec_n(vec))\n\n}\n\n\n", "file_path": "src/utils/helper.rs", "rank": 7, "score": 266384.04786344 }, { "content": "pub fn parse_vec_n<const N: usize>(vec: Vec<u8>) -> [u8; N] {\n\n vec.try_into().unwrap_or_else(|v: Vec<u8>| {\n\n panic!(\"Expected a Vec of length {} but it was {}\", N, v.len())\n\n })\n\n}\n\n\n", "file_path": "src/utils/helper.rs", "rank": 8, "score": 252689.3252323878 }, { "content": "pub fn generate_withdrawal_key(cota_id: [u8; 20], token_index: [u8; 4]) -> (CotaNFTId, H256) {\n\n let withdrawal_key = CotaNFTIdBuilder::default()\n\n .cota_id(CotaId::from_slice(&cota_id).unwrap())\n\n .smt_type(Uint16::from_slice(&WITHDRAWAL_NFT_SMT_TYPE).unwrap())\n\n .index(Uint32::from_slice(&token_index).unwrap())\n\n .build();\n\n let mut withdrawal_key_bytes = [0u8; 32];\n\n withdrawal_key_bytes[0..26].copy_from_slice(withdrawal_key.as_slice());\n\n let key = H256::from(withdrawal_key_bytes);\n\n\n\n (withdrawal_key, key)\n\n}\n\n\n", "file_path": "src/entries/helper.rs", "rank": 9, "score": 251661.62309127237 }, { "content": "pub fn with_lock<F>(lock_hash: [u8; 32], mut operator: F) -> Result<(), Error>\n\nwhere\n\n F: FnMut() -> Result<(), Error>,\n\n{\n\n let &(ref lock, ref cond) = &*Arc::clone(&SMT_LOCK);\n\n {\n\n let mut set = lock.lock();\n\n while !set.insert(lock_hash) {\n\n cond.wait(&mut set);\n\n }\n\n }\n\n let unlock = || {\n\n let mut set = lock.lock();\n\n set.remove(&lock_hash);\n\n cond.notify_all();\n\n };\n\n let err_handle = |err| {\n\n unlock();\n\n err\n\n };\n\n operator().map_err(err_handle)?;\n\n unlock();\n\n Ok(())\n\n}\n\n\n", "file_path": "src/entries/helper.rs", "rank": 10, "score": 243310.20803484536 }, { "content": "pub fn parse_owned_nft_count(count: i64, block_number: u64) -> Map<String, Value> {\n\n let mut map = Map::new();\n\n map.insert_i64(\"count\", count);\n\n map.insert_u64(\"block_number\", block_number);\n\n map\n\n}\n", "file_path": "src/response/hold.rs", "rank": 11, "score": 237504.3389519147 }, { "content": "pub fn leaf_key_to_vec(lock_hash: [u8; 32], key: &H256) -> Vec<u8> {\n\n let mut ret = Vec::with_capacity(64);\n\n ret.extend_from_slice(&lock_hash);\n\n ret.extend_from_slice(key.as_slice());\n\n ret\n\n}\n\n\n", "file_path": "src/smt/store/serde.rs", "rank": 12, "score": 232025.72918186747 }, { "content": "pub fn get_script_id_by_lock_script(lock_script: &[u8]) -> Result<Option<i64>, Error> {\n\n let start_time = Local::now().timestamp_millis();\n\n let conn = &POOL.clone().get().expect(\"Mysql pool connection error\");\n\n let lock = LockScript::from_slice(lock_script).unwrap();\n\n\n\n let lock_code_hash = hex::encode(lock.code_hash().as_slice());\n\n let lock_code_hash_crc = generate_crc(lock_code_hash.as_bytes());\n\n\n\n let lock_args = hex::encode(lock.args().raw_data().to_vec());\n\n let lock_args_crc = generate_crc(lock_args.as_bytes());\n\n\n\n let script_ids: Vec<i64> = scripts\n\n .select(id)\n\n .filter(code_hash_crc.eq(lock_code_hash_crc))\n\n .filter(hash_type.eq(lock.hash_type().as_slice()[0]))\n\n .filter(args_crc.eq(lock_args_crc))\n\n .filter(code_hash.eq(lock_code_hash))\n\n .filter(args.eq(lock_args))\n\n .limit(1)\n\n .load::<i64>(conn)\n\n .map_err(|e| {\n\n error!(\"Query script error: {}\", e.to_string());\n\n Error::DatabaseQueryError(e.to_string())\n\n })?;\n\n diff_time(start_time, \"SQL get_script_id_by_lock_script\");\n\n Ok(script_ids.get(0).cloned())\n\n}\n\n\n", "file_path": "src/models/scripts.rs", "rank": 13, "score": 230976.06611972695 }, { "content": "pub fn branch_key_to_vec(lock_hash: [u8; 32], key: &BranchKey) -> Vec<u8> {\n\n let mut ret = Vec::with_capacity(65);\n\n ret.extend_from_slice(&lock_hash);\n\n ret.extend_from_slice(key.node_key.as_slice());\n\n ret.extend_from_slice(&[key.height]);\n\n ret\n\n}\n\n\n", "file_path": "src/smt/store/serde.rs", "rank": 14, "score": 229442.61137787905 }, { "content": "pub fn generate_hold_key(cota_id: [u8; 20], token_index: [u8; 4]) -> (CotaNFTId, H256) {\n\n let hold_key = CotaNFTIdBuilder::default()\n\n .cota_id(CotaId::from_slice(&cota_id).unwrap())\n\n .smt_type(Uint16::from_slice(&HOLD_NFT_SMT_TYPE).unwrap())\n\n .index(Uint32::from_slice(&token_index).unwrap())\n\n .build();\n\n let mut hold_key_bytes = [0u8; 32];\n\n hold_key_bytes[0..26].copy_from_slice(hold_key.as_slice());\n\n let key = H256::from(hold_key_bytes);\n\n (hold_key, key)\n\n}\n\n\n", "file_path": "src/entries/helper.rs", "rank": 15, "score": 226183.64823883312 }, { "content": "pub fn generate_crc(v: &[u8]) -> u32 {\n\n const CRC: Crc<u32> = Crc::<u32>::new(&CRC_32_ISO_HDLC);\n\n CRC.checksum(v)\n\n}\n\n\n", "file_path": "src/models/helper.rs", "rank": 16, "score": 223133.840996899 }, { "content": "pub fn init_connection_pool() -> SqlConnectionPool {\n\n let database_url = env::var(\"DATABASE_URL\").expect(\"DATABASE_URL must be set\");\n\n let manager = ConnectionManager::<MysqlConnection>::new(database_url);\n\n let max: u32 = match env::var(\"MAX_POOL\") {\n\n Ok(max_) => from_str::<u32>(&max_).unwrap(),\n\n Err(_e) => 20,\n\n };\n\n r2d2::Pool::builder().max_size(max).build(manager).unwrap()\n\n}\n\n\n", "file_path": "src/models/helper.rs", "rank": 17, "score": 222772.73864950458 }, { "content": "pub fn get_issuer_info_by_lock_hash(lock_hash_: [u8; 32]) -> Result<Option<IssuerInfoDb>, Error> {\n\n let conn = &POOL.clone().get().expect(\"Mysql pool connection error\");\n\n let lock_hash_hex = hex::encode(lock_hash_);\n\n let issuers: Vec<IssuerInfoDb> = issuer_infos\n\n .select((name, avatar, description))\n\n .filter(lock_hash.eq(lock_hash_hex))\n\n .limit(1)\n\n .load::<IssuerInfoDb>(conn)\n\n .map_or_else(\n\n |e| {\n\n error!(\"Query issuer info error: {}\", e.to_string());\n\n Err(Error::DatabaseQueryError(e.to_string()))\n\n },\n\n Ok,\n\n )?;\n\n Ok(issuers.get(0).cloned())\n\n}\n", "file_path": "src/models/issuer.rs", "rank": 18, "score": 215530.93021177597 }, { "content": "pub fn branch_node_to_vec(node: &BranchNode) -> Vec<u8> {\n\n match (&node.left, &node.right) {\n\n (MergeValue::Value(left), MergeValue::Value(right)) => {\n\n let mut ret = Vec::with_capacity(33);\n\n ret.extend_from_slice(&[0]);\n\n ret.extend_from_slice(left.as_slice());\n\n ret.extend_from_slice(right.as_slice());\n\n ret\n\n }\n\n (\n\n MergeValue::Value(left),\n\n MergeValue::MergeWithZero {\n\n base_node,\n\n zero_bits,\n\n zero_count,\n\n },\n\n ) => {\n\n let mut ret = Vec::with_capacity(98);\n\n ret.extend_from_slice(&[1]);\n\n ret.extend_from_slice(left.as_slice());\n", "file_path": "src/smt/store/serde.rs", "rank": 20, "score": 210536.43629771326 }, { "content": "fn get_selection() -> (cota_id, token_index, configure, state, characteristic) {\n\n (cota_id, token_index, configure, state, characteristic)\n\n}\n", "file_path": "src/models/hold.rs", "rank": 21, "score": 208604.46292090346 }, { "content": "pub fn generate_claim_value(version: u8) -> (Byte32, H256) {\n\n let mut claim_value_vec = vec![255u8; 31];\n\n if version == 0 {\n\n claim_value_vec.insert(0, 0u8);\n\n } else {\n\n claim_value_vec.insert(0, 1u8);\n\n }\n\n let claim_value = Byte32::from_slice(&claim_value_vec).unwrap();\n\n let value = H256::from([255u8; 32]);\n\n (claim_value, value)\n\n}\n\n\n", "file_path": "src/entries/helper.rs", "rank": 22, "score": 206970.61812117335 }, { "content": "fn parse_cota(input_type: BytesOpt, version: u8, mut cota_map: CotaMap) -> Result<CotaMap, Error> {\n\n match input_type.to_opt() {\n\n Some(input_type_) => {\n\n let input_type: Bytes = input_type_.unpack();\n\n let (tx_type, slice) = (u8::from(input_type[0]), &input_type[1..]);\n\n if tx_type > TRANSFER_UPDATE || tx_type == 0 {\n\n return Err(Error::WitnessParseError(\"Not cota witness\".to_string()));\n\n }\n\n let cota_entries = match tx_type {\n\n CREATE => {\n\n parse_define(DefineCotaNFTEntries::from_slice(slice).map_err(entries_error)?)\n\n }\n\n MINT => match version {\n\n 0 => parse_mint(MintCotaNFTEntries::from_slice(slice).map_err(entries_error)?),\n\n _ => parse_mint_v1(\n\n MintCotaNFTV1Entries::from_slice(slice).map_err(entries_error)?,\n\n ),\n\n },\n\n WITHDRAW => match version {\n\n 0 => parse_withdrawal(\n", "file_path": "src/response/witness/cota.rs", "rank": 23, "score": 206436.51473347345 }, { "content": "pub fn parse_vec_map<T: ReqParser>(map: &Map<String, Value>, key: &str) -> Result<Vec<T>, Error> {\n\n let value = map\n\n .get(key)\n\n .ok_or(Error::RequestParamNotFound(key.to_owned()))?;\n\n if !value.is_array() {\n\n return Err(Error::RequestParamTypeError(key.to_owned()));\n\n }\n\n let mut vec: Vec<T> = Vec::new();\n\n for element in value.as_array().unwrap() {\n\n if !element.is_object() {\n\n return Err(Error::RequestParamTypeError(key.to_owned()));\n\n }\n\n vec.push(T::from_map(element.as_object().unwrap())?)\n\n }\n\n Ok(vec)\n\n}\n\n\n", "file_path": "src/request/helper.rs", "rank": 24, "score": 206000.2659488644 }, { "content": "type DBTotalResult<T> = Result<(Vec<T>, i64, u64), Error>;\n", "file_path": "src/models/mod.rs", "rank": 25, "score": 200512.02486778423 }, { "content": "pub fn get_define_cota_by_cota_id(cota_id_: [u8; 20]) -> Result<Option<DefineDb>, Error> {\n\n let start_time = Local::now().timestamp_millis();\n\n let conn = &POOL.clone().get().expect(\"Mysql pool connection error\");\n\n let cota_id_hex = hex::encode(cota_id_);\n\n let defines: Vec<DefineDb> = define_cota_nft_kv_pairs\n\n .select(get_selection())\n\n .filter(cota_id.eq(cota_id_hex))\n\n .limit(1)\n\n .load::<DefineCotaNft>(conn)\n\n .map_or_else(\n\n |e| {\n\n error!(\"Query define error: {}\", e.to_string());\n\n Err(Error::DatabaseQueryError(e.to_string()))\n\n },\n\n |defines| Ok(parse_define_cota_nft(defines)),\n\n )?;\n\n diff_time(start_time, \"SQL get_define_cota_by_cota_id\");\n\n Ok(defines.get(0).cloned())\n\n}\n\n\n", "file_path": "src/models/define.rs", "rank": 26, "score": 197118.03870691193 }, { "content": "fn generate_mysql_smt<'a>(smt: &mut CotaSMT<'a>, lock_hash: [u8; 32]) -> Result<(), Error> {\n\n let start_time = Local::now().timestamp_millis();\n\n let (defines, holds, withdrawals, claims) = get_all_cota_by_lock_hash(lock_hash)?;\n\n diff_time(\n\n start_time,\n\n \"Load all history smt leaves from mysql database\",\n\n );\n\n\n\n let start_time = Local::now().timestamp_millis();\n\n for define_db in defines {\n\n let DefineDb {\n\n cota_id,\n\n total,\n\n issued,\n\n configure,\n\n block_number,\n\n } = define_db;\n\n let (_, key) = generate_define_key(cota_id);\n\n let (_, value) = generate_define_value(\n\n total.to_be_bytes(),\n", "file_path": "src/entries/smt.rs", "rank": 27, "score": 196826.71518065728 }, { "content": "pub fn get_class_info_by_cota_id(cota_id_: [u8; 20]) -> Result<Option<ClassInfoDb>, Error> {\n\n let cota_id_hex = hex::encode(cota_id_);\n\n let conn = &POOL.clone().get().expect(\"Mysql pool connection error\");\n\n let classes: Vec<ClassInfoDb> = class_infos\n\n .select((\n\n name,\n\n symbol,\n\n description,\n\n image,\n\n audio,\n\n video,\n\n model,\n\n characteristic,\n\n properties,\n\n ))\n\n .filter(cota_id.eq(cota_id_hex))\n\n .limit(1)\n\n .load::<ClassInfoDb>(conn)\n\n .map_or_else(\n\n |e| {\n\n error!(\"Query class info error: {}\", e.to_string());\n\n Err(Error::DatabaseQueryError(e.to_string()))\n\n },\n\n Ok,\n\n )?;\n\n Ok(classes.get(0).cloned())\n\n}\n", "file_path": "src/models/class.rs", "rank": 28, "score": 194744.99588237255 }, { "content": "pub fn generate_define_key(cota_id: [u8; 20]) -> (DefineCotaNFTId, H256) {\n\n let cota_id = CotaId::from_slice(&cota_id).unwrap();\n\n let smt_type = Uint16::from_slice(&DEFINE_NFT_SMT_TYPE).unwrap();\n\n let define_key = DefineCotaNFTIdBuilder::default()\n\n .cota_id(cota_id)\n\n .smt_type(smt_type)\n\n .build();\n\n let mut define_key_bytes = [0u8; 32];\n\n define_key_bytes[0..22].copy_from_slice(define_key.as_slice());\n\n let key = H256::from(define_key_bytes);\n\n (define_key, key)\n\n}\n\n\n", "file_path": "src/entries/helper.rs", "rank": 32, "score": 185922.0168619256 }, { "content": "pub fn get_define_cota_by_lock_hash(lock_hash_: [u8; 32]) -> DBResult<DefineDb> {\n\n let start_time = Local::now().timestamp_millis();\n\n let conn = &POOL.clone().get().expect(\"Mysql pool connection error\");\n\n let (lock_hash_hex, lock_hash_crc_) = parse_lock_hash(lock_hash_);\n\n let mut page: i64 = 0;\n\n let mut defines: Vec<DefineDb> = Vec::new();\n\n loop {\n\n let defines_page = define_cota_nft_kv_pairs\n\n .select(get_selection())\n\n .filter(lock_hash_crc.eq(lock_hash_crc_))\n\n .filter(lock_hash.eq(lock_hash_hex.clone()))\n\n .limit(PAGE_SIZE)\n\n .offset(PAGE_SIZE * page)\n\n .load::<DefineCotaNft>(conn)\n\n .map_or_else(\n\n |e| {\n\n error!(\"Query define error: {}\", e.to_string());\n\n Err(Error::DatabaseQueryError(e.to_string()))\n\n },\n\n |defines| Ok(parse_define_cota_nft(defines)),\n", "file_path": "src/models/define.rs", "rank": 33, "score": 184069.23147141066 }, { "content": "pub fn get_claim_cota_by_lock_hash(lock_hash_: [u8; 32]) -> DBResult<ClaimDb> {\n\n let start_time = Local::now().timestamp_millis();\n\n let (lock_hash_hex, lock_hash_crc_) = parse_lock_hash(lock_hash_);\n\n let mut page: i64 = 0;\n\n let mut claims: Vec<ClaimDb> = Vec::new();\n\n let conn = &POOL.clone().get().expect(\"Mysql pool connection error\");\n\n loop {\n\n let claims_page: Vec<ClaimDb> = claimed_cota_nft_kv_pairs\n\n .select(get_selection())\n\n .filter(lock_hash_crc.eq(lock_hash_crc_))\n\n .filter(lock_hash.eq(lock_hash_hex.clone()))\n\n .limit(PAGE_SIZE)\n\n .offset(PAGE_SIZE * page)\n\n .load::<ClaimCotaNft>(conn)\n\n .map_or_else(\n\n |e| {\n\n error!(\"Query claim error: {}\", e.to_string());\n\n Err(Error::DatabaseQueryError(e.to_string()))\n\n },\n\n |claims| Ok(parse_claimed_cota_nft(claims)),\n", "file_path": "src/models/claim.rs", "rank": 34, "score": 184069.23147141066 }, { "content": "pub fn parse_withdrawal(obj: WithdrawalCotaNFTEntries) -> Map<String, Value> {\n\n let mut map = Map::new();\n\n map.insert_str(\"type\", \"withdraw\".to_owned());\n\n map.insert_obj_vec(\n\n \"hold_keys\",\n\n obj.hold_keys().into_iter().map(parse_cota_nft_id).collect(),\n\n );\n\n map.insert_obj_vec(\n\n \"hold_values\",\n\n obj.hold_values()\n\n .into_iter()\n\n .map(parse_cota_nft_info)\n\n .collect(),\n\n );\n\n map.insert_obj_vec(\n\n \"withdrawal_keys\",\n\n obj.withdrawal_keys()\n\n .into_iter()\n\n .map(parse_cota_nft_id)\n\n .collect(),\n", "file_path": "src/response/witness/parser.rs", "rank": 35, "score": 177375.4733339064 }, { "content": "pub fn parse_withdrawal_v1(obj: WithdrawalCotaNFTV1Entries) -> Map<String, Value> {\n\n let mut map = Map::new();\n\n map.insert_str(\"type\", \"withdraw\".to_owned());\n\n map.insert_obj_vec(\n\n \"hold_keys\",\n\n obj.hold_keys().into_iter().map(parse_cota_nft_id).collect(),\n\n );\n\n map.insert_obj_vec(\n\n \"hold_values\",\n\n obj.hold_values()\n\n .into_iter()\n\n .map(parse_cota_nft_info)\n\n .collect(),\n\n );\n\n map.insert_obj_vec(\n\n \"withdrawal_keys\",\n\n obj.withdrawal_keys()\n\n .into_iter()\n\n .map(parse_withdrawal_cota_nft_key_v1)\n\n .collect(),\n", "file_path": "src/response/witness/parser.rs", "rank": 36, "score": 174780.41168202148 }, { "content": "pub fn get_all_cota_by_lock_hash(lock_hash: [u8; 32]) -> DBAllResult {\n\n let defines = get_define_cota_by_lock_hash(lock_hash)?;\n\n let holds = get_hold_cota_by_lock_hash(lock_hash, None)?;\n\n let withdrawals = get_withdrawal_cota_by_lock_hash(lock_hash, None)?;\n\n let claims = get_claim_cota_by_lock_hash(lock_hash)?;\n\n Ok((defines.0, holds.0, withdrawals.0, claims.0))\n\n}\n\n\n", "file_path": "src/models/common.rs", "rank": 37, "score": 171925.4374443968 }, { "content": "fn slice_to_hex(slice: &[u8]) -> String {\n\n format!(\"0x{}\", hex::encode(slice))\n\n}\n", "file_path": "src/response/witness/parser.rs", "rank": 38, "score": 168952.05644873634 }, { "content": "fn generate_script_vec(script_db: &ScriptDb) -> Vec<u8> {\n\n let args_bytes: Vec<Byte> = script_db.args.iter().map(|v| Byte::from(*v)).collect();\n\n let script = ScriptBuilder::default()\n\n .code_hash(Byte32::from_slice(&script_db.code_hash[..]).unwrap())\n\n .hash_type(Byte::from(script_db.hash_type))\n\n .args(BytesBuilder::default().set(args_bytes).build())\n\n .build();\n\n script.as_slice().to_vec()\n\n}\n", "file_path": "src/models/scripts.rs", "rank": 39, "score": 167637.84588048863 }, { "content": "type DBAllResult = Result<(Vec<DefineDb>, Vec<HoldDb>, Vec<WithdrawDb>, Vec<ClaimDb>), Error>;\n\n\n", "file_path": "src/models/common.rs", "rank": 40, "score": 166586.70249525088 }, { "content": "pub fn parse_withdrawal_response(\n\n withdrawals: Vec<(WithdrawNFTDb, Option<ClassInfoDb>)>,\n\n total: i64,\n\n page_size: i64,\n\n block_number: u64,\n\n) -> Map<String, Value> {\n\n let nfts: Vec<Value> = withdrawals\n\n .into_iter()\n\n .map(parse_withdrawal_value)\n\n .collect();\n\n let mut map = Map::new();\n\n map.insert_i64(\"total\", total);\n\n map.insert_i64(\"page_size\", page_size);\n\n map.insert_u64(\"block_number\", block_number);\n\n map.insert_array(\"nfts\", nfts);\n\n map\n\n}\n\n\n", "file_path": "src/response/withdrawal.rs", "rank": 41, "score": 161554.76594488553 }, { "content": "pub fn parse_withdrawal_smt(\n\n (root_hash, withdrawal_entries): (H256, WithdrawalCotaNFTV1Entries),\n\n block_number: u64,\n\n) -> Map<String, Value> {\n\n let withdrawal_entry = hex::encode(withdrawal_entries.as_slice());\n\n let withdrawal_root_hash = hex::encode(root_hash.as_slice());\n\n let mut map = Map::new();\n\n map.insert_str(\"smt_root_hash\", withdrawal_root_hash);\n\n map.insert_str(\"withdrawal_smt_entry\", withdrawal_entry);\n\n map.insert_u64(\"block_number\", block_number);\n\n map\n\n}\n\n\n", "file_path": "src/response/withdrawal.rs", "rank": 42, "score": 161554.76594488553 }, { "content": "fn generate_smt(history_leaf_count: u32, update_leaf_count: u32) {\n\n let mut rng = thread_rng();\n\n let lock_hash: [u8; 32] = rng.gen::<[u8; 32]>().into();\n\n let mut leaves = vec![];\n\n for _ in 0..history_leaf_count {\n\n let key: H256 = rng.gen::<[u8; 32]>().into();\n\n let value: H256 = rng.gen::<[u8; 32]>().into();\n\n leaves.push((key, value));\n\n }\n\n let db = RocksDB::new_with_path(&format!(\"test_db_{}\", history_leaf_count))\n\n .expect(\"rocksdb open error\");\n\n let transaction = StoreTransaction::new(db.transaction());\n\n let smt_store = SMTStore::new(\n\n lock_hash,\n\n COLUMN_SMT_LEAF,\n\n COLUMN_SMT_BRANCH,\n\n COLUMN_SMT_ROOT,\n\n COLUMN_SMT_TEMP_LEAVES,\n\n &transaction,\n\n );\n", "file_path": "src/smt/tests/smt.rs", "rank": 43, "score": 158550.6705107056 }, { "content": "fn generate_params(lock_script: &[u8]) -> Result<Value, Error> {\n\n let lock = Script::from_slice(lock_script)\n\n .map_err(|_e| Error::Other(\"Lock script format error\".to_owned()))?;\n\n let hash_type = match lock.hash_type().into() {\n\n 0u8 => \"data\",\n\n 1u8 => \"type\",\n\n 2u8 => \"data1\",\n\n _ => \"0\",\n\n };\n\n let is_mainnet: bool = match env::var(\"IS_MAINNET\") {\n\n Ok(mainnet) => from_str::<bool>(&mainnet).unwrap(),\n\n Err(_e) => false,\n\n };\n\n let code_hash = if is_mainnet {\n\n MAINNET_COTA_CODE_HASH\n\n } else {\n\n TESTNET_COTA_CODE_HASH\n\n };\n\n\n\n Ok(json!([\n", "file_path": "src/indexer/index.rs", "rank": 45, "score": 155497.89450061033 }, { "content": "fn parse_script(slice: &[u8]) -> Map<String, Value> {\n\n let mut map = Map::new();\n\n let script = Script::from_slice(slice).expect(\"Parse script error\");\n\n map.insert_str(\"code_hash\", slice_to_hex(script.code_hash().as_slice()));\n\n map.insert_str(\"hash_type\", slice_to_hex(script.hash_type().as_slice()));\n\n map.insert_str(\"args\", slice_to_hex(&script.args().raw_data().to_vec()));\n\n map\n\n}\n\n\n", "file_path": "src/response/witness/parser.rs", "rank": 46, "score": 155241.76508804096 }, { "content": "fn get_selection() -> (cota_id, total, issued, configure, block_number) {\n\n (cota_id, total, issued, configure, block_number)\n\n}\n", "file_path": "src/models/define.rs", "rank": 47, "score": 154918.28620983995 }, { "content": "pub fn get_syncer_tip_block_number() -> Result<u64, Error> {\n\n let conn = &POOL.clone().get().expect(\"Mysql pool connection error\");\n\n check_infos\n\n .select(block_number)\n\n .order(block_number.desc())\n\n .first::<u64>(conn)\n\n .map_err(|e| {\n\n error!(\"Query block number error: {}\", e.to_string());\n\n Error::DatabaseQueryError(e.to_string())\n\n })\n\n}\n", "file_path": "src/models/block.rs", "rank": 48, "score": 154193.27503307926 }, { "content": "pub fn slice_to_branch_node(slice: &[u8]) -> BranchNode {\n\n match slice[0] {\n\n 0 => {\n\n let left: [u8; 32] = slice[1..33].try_into().expect(\"checked slice\");\n\n let right: [u8; 32] = slice[33..65].try_into().expect(\"checked slice\");\n\n BranchNode {\n\n left: MergeValue::Value(left.into()),\n\n right: MergeValue::Value(right.into()),\n\n }\n\n }\n\n 1 => {\n\n let left: [u8; 32] = slice[1..33].try_into().expect(\"checked slice\");\n\n let base_node: [u8; 32] = slice[33..65].try_into().expect(\"checked slice\");\n\n let zero_bits: [u8; 32] = slice[65..97].try_into().expect(\"checked slice\");\n\n let zero_count = slice[97];\n\n BranchNode {\n\n left: MergeValue::Value(left.into()),\n\n right: MergeValue::MergeWithZero {\n\n base_node: base_node.into(),\n\n zero_bits: zero_bits.into(),\n", "file_path": "src/smt/store/serde.rs", "rank": 49, "score": 153881.4061051231 }, { "content": "pub fn diff_time(start_time: i64, message: &str) {\n\n let diff_time = (Local::now().timestamp_millis() - start_time) as f64 / 1000f64;\n\n debug!(\"{}: {}s\", message, diff_time);\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n #[test]\n\n fn test_remove_0x() {\n\n assert_eq!(remove_0x(\"0x123456\"), \"123456\");\n\n assert_eq!(remove_0x(\"123456\"), \"123456\");\n\n assert_eq!(remove_0x(\"0\"), \"0\");\n\n assert_eq!(remove_0x(\"0x\"), \"\");\n\n }\n\n\n\n #[test]\n\n fn test_parse_vec_n() {\n\n assert_eq!(\n\n parse_vec_n::<20>(vec![\n", "file_path": "src/utils/helper.rs", "rank": 50, "score": 153456.33296867463 }, { "content": "pub fn generate_withdrawal_value(\n\n configure: u8,\n\n state: u8,\n\n characteristic: [u8; 20],\n\n to_lock_script: &[u8],\n\n out_point: [u8; 24],\n\n) -> (WithdrawalCotaNFTValue, H256) {\n\n let cota_info = CotaNFTInfoBuilder::default()\n\n .configure(Byte::from(configure))\n\n .state(Byte::from(state))\n\n .characteristic(Characteristic::from_slice(&characteristic).unwrap())\n\n .build();\n\n let to_lock_bytes: Vec<Byte> = to_lock_script.iter().map(|v| Byte::from(*v)).collect();\n\n let withdrawal_value = WithdrawalCotaNFTValueBuilder::default()\n\n .nft_info(cota_info)\n\n .out_point(OutPointSlice::from_slice(&out_point).unwrap())\n\n .to_lock(BytesBuilder::default().set(to_lock_bytes).build())\n\n .build();\n\n let value = H256::from(blake2b_256(withdrawal_value.as_slice()));\n\n (withdrawal_value, value)\n\n}\n\n\n", "file_path": "src/entries/helper.rs", "rank": 51, "score": 151808.41691536445 }, { "content": "pub fn get_withdrawal_cota(\n\n lock_script: &[u8],\n\n page: i64,\n\n page_size: i64,\n\n cota_id_opt: Option<[u8; 20]>,\n\n) -> DBTotalResult<(WithdrawNFTDb, Option<ClassInfoDb>)> {\n\n let lock_hash = blake2b_256(lock_script);\n\n let script_id_opt = get_script_id_by_lock_script(lock_script)?;\n\n let (mut withdrawal_nfts, _, block_height) = match script_id_opt {\n\n Some(script_id) => get_withdrawal_cota_by_script_id(script_id, cota_id_opt),\n\n None => Ok((vec![], 0, 0)),\n\n }?;\n\n withdrawal_nfts = withdrawal_nfts\n\n .into_iter()\n\n .filter(|withdrawal| {\n\n !is_exist_in_claim(\n\n lock_hash,\n\n withdrawal.cota_id,\n\n withdrawal.token_index,\n\n withdrawal.out_point,\n", "file_path": "src/models/common.rs", "rank": 52, "score": 151808.41691536445 }, { "content": "pub fn parse_sender_response(\n\n sender_lock_hash: Option<String>,\n\n block_number: u64,\n\n) -> Map<String, Value> {\n\n let mut map = Map::new();\n\n match sender_lock_hash {\n\n Some(lock_hash) => map.insert_str(\"sender_lock_hash\", format!(\"0x{}\", lock_hash)),\n\n None => map.insert_null(\"sender_lock_hash\"),\n\n };\n\n map.insert_u64(\"block_number\", block_number);\n\n map\n\n}\n", "file_path": "src/response/withdrawal.rs", "rank": 53, "score": 151808.41691536445 }, { "content": "pub fn generate_withdrawal_value_v1(\n\n configure: u8,\n\n state: u8,\n\n characteristic: [u8; 20],\n\n to_lock_script: &[u8],\n\n) -> (WithdrawalCotaNFTValueV1, H256) {\n\n let cota_info = CotaNFTInfoBuilder::default()\n\n .configure(Byte::from(configure))\n\n .state(Byte::from(state))\n\n .characteristic(Characteristic::from_slice(&characteristic).unwrap())\n\n .build();\n\n let to_lock_bytes: Vec<Byte> = to_lock_script.iter().map(|v| Byte::from(*v)).collect();\n\n let withdrawal_value = WithdrawalCotaNFTValueV1Builder::default()\n\n .nft_info(cota_info)\n\n .to_lock(BytesBuilder::default().set(to_lock_bytes).build())\n\n .build();\n\n let value = H256::from(blake2b_256(withdrawal_value.as_slice()));\n\n (withdrawal_value, value)\n\n}\n\n\n", "file_path": "src/entries/helper.rs", "rank": 54, "score": 148764.7572444207 }, { "content": "pub fn generate_withdrawal_key_v1(\n\n cota_id: [u8; 20],\n\n token_index: [u8; 4],\n\n out_point: [u8; 24],\n\n) -> (WithdrawalCotaNFTKeyV1, H256) {\n\n let nft_id = CotaNFTIdBuilder::default()\n\n .cota_id(CotaId::from_slice(&cota_id).unwrap())\n\n .smt_type(Uint16::from_slice(&WITHDRAWAL_NFT_SMT_TYPE).unwrap())\n\n .index(Uint32::from_slice(&token_index).unwrap())\n\n .build();\n\n let withdrawal_key = WithdrawalCotaNFTKeyV1Builder::default()\n\n .nft_id(nft_id)\n\n .out_point(OutPointSlice::from_slice(&out_point).unwrap())\n\n .build();\n\n let key = H256::from(blake2b_256(withdrawal_key.as_slice()));\n\n\n\n (withdrawal_key, key)\n\n}\n\n\n", "file_path": "src/entries/helper.rs", "rank": 55, "score": 148764.7572444207 }, { "content": "fn reset_smt_temp_leaves<'a>(smt: &mut CotaSMT<'a>) -> Result<(), Error> {\n\n let leaves_opt = smt.store().get_leaves()?;\n\n if let Some(leaves) = leaves_opt {\n\n smt.update_all(leaves)\n\n .expect(\"SMT update temp leaves error\");\n\n }\n\n debug!(\"Reset temp leaves successfully\");\n\n Ok(())\n\n}\n", "file_path": "src/entries/smt.rs", "rank": 57, "score": 144971.2689522041 }, { "content": "pub fn get_hold_cota_by_lock_hash_and_page(\n\n lock_hash_: [u8; 32],\n\n page: i64,\n\n page_size: i64,\n\n cota_id_opt: Option<[u8; 20]>,\n\n) -> DBTotalResult<HoldDb> {\n\n let start_time = Local::now().timestamp_millis();\n\n let conn = &POOL.clone().get().expect(\"Mysql pool connection error\");\n\n let (lock_hash_hex, lock_hash_crc_) = parse_lock_hash(lock_hash_);\n\n let total_result = match cota_id_opt {\n\n Some(cota_id_) => hold_cota_nft_kv_pairs\n\n .filter(lock_hash_crc.eq(lock_hash_crc_))\n\n .filter(lock_hash.eq(lock_hash_hex.clone()))\n\n .filter(cota_id.eq(hex::encode(&cota_id_)))\n\n .count()\n\n .get_result::<i64>(conn),\n\n None => hold_cota_nft_kv_pairs\n\n .filter(lock_hash_crc.eq(lock_hash_crc_))\n\n .filter(lock_hash.eq(lock_hash_hex.clone()))\n\n .filter(cota_id.ne(hex::encode(&[0u8; 20])))\n", "file_path": "src/models/hold.rs", "rank": 58, "score": 143680.63174061917 }, { "content": "pub fn get_hold_cota_count_by_lock_hash(\n\n lock_hash_: [u8; 32],\n\n cota_id_: [u8; 20],\n\n) -> Result<i64, Error> {\n\n let start_time = Local::now().timestamp_millis();\n\n let conn = &POOL.clone().get().expect(\"Mysql pool connection error\");\n\n let (lock_hash_hex, lock_hash_crc_) = parse_lock_hash(lock_hash_);\n\n let cota_id_str = hex::encode(cota_id_);\n\n let hold_count: i64 = hold_cota_nft_kv_pairs\n\n .filter(lock_hash_crc.eq(lock_hash_crc_))\n\n .filter(lock_hash.eq(lock_hash_hex.clone()))\n\n .filter(cota_id.eq(cota_id_str))\n\n .count()\n\n .get_result::<i64>(conn)\n\n .map_err(|e| {\n\n error!(\"Query hold error: {}\", e.to_string());\n\n Error::DatabaseQueryError(e.to_string())\n\n })?;\n\n diff_time(start_time, \"SQL get_hold_cota_count_by_lock_hash\");\n\n Ok(hold_count)\n\n}\n\n\n", "file_path": "src/models/hold.rs", "rank": 59, "score": 143647.59654819328 }, { "content": "pub fn parse_define(obj: DefineCotaNFTEntries) -> Map<String, Value> {\n\n let mut map = Map::new();\n\n map.insert_str(\"type\", \"define\".to_owned());\n\n map.insert_obj_vec(\n\n \"define_keys\",\n\n obj.define_keys()\n\n .into_iter()\n\n .map(parse_define_cota_nft_id)\n\n .collect(),\n\n );\n\n map.insert_obj_vec(\n\n \"define_values\",\n\n obj.define_values()\n\n .into_iter()\n\n .map(parse_define_cota_nft_value)\n\n .collect(),\n\n );\n\n map.insert_str(\"proof\", slice_to_hex(&obj.proof().raw_data().to_vec()));\n\n map.insert_str(\"action\", slice_to_hex(&obj.action().raw_data().to_vec()));\n\n map\n\n}\n\n\n", "file_path": "src/response/witness/parser.rs", "rank": 60, "score": 140164.03793212195 }, { "content": "pub fn parse_claim(obj: ClaimCotaNFTEntries) -> Map<String, Value> {\n\n let mut map = Map::new();\n\n map.insert_str(\"type\", \"claim\".to_owned());\n\n map.insert_obj_vec(\n\n \"hold_keys\",\n\n obj.hold_keys().into_iter().map(parse_cota_nft_id).collect(),\n\n );\n\n map.insert_obj_vec(\n\n \"hold_values\",\n\n obj.hold_values()\n\n .into_iter()\n\n .map(parse_cota_nft_info)\n\n .collect(),\n\n );\n\n map.insert_obj_vec(\n\n \"claim_keys\",\n\n obj.claim_keys()\n\n .into_iter()\n\n .map(parse_claim_cota_nft_key)\n\n .collect(),\n", "file_path": "src/response/witness/parser.rs", "rank": 61, "score": 140164.03793212195 }, { "content": "pub fn parse_update(obj: UpdateCotaNFTEntries) -> Map<String, Value> {\n\n let mut map = Map::new();\n\n map.insert_str(\"type\", \"update\".to_owned());\n\n map.insert_obj_vec(\n\n \"hold_keys\",\n\n obj.hold_keys().into_iter().map(parse_cota_nft_id).collect(),\n\n );\n\n map.insert_obj_vec(\n\n \"hold_old_values\",\n\n obj.hold_old_values()\n\n .into_iter()\n\n .map(parse_cota_nft_info)\n\n .collect(),\n\n );\n\n map.insert_obj_vec(\n\n \"hold_new_values\",\n\n obj.hold_new_values()\n\n .into_iter()\n\n .map(parse_cota_nft_info)\n\n .collect(),\n\n );\n\n map.insert_str(\"proof\", slice_to_hex(&obj.proof().raw_data().to_vec()));\n\n map.insert_str(\"action\", slice_to_hex(&obj.action().raw_data().to_vec()));\n\n map\n\n}\n\n\n", "file_path": "src/response/witness/parser.rs", "rank": 62, "score": 140164.03793212195 }, { "content": "pub fn parse_mint(obj: MintCotaNFTEntries) -> Map<String, Value> {\n\n let mut map = Map::new();\n\n map.insert_str(\"type\", \"mint\".to_owned());\n\n map.insert_obj_vec(\n\n \"define_keys\",\n\n obj.define_keys()\n\n .into_iter()\n\n .map(parse_define_cota_nft_id)\n\n .collect(),\n\n );\n\n map.insert_obj_vec(\n\n \"define_old_values\",\n\n obj.define_old_values()\n\n .into_iter()\n\n .map(parse_define_cota_nft_value)\n\n .collect(),\n\n );\n\n map.insert_obj_vec(\n\n \"define_new_values\",\n\n obj.define_new_values()\n", "file_path": "src/response/witness/parser.rs", "rank": 63, "score": 140164.03793212195 }, { "content": "pub fn parse_transfer(obj: TransferCotaNFTEntries) -> Map<String, Value> {\n\n let mut map = Map::new();\n\n map.insert_str(\"type\", \"transfer\".to_owned());\n\n map.insert_obj_vec(\n\n \"claim_keys\",\n\n obj.claim_keys()\n\n .into_iter()\n\n .map(parse_claim_cota_nft_key)\n\n .collect(),\n\n );\n\n map.insert_array(\n\n \"claim_values\",\n\n obj.claim_values()\n\n .into_iter()\n\n .map(|v| Value::String(slice_to_hex(v.as_slice())))\n\n .collect(),\n\n );\n\n map.insert_obj_vec(\n\n \"withdrawal_keys\",\n\n obj.withdrawal_keys()\n", "file_path": "src/response/witness/parser.rs", "rank": 64, "score": 140164.03793212195 }, { "content": "pub fn parse_transfer_v1(obj: TransferCotaNFTV1Entries) -> Map<String, Value> {\n\n let mut map = Map::new();\n\n map.insert_str(\"type\", \"transfer\".to_owned());\n\n map.insert_obj_vec(\n\n \"claim_keys\",\n\n obj.claim_keys()\n\n .into_iter()\n\n .map(parse_claim_cota_nft_key)\n\n .collect(),\n\n );\n\n map.insert_array(\n\n \"claim_values\",\n\n obj.claim_values()\n\n .into_iter()\n\n .map(|value| Value::String(slice_to_hex(value.as_slice())))\n\n .collect(),\n\n );\n\n map.insert_obj_vec(\n\n \"withdrawal_keys\",\n\n obj.withdrawal_keys()\n", "file_path": "src/response/witness/parser.rs", "rank": 65, "score": 138211.66108067668 }, { "content": "pub fn parse_mint_v1(obj: MintCotaNFTV1Entries) -> Map<String, Value> {\n\n let mut map = Map::new();\n\n map.insert_str(\"type\", \"mint\".to_owned());\n\n map.insert_obj_vec(\n\n \"define_keys\",\n\n obj.define_keys()\n\n .into_iter()\n\n .map(parse_define_cota_nft_id)\n\n .collect(),\n\n );\n\n map.insert_obj_vec(\n\n \"define_old_values\",\n\n obj.define_old_values()\n\n .into_iter()\n\n .map(parse_define_cota_nft_value)\n\n .collect(),\n\n );\n\n map.insert_obj_vec(\n\n \"define_new_values\",\n\n obj.define_new_values()\n", "file_path": "src/response/witness/parser.rs", "rank": 66, "score": 138211.66108067668 }, { "content": "pub fn parse_transfer_update(obj: TransferUpdateCotaNFTEntries) -> Map<String, Value> {\n\n let mut map = Map::new();\n\n map.insert_str(\"type\", \"transfer_update\".to_owned());\n\n map.insert_obj_vec(\n\n \"claim_keys\",\n\n obj.claim_keys()\n\n .into_iter()\n\n .map(parse_claim_cota_nft_key)\n\n .collect(),\n\n );\n\n map.insert_obj_vec(\n\n \"claim_infos\",\n\n obj.claim_infos()\n\n .into_iter()\n\n .map(parse_claim_cota_nft_info)\n\n .collect(),\n\n );\n\n map.insert_obj_vec(\n\n \"withdrawal_keys\",\n\n obj.withdrawal_keys()\n", "file_path": "src/response/witness/parser.rs", "rank": 67, "score": 136353.25656658798 }, { "content": "pub fn parse_claim_update(obj: ClaimUpdateCotaNFTEntries) -> Map<String, Value> {\n\n let mut map = Map::new();\n\n map.insert_str(\"type\", \"claim_update\".to_owned());\n\n map.insert_obj_vec(\n\n \"hold_keys\",\n\n obj.hold_keys().into_iter().map(parse_cota_nft_id).collect(),\n\n );\n\n map.insert_obj_vec(\n\n \"hold_new_values\",\n\n obj.hold_values()\n\n .into_iter()\n\n .map(parse_cota_nft_info)\n\n .collect(),\n\n );\n\n map.insert_obj_vec(\n\n \"claim_keys\",\n\n obj.claim_keys()\n\n .into_iter()\n\n .map(parse_claim_cota_nft_key)\n\n .collect(),\n", "file_path": "src/response/witness/parser.rs", "rank": 68, "score": 136353.25656658798 }, { "content": "pub fn parse_transfer_update_v1(obj: TransferUpdateCotaNFTV1Entries) -> Map<String, Value> {\n\n let mut map = Map::new();\n\n map.insert_str(\"type\", \"transfer_update\".to_owned());\n\n map.insert_obj_vec(\n\n \"claim_keys\",\n\n obj.claim_keys()\n\n .into_iter()\n\n .map(parse_claim_cota_nft_key)\n\n .collect(),\n\n );\n\n map.insert_obj_vec(\n\n \"claim_infos\",\n\n obj.claim_infos()\n\n .into_iter()\n\n .map(parse_claim_cota_nft_info)\n\n .collect(),\n\n );\n\n map.insert_obj_vec(\n\n \"withdrawal_keys\",\n\n obj.withdrawal_keys()\n", "file_path": "src/response/witness/parser.rs", "rank": 69, "score": 134582.19917965183 }, { "content": "pub fn parse_claimed_response(claimed: bool, block_number: u64) -> Map<String, Value> {\n\n let mut map = Map::new();\n\n map.insert(\"claimed\".to_string(), Value::Bool(claimed));\n\n map.insert_u64(\"block_number\", block_number);\n\n map\n\n}\n\n\n", "file_path": "src/response/claim.rs", "rank": 70, "score": 134299.0166740879 }, { "content": "fn get_selection() -> (cota_id, token_index, out_point) {\n\n (cota_id, token_index, out_point)\n\n}\n", "file_path": "src/models/claim.rs", "rank": 71, "score": 133690.30970933725 }, { "content": "fn parse_metadata(output_type: BytesOpt, mut cota_map: CotaMap) -> Result<CotaMap, Error> {\n\n match output_type.to_opt() {\n\n Some(output_type_) => {\n\n let metadata =\n\n from_slice::<Metadata<IssuerInfo>>(&output_type_.raw_data()).map_err(json_error)?;\n\n if metadata.metadata.type_ == \"issuer\" {\n\n cota_map.insert(\"info\".to_owned(), json!(metadata.metadata.data));\n\n return Ok(cota_map);\n\n }\n\n if metadata.metadata.type_ == \"cota\" {\n\n let class = from_slice::<Metadata<ClassInfo>>(&output_type_.raw_data())\n\n .map_err(json_error)?;\n\n cota_map.insert(\"info\".to_owned(), json!(class.metadata.data));\n\n return Ok(cota_map);\n\n }\n\n if cota_map.is_empty() {\n\n return Err(Error::WitnessParseError(\n\n \"Invalid CoTA entries or metadata\".to_string(),\n\n ));\n\n } else {\n", "file_path": "src/response/witness/cota.rs", "rank": 72, "score": 130442.81905991485 }, { "content": "fn json_error(e: serde_json::Error) -> Error {\n\n Error::WitnessParseError(format!(\"Parse metadata json error: {}\", e.to_string()))\n\n}\n", "file_path": "src/response/witness/cota.rs", "rank": 73, "score": 130182.3152746197 }, { "content": "fn parse_withdrawal_cota_nft_value(obj: WithdrawalCotaNFTValue) -> Map<String, Value> {\n\n let mut map = Map::new();\n\n map.insert_obj(\"nft_info\", parse_cota_nft_info(obj.nft_info()));\n\n map.insert_str(\"to_lock\", slice_to_hex(&obj.to_lock().raw_data().to_vec()));\n\n map.insert_str(\"out_point\", slice_to_hex(obj.out_point().as_slice()));\n\n map\n\n}\n\n\n", "file_path": "src/response/witness/parser.rs", "rank": 74, "score": 129274.26689587359 }, { "content": "fn entries_error(_e: VerificationError) -> Error {\n\n Error::WitnessParseError(\"Parse cota entries error\".to_string())\n\n}\n\n\n", "file_path": "src/response/witness/cota.rs", "rank": 75, "score": 126124.32065476685 }, { "content": "fn parse_withdrawal_cota_nft_key_v1(obj: WithdrawalCotaNFTKeyV1) -> Map<String, Value> {\n\n let mut map = Map::new();\n\n map.insert_obj(\"nft_id\", parse_cota_nft_id(obj.nft_id()));\n\n map.insert_str(\"out_point\", slice_to_hex(obj.out_point().as_slice()));\n\n map\n\n}\n\n\n", "file_path": "src/response/witness/parser.rs", "rank": 76, "score": 125783.53215114927 }, { "content": "fn parse_withdrawal_cota_nft_value_v1(obj: WithdrawalCotaNFTValueV1) -> Map<String, Value> {\n\n let mut map = Map::new();\n\n map.insert_obj(\"nft_info\", parse_cota_nft_info(obj.nft_info()));\n\n map.insert_obj(\"to_lock\", parse_script(&obj.to_lock().raw_data().to_vec()));\n\n map\n\n}\n\n\n", "file_path": "src/response/witness/parser.rs", "rank": 77, "score": 125783.53215114927 }, { "content": "fn parse_script(scripts_: Vec<Script>) -> Vec<ScriptDb> {\n\n scripts_\n\n .into_iter()\n\n .map(|script| ScriptDb {\n\n id: script.id,\n\n code_hash: parse_bytes_n::<32>(script.code_hash).unwrap(),\n\n hash_type: script.hash_type,\n\n args: parse_bytes(script.args).unwrap(),\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/models/scripts.rs", "rank": 78, "score": 121265.60119686282 }, { "content": "pub fn is_exist_in_claim(\n\n lock_hash_: [u8; 32],\n\n cota_id_: [u8; 20],\n\n token_index_: [u8; 4],\n\n out_point_: [u8; 24],\n\n) -> bool {\n\n let conn = &POOL.clone().get().expect(\"Mysql pool connection error\");\n\n let (lock_hash_hex, lock_hash_crc_) = parse_lock_hash(lock_hash_);\n\n let cota_id_hex = hex::encode(cota_id_);\n\n let cota_id_crc_u32 = generate_crc(cota_id_hex.as_bytes());\n\n let token_index_u32 = u32::from_be_bytes(token_index_);\n\n let out_point_hex = hex::encode(out_point_);\n\n let out_point_crc_u32 = generate_crc(out_point_hex.as_bytes());\n\n claimed_cota_nft_kv_pairs\n\n .filter(lock_hash_crc.eq(lock_hash_crc_))\n\n .filter(lock_hash.eq(lock_hash_hex.clone()))\n\n .filter(cota_id_crc.eq(cota_id_crc_u32))\n\n .filter(cota_id.eq(cota_id_hex))\n\n .filter(token_index.eq(token_index_u32))\n\n .filter(out_point_crc.eq(out_point_crc_u32))\n\n .filter(out_point.eq(out_point_hex))\n\n .limit(1)\n\n .count()\n\n .get_result::<i64>(conn)\n\n .map_or(false, |count_| count_ > 0)\n\n}\n\n\n", "file_path": "src/models/claim.rs", "rank": 79, "score": 120400.25523782856 }, { "content": "pub fn generate_claim_key(\n\n cota_id: [u8; 20],\n\n token_index: [u8; 4],\n\n out_point: [u8; 24],\n\n) -> (ClaimCotaNFTKey, H256) {\n\n let nft_id = CotaNFTIdBuilder::default()\n\n .smt_type(Uint16::from_slice(&CLAIM_NFT_SMT_TYPE).unwrap())\n\n .cota_id(CotaId::from_slice(&cota_id).unwrap())\n\n .index(Uint32::from_slice(&token_index).unwrap())\n\n .build();\n\n let claimed_key = ClaimCotaNFTKeyBuilder::default()\n\n .nft_id(nft_id)\n\n .out_point(OutPointSlice::from_slice(&out_point).unwrap())\n\n .build();\n\n let key = H256::from(blake2b_256(claimed_key.as_slice()));\n\n (claimed_key, key)\n\n}\n\n\n", "file_path": "src/entries/helper.rs", "rank": 80, "score": 118255.56733292437 }, { "content": "pub fn parse_mint_smt(\n\n (root_hash, mint_entries): (H256, MintCotaNFTV1Entries),\n\n block_number: u64,\n\n) -> Map<String, Value> {\n\n let mint_entry = hex::encode(mint_entries.as_slice());\n\n let mint_root_hash = hex::encode(root_hash.as_slice());\n\n let mut map = Map::new();\n\n map.insert_str(\"smt_root_hash\", mint_root_hash);\n\n map.insert_str(\"mint_smt_entry\", mint_entry);\n\n map.insert_u64(\"block_number\", block_number);\n\n map\n\n}\n", "file_path": "src/response/mint.rs", "rank": 81, "score": 118255.56733292437 }, { "content": "pub fn parse_claimed_smt(\n\n (root_hash, claim_entries): (H256, ClaimCotaNFTEntries),\n\n block_number: u64,\n\n) -> Map<String, Value> {\n\n let claim_entry = hex::encode(claim_entries.as_slice());\n\n let claim_root_hash = hex::encode(root_hash.as_slice());\n\n let mut map = Map::new();\n\n map.insert_str(\"smt_root_hash\", claim_root_hash);\n\n map.insert_str(\"claim_smt_entry\", claim_entry);\n\n map.insert_u64(\"block_number\", block_number);\n\n map\n\n}\n\n\n", "file_path": "src/response/claim.rs", "rank": 82, "score": 118255.56733292437 }, { "content": "pub fn parse_hold_response(\n\n holds: Vec<(HoldDb, Option<ClassInfoDb>)>,\n\n total: i64,\n\n page_size: i64,\n\n block_number: u64,\n\n) -> Map<String, Value> {\n\n let nfts: Vec<Value> = holds.into_iter().map(parse_hold_value).collect();\n\n let mut map = Map::new();\n\n map.insert_i64(\"total\", total);\n\n map.insert_i64(\"page_size\", page_size);\n\n map.insert_u64(\"block_number\", block_number);\n\n map.insert_array(\"nfts\", nfts);\n\n map\n\n}\n\n\n", "file_path": "src/response/hold.rs", "rank": 83, "score": 118255.56733292437 }, { "content": "pub fn parse_transfer_smt(\n\n (root_hash, transfer_entries): (H256, TransferCotaNFTV1Entries),\n\n block_number: u64,\n\n) -> Map<String, Value> {\n\n let transfer_entry = hex::encode(transfer_entries.as_slice());\n\n let transfer_root_hash = hex::encode(root_hash.as_slice());\n\n let mut map = Map::new();\n\n map.insert_str(\"smt_root_hash\", transfer_root_hash);\n\n map.insert_str(\"transfer_smt_entry\", transfer_entry);\n\n map.insert_u64(\"block_number\", block_number);\n\n map\n\n}\n\n\n", "file_path": "src/response/transfer.rs", "rank": 84, "score": 118255.56733292437 }, { "content": "pub fn parse_define_info(\n\n define_info: Option<DefineDb>,\n\n class_info: Option<ClassInfoDb>,\n\n block_number: u64,\n\n) -> Map<String, Value> {\n\n let mut map = Map::new();\n\n match define_info {\n\n Some(define) => {\n\n map.insert_u32(\"total\", define.total);\n\n map.insert_u32(\"issued\", define.issued);\n\n map.insert_str(\n\n \"configure\",\n\n format!(\"0x{}\", hex::encode(&[define.configure])),\n\n );\n\n }\n\n None => {\n\n map.insert_null(\"total\");\n\n map.insert_null(\"issued\");\n\n map.insert_null(\"configure\");\n\n }\n", "file_path": "src/response/define.rs", "rank": 85, "score": 118255.56733292437 }, { "content": "pub fn parse_update_smt(\n\n (root_hash, update_entries): (H256, UpdateCotaNFTEntries),\n\n block_number: u64,\n\n) -> Map<String, Value> {\n\n let update_entry = hex::encode(update_entries.as_slice());\n\n let update_root_hash = hex::encode(root_hash.as_slice());\n\n let mut map = Map::new();\n\n map.insert_str(\"smt_root_hash\", update_root_hash);\n\n map.insert_str(\"update_smt_entry\", update_entry);\n\n map.insert_u64(\"block_number\", block_number);\n\n map\n\n}\n", "file_path": "src/response/update.rs", "rank": 86, "score": 118255.56733292437 }, { "content": "pub fn parse_issuer_response(\n\n issuer_info: Option<IssuerInfoDb>,\n\n block_number: u64,\n\n) -> Map<String, Value> {\n\n let mut map = Map::new();\n\n match issuer_info {\n\n Some(issuer) => {\n\n map.insert_str(\"name\", issuer.name);\n\n map.insert_str(\"avatar\", issuer.avatar);\n\n map.insert_str(\"description\", issuer.description);\n\n }\n\n None => {\n\n map.insert_null(\"name\");\n\n map.insert_null(\"avatar\");\n\n map.insert_null(\"description\");\n\n }\n\n }\n\n map.insert_u64(\"block_number\", block_number);\n\n map\n\n}\n", "file_path": "src/response/issuer.rs", "rank": 87, "score": 118255.56733292437 }, { "content": "pub fn parse_mint_response(\n\n withdrawals: Vec<(WithdrawDb, Option<ClassInfoDb>)>,\n\n total: i64,\n\n page_size: i64,\n\n block_number: u64,\n\n) -> Map<String, Value> {\n\n let nfts: Vec<Value> = withdrawals.into_iter().map(parse_mint_value).collect();\n\n let mut map = Map::new();\n\n map.insert_i64(\"total\", total);\n\n map.insert_i64(\"page_size\", page_size);\n\n map.insert_u64(\"block_number\", block_number);\n\n map.insert_array(\"nfts\", nfts);\n\n map\n\n}\n\n\n", "file_path": "src/response/mint.rs", "rank": 88, "score": 118255.56733292437 }, { "content": "pub fn get_hold_cota(\n\n lock_script: &[u8],\n\n page: i64,\n\n page_size: i64,\n\n cota_id_opt: Option<[u8; 20]>,\n\n) -> DBTotalResult<(HoldDb, Option<ClassInfoDb>)> {\n\n let lock_hash = blake2b_256(lock_script);\n\n let (hold_nfts, total, block_height) =\n\n get_hold_cota_by_lock_hash_and_page(lock_hash, page, page_size, cota_id_opt)?;\n\n let mut nfts: Vec<(HoldDb, Option<ClassInfoDb>)> = vec![];\n\n for hold in hold_nfts {\n\n let class_info = get_class_info_by_cota_id(hold.cota_id)?;\n\n nfts.push((hold, class_info))\n\n }\n\n Ok((nfts, total, block_height))\n\n}\n\n\n", "file_path": "src/models/common.rs", "rank": 89, "score": 118255.56733292437 }, { "content": "pub fn generate_hold_value(\n\n configure: u8,\n\n state: u8,\n\n characteristic: [u8; 20],\n\n) -> (CotaNFTInfo, H256) {\n\n let hold_value = CotaNFTInfoBuilder::default()\n\n .characteristic(Characteristic::from_slice(&characteristic).unwrap())\n\n .configure(Byte::from(configure))\n\n .state(Byte::from(state))\n\n .build();\n\n let mut hold_value_bytes = [0u8; 32];\n\n hold_value_bytes[0..22].copy_from_slice(hold_value.as_slice());\n\n hold_value_bytes[31] = 255u8;\n\n let value = H256::from(hold_value_bytes);\n\n (hold_value, value)\n\n}\n\n\n", "file_path": "src/entries/helper.rs", "rank": 90, "score": 118255.56733292437 }, { "content": "pub fn generate_define_value(\n\n total: [u8; 4],\n\n issued: [u8; 4],\n\n configure: u8,\n\n block_number: u64,\n\n) -> (DefineCotaNFTValue, H256) {\n\n let after_padding = block_number > get_value_padding_block_height();\n\n let define_value = DefineCotaNFTValueBuilder::default()\n\n .total(Uint32::from_slice(&total).unwrap())\n\n .issued(Uint32::from_slice(&issued).unwrap())\n\n .configure(Byte::from(configure))\n\n .build();\n\n let mut define_value_bytes = [0u8; 32];\n\n define_value_bytes[0..9].copy_from_slice(define_value.as_slice());\n\n if after_padding || define_value_bytes == [0u8; 32] {\n\n define_value_bytes[31] = 255u8;\n\n }\n\n\n\n let value = H256::from(define_value_bytes);\n\n (define_value, value)\n\n}\n\n\n", "file_path": "src/entries/helper.rs", "rank": 91, "score": 118255.56733292437 }, { "content": "pub fn get_mint_cota(\n\n lock_script: &[u8],\n\n page: i64,\n\n page_size: i64,\n\n) -> DBTotalResult<(WithdrawDb, Option<ClassInfoDb>)> {\n\n let lock_hash = blake2b_256(lock_script);\n\n let defines = get_define_cota_by_lock_hash(lock_hash)?.0;\n\n let cota_ids: Vec<[u8; 20]> = defines.into_iter().map(|define| define.cota_id).collect();\n\n let (withdrawal_nfts, total, block_height) =\n\n get_withdrawal_cota_by_cota_ids(lock_hash, cota_ids, page, page_size)?;\n\n let mut nfts: Vec<(WithdrawDb, Option<ClassInfoDb>)> = vec![];\n\n for withdrawal in withdrawal_nfts {\n\n let class_info = get_class_info_by_cota_id(withdrawal.cota_id)?;\n\n nfts.push((withdrawal, class_info))\n\n }\n\n Ok((nfts, total, block_height))\n\n}\n\n\n", "file_path": "src/models/common.rs", "rank": 92, "score": 118255.56733292437 }, { "content": "pub fn check_cota_claimed(\n\n lock_script: &[u8],\n\n cota_id: [u8; 20],\n\n index: [u8; 4],\n\n) -> Result<(bool, u64), Error> {\n\n let lock_hash = blake2b_256(lock_script);\n\n check_hold_cota_by_lock_hash(lock_hash, (cota_id, index))\n\n}\n\n\n", "file_path": "src/models/common.rs", "rank": 93, "score": 118255.56733292437 }, { "content": "pub fn parse_define_smt(\n\n (root_hash, define_entries): (H256, DefineCotaNFTEntries),\n\n block_number: u64,\n\n) -> Map<String, Value> {\n\n let define_entry = hex::encode(define_entries.as_slice());\n\n let define_root_hash = hex::encode(root_hash.as_slice());\n\n let mut map = Map::new();\n\n map.insert_str(\"smt_root_hash\", define_root_hash);\n\n map.insert_str(\"define_smt_entry\", define_entry);\n\n map.insert_u64(\"block_number\", block_number);\n\n map\n\n}\n\n\n", "file_path": "src/response/define.rs", "rank": 94, "score": 118255.56733292437 }, { "content": "pub fn init_smt<'a>(\n\n transaction: &'a StoreTransaction,\n\n lock_hash: [u8; 32],\n\n) -> Result<CotaSMT<'a>, Error> {\n\n let smt_store = SMTStore::new(\n\n lock_hash,\n\n COLUMN_SMT_LEAF,\n\n COLUMN_SMT_BRANCH,\n\n COLUMN_SMT_ROOT,\n\n COLUMN_SMT_TEMP_LEAVES,\n\n &transaction,\n\n );\n\n let root = smt_store\n\n .get_root()\n\n .map_err(|_e| Error::SMTError(\"Get smt root\".to_string()))?\n\n .unwrap_or_default();\n\n debug!(\n\n \"rocksdb smt root: {:?} of {:?}\",\n\n root,\n\n hex::encode(lock_hash)\n\n );\n\n Ok(CotaSMT::new(root, smt_store))\n\n}\n\n\n", "file_path": "src/entries/smt.rs", "rank": 95, "score": 116836.93514988743 }, { "content": "pub fn parse_transfer_update_smt(\n\n (root_hash, transfer_update_entries): (H256, TransferUpdateCotaNFTV1Entries),\n\n block_number: u64,\n\n) -> Map<String, Value> {\n\n let transfer_update_entry = hex::encode(transfer_update_entries.as_slice());\n\n let transfer_root_hash = hex::encode(root_hash.as_slice());\n\n let mut map = Map::new();\n\n map.insert_str(\"smt_root_hash\", transfer_root_hash);\n\n map.insert_str(\"transfer_update_smt_entry\", transfer_update_entry);\n\n map.insert_u64(\"block_number\", block_number);\n\n map\n\n}\n", "file_path": "src/response/transfer.rs", "rank": 96, "score": 116242.6487380109 }, { "content": "pub fn parse_claimed_update_smt(\n\n (root_hash, claim_update_entries): (H256, ClaimUpdateCotaNFTEntries),\n\n block_number: u64,\n\n) -> Map<String, Value> {\n\n let claim_update_entry = hex::encode(claim_update_entries.as_slice());\n\n let claim_root_hash = hex::encode(root_hash.as_slice());\n\n let mut map = Map::new();\n\n map.insert_str(\"smt_root_hash\", claim_root_hash);\n\n map.insert_str(\"claim_update_smt_entry\", claim_update_entry);\n\n map.insert_u64(\"block_number\", block_number);\n\n map\n\n}\n", "file_path": "src/response/claim.rs", "rank": 97, "score": 116242.6487380109 }, { "content": "fn parse_define_cota_nft(defines: Vec<DefineCotaNft>) -> Vec<DefineDb> {\n\n defines\n\n .into_iter()\n\n .map(|define| DefineDb {\n\n cota_id: parse_bytes_n::<20>(define.cota_id).unwrap(),\n\n total: define.total,\n\n issued: define.issued,\n\n configure: define.configure,\n\n block_number: define.block_number,\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/models/define.rs", "rank": 98, "score": 115507.25043749213 }, { "content": "fn parse_hold_cota_nfts(holds: Vec<HoldCotaNft>) -> Vec<HoldDb> {\n\n holds.into_iter().map(parse_hold_cota_nft).collect()\n\n}\n\n\n", "file_path": "src/models/hold.rs", "rank": 99, "score": 115507.25043749213 } ]
Rust
src/cluster/cut.rs
FridgeSeal/blip
0a202eb0061d8e2ac34a4bd501cfa08051cb4087
use super::{proto, Metadata, State}; use futures::stream::{unfold, Stream}; use rand::{thread_rng, Rng}; use std::{ collections::HashMap, convert::TryInto, net::SocketAddr, ops::Index, result, sync::{Arc, Weak}, }; use thiserror::Error; use tokio::sync::{ broadcast::{error::RecvError, Receiver}, RwLock, }; use tonic::transport::{self, Channel, ClientTlsConfig}; pub(crate) type Result = result::Result<(), Closed>; #[derive(Copy, Clone, Debug, Error)] #[error("closed")] pub struct Closed; pub struct Subscription { state: Weak<RwLock<State>>, rx: Receiver<MultiNodeCut>, } impl Subscription { pub(crate) fn new(state: Weak<RwLock<State>>, rx: Receiver<MultiNodeCut>) -> Self { Self { state, rx } } pub async fn recv(&mut self) -> result::Result<MultiNodeCut, Closed> { let n = match self.rx.recv().await { Ok(view_change) => { return Ok(view_change); } Err(RecvError::Closed) => { return Err(Closed); } Err(RecvError::Lagged(n)) => n, }; let state = self.state.upgrade().ok_or(Closed)?; let state = state.read().await; let mut cut = state.last_cut.clone().ok_or(Closed)?; cut.skipped = n; Ok(cut) } pub fn into_stream(self) -> impl Stream<Item = MultiNodeCut> { unfold(self, |mut s| async { Some((s.recv().await.ok()?, s)) }) } pub fn as_stream(&mut self) -> impl Stream<Item = MultiNodeCut> + '_ { unfold(self, |s| async { Some((s.recv().await.ok()?, s)) }) } } #[derive(Clone, Debug)] pub struct MultiNodeCut { pub(crate) skipped: u64, pub(crate) local_addr: SocketAddr, pub(crate) conf_id: u64, pub(crate) degraded: bool, pub(crate) members: Arc<[Member]>, pub(crate) joined: Arc<[Member]>, pub(crate) kicked: Arc<[Member]>, } impl Index<SocketAddr> for MultiNodeCut { type Output = Member; #[inline] fn index(&self, addr: SocketAddr) -> &Self::Output { self.lookup(addr).unwrap() } } impl MultiNodeCut { pub fn skipped(&self) -> u64 { self.skipped } pub fn local_addr(&self) -> SocketAddr { self.local_addr } pub fn conf_id(&self) -> u64 { self.conf_id } pub fn is_degraded(&self) -> bool { self.degraded } pub(crate) fn random_member(&self) -> &Member { &self.members[thread_rng().gen_range(0..self.members.len())] } pub fn members(&self) -> &Arc<[Member]> { &self.members } pub fn joined(&self) -> &Arc<[Member]> { &self.joined } pub fn kicked(&self) -> &Arc<[Member]> { &self.kicked } pub fn with_meta<K: AsRef<str>>(&self, key: K) -> impl Iterator<Item = (&Member, &[u8])> { self.members.iter().filter_map(move |m| { let val = m.meta.get(key.as_ref())?; Some((m, val.as_ref())) }) } pub fn lookup(&self, addr: SocketAddr) -> Option<&Member> { self.members .binary_search_by_key(&addr, |m| m.addr()) .ok() .map(|i| &self.members[i]) } } #[derive(Clone, Debug)] pub struct Member { addr: SocketAddr, tls: Option<Arc<ClientTlsConfig>>, meta: Metadata, chan: Channel, } impl From<&Member> for proto::Endpoint { #[inline] fn from(Member { addr, tls, .. }: &Member) -> Self { Self::from(*addr).tls(tls.is_some()) } } impl From<&Member> for transport::Endpoint { #[inline] fn from(Member { addr, tls, .. }: &Member) -> Self { endpoint(*addr, tls.as_deref()) } } #[inline] fn endpoint(addr: SocketAddr, tls: Option<&ClientTlsConfig>) -> transport::Endpoint { match tls.cloned() { Some(tls) => format!("https://{}", addr) .try_into() .map(|e: transport::Endpoint| e.tls_config(tls).unwrap()), None => format!("http://{}", addr).try_into(), } .unwrap() } impl Member { #[inline] pub(crate) fn new(addr: SocketAddr, tls: Option<Arc<ClientTlsConfig>>, meta: Metadata) -> Self { let chan = endpoint(addr, tls.as_deref()) .connect_lazy() .unwrap(); #[rustfmt::skip] let m = Self { addr, tls, meta, chan }; m } pub fn addr(&self) -> SocketAddr { self.addr } pub fn tls_config(&self) -> Option<&ClientTlsConfig> { self.tls.as_deref() } pub fn metadata(&self) -> &HashMap<String, Vec<u8>> { &self.meta.keys } pub fn channel(&self) -> Channel { self.chan.clone() } }
use super::{proto, Metadata, State}; use futures::stream::{unfold, Stream}; use rand::{thread_rng, Rng}; use std::{ collections::HashMap, convert::TryInto, net::SocketAddr, ops::Index, result, sync::{Arc, Weak}, }; use thiserror::Error; use tokio::sync::{ broadcast::{error::RecvError, Receiver}, RwLock, }; use tonic::transport::{self, Channel, ClientTlsConfig}; pub(crate) type Result = result::Result<(), Closed>; #[derive(Copy, Clone, Debug, Error)] #[error("closed")] pub struct Closed; pub struct Subscription { state: Weak<RwLock<State>>, rx: Receiver<MultiNodeCut>, } impl Subscription { pub(crate) fn new(state: Weak<RwLock<State>>, rx: Receiver<MultiNodeCut>) -> Self { Self { state, rx } } pub async fn recv(&mut self) -> result::Result<MultiNodeCut, Closed> { let n = match self.rx.recv().await { Ok(view_change) => { return Ok(view_change); } Err(RecvError::Closed) => { return Err(Closed); } Err(RecvError::Lagged(n)) => n, }; let state = self.state.upgrade().ok_or(Closed)?; let state = state.read().await; let mut cut = state.last_cut.clone().ok_or(Closed)?; cut.skipped = n; Ok(cut) } pub fn into_stream(self) -> impl Stream<Item = MultiNodeCut> { unfold(self, |mut s| async { Some((s.recv().await.ok()?, s)) }) } pub fn as_stream(&mut self) -> impl Stream<Item = MultiNodeCut> + '_ { unfold(self, |s| async { Some((s.recv().await.ok()?, s)) }) } } #[derive(Clone, Debug)] pub struct MultiNodeCut { pub(crate) skipped: u64, pub(crate) local_addr: SocketAddr, pub(crate) conf_id: u64, pub(crate) degraded: bool, pub(crate) members: Arc<[Member]>, pub(crate) joined: Arc<[Member]>, pub(crate) kicked: Arc<[Member]>, } impl Index<SocketAddr> for MultiNodeCut { type Output = Member; #[inline] fn index(&self, addr: SocketAddr) -> &Self::Output { self.lookup(addr).unwrap() } } impl MultiNodeCut { pub fn skipped(&self) -> u64 { self.skipped } pub fn local_addr(&self) -> SocketAddr { self.local_addr } pub fn conf_id(&self) -> u64 { self.conf_id } pub fn is_degraded(&self) -> bool { self.degraded } pub(crate) fn random_member(&self) -> &Member { &self.members[thread_rng().gen_range(0..self.members.len())] } pub fn members(&self) -> &Arc<[Member]> { &self.members } pub fn joined(&self) -> &Arc<[Member]> { &self.joined } pub fn kicked(&self) -> &Arc<[Member]> { &self.kicked } pub fn with_meta<K: AsRef<str>>(&self, key: K) -> impl Iterator<Item = (&Member, &[u8])> { self.members.iter().filter_map(move |m| { let val = m.meta.get(key.as_ref())?; Some((m, val.as_ref())) }) } pub fn lookup(&self, addr: SocketAddr) -> Option<&Member> { self.members .binary_search_by_key(&addr, |m| m.addr()) .ok() .map(|i| &self.members[i]) } } #[derive(Clone, Debug)] pub struct Member { addr: SocketAddr, tls: Option<Arc<ClientTlsConfig>>, meta: Metadata, chan: Channel, } impl From<&Member> for proto::Endpoint { #[inline] fn from(Member { addr, tls, .. }: &Member) -> Self { Self::from(*addr).tls(tls.is_some()) } } impl From<&Member> for transport::Endpoint { #[inline] fn from(Member { addr, tls, .. }: &Member) -> Self { endpoint(*addr, tls.as_deref()) } } #[inline]
impl Member { #[inline] pub(crate) fn new(addr: SocketAddr, tls: Option<Arc<ClientTlsConfig>>, meta: Metadata) -> Self { let chan = endpoint(addr, tls.as_deref()) .connect_lazy() .unwrap(); #[rustfmt::skip] let m = Self { addr, tls, meta, chan }; m } pub fn addr(&self) -> SocketAddr { self.addr } pub fn tls_config(&self) -> Option<&ClientTlsConfig> { self.tls.as_deref() } pub fn metadata(&self) -> &HashMap<String, Vec<u8>> { &self.meta.keys } pub fn channel(&self) -> Channel { self.chan.clone() } }
fn endpoint(addr: SocketAddr, tls: Option<&ClientTlsConfig>) -> transport::Endpoint { match tls.cloned() { Some(tls) => format!("https://{}", addr) .try_into() .map(|e: transport::Endpoint| e.tls_config(tls).unwrap()), None => format!("http://{}", addr).try_into(), } .unwrap() }
function_block-full_function
[ { "content": "type ResolvedMember = Result<Member, MemberResolutionError>;\n\n\n", "file_path": "src/cluster/mod.rs", "rank": 0, "score": 134911.461126479 }, { "content": "pub fn addr_in(subnet: u32, host: u32) -> SocketAddr {\n\n let mut addr = (subnet & 0x1fff) << 12; // 11 bits of subnet\n\n addr |= host & 0xfff; // 12 bits of host\n\n addr |= 0x7f800000; // and the loopback stuff\n\n\n\n (addr.to_be_bytes(), 10000).into()\n\n}\n\n\n", "file_path": "tests/shared.rs", "rank": 1, "score": 99061.86687491024 }, { "content": "fn unixtime() -> u64 {\n\n SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .expect(\"not to be approaching a spacetime singularity\")\n\n .as_secs()\n\n}\n\n\n\n/// An asynchronous event id.\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\npub struct Id {\n\n unix: u64,\n\n uniq: u64,\n\n}\n\n\n\nimpl PartialOrd for Id {\n\n fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n\n Some(self.cmp(other))\n\n }\n\n}\n\n\n", "file_path": "src/collections/event.rs", "rank": 3, "score": 83137.97925825621 }, { "content": "fn main() -> io::Result<()> {\n\n tonic_build::compile_protos(\"proto/blip.proto\")?;\n\n\n\n #[cfg(feature = \"cache\")]\n\n tonic_build::compile_protos(\"proto/cache.proto\")?;\n\n\n\n Ok(())\n\n}\n", "file_path": "build.rs", "rank": 4, "score": 82311.20390070279 }, { "content": "pub fn init_logger() {\n\n let _ = TestLogger::init(LevelFilter::Info, Config::default());\n\n}\n\n\n", "file_path": "tests/shared.rs", "rank": 5, "score": 81883.21450707474 }, { "content": "pub fn subnet() -> u32 {\n\n static SUBNET: AtomicU32 = AtomicU32::new(0);\n\n let s = SUBNET.fetch_add(1, Relaxed);\n\n assert!(s <= 2 ^ 11);\n\n s\n\n}\n\n\n", "file_path": "tests/shared.rs", "rank": 6, "score": 78587.21585991772 }, { "content": "/// A [MeshService] that can be converted into a [Service] to be served over grpc.\n\npub trait ExposedService: MeshService + Clone {\n\n /// Add metadata to distribute to other members of the mesh.\n\n fn add_metadata<K: Extend<(String, Vec<u8>)>>(&self, _keys: &mut K) {}\n\n\n\n /// The service implementation.\n\n type Service: Service<HttpRequest<Body>, Response = HttpResponse<BoxBody>>\n\n + NamedService\n\n + Clone\n\n + Send\n\n + 'static;\n\n\n\n /// Convert self into a [Service].\n\n fn into_service(self) -> Self::Service;\n\n}\n", "file_path": "src/overlay.rs", "rank": 7, "score": 70363.00526566323 }, { "content": "fn join_task() -> (PendingJoin, JoinTask) {\n\n let (tx, rx) = oneshot::channel();\n\n\n\n let tx = Arc::new(tx);\n\n let pending = PendingJoin { tx };\n\n\n\n let tx = Arc::downgrade(&pending.tx);\n\n let join = JoinTask { rx, tx };\n\n\n\n (pending, join)\n\n}\n\n\n", "file_path": "src/cluster/mod.rs", "rank": 8, "score": 69423.16555646643 }, { "content": "type Grpc<T> = Result<T, Status>;\n", "file_path": "src/cluster/mod.rs", "rank": 9, "score": 68354.02393307342 }, { "content": "struct JoinTask {\n\n rx: oneshot::Receiver<JoinResp>,\n\n #[allow(dead_code)]\n\n tx: Weak<oneshot::Sender<JoinResp>>,\n\n}\n\n\n\nimpl Future for JoinTask {\n\n type Output = Result<JoinResp, oneshot::error::RecvError>;\n\n\n\n #[inline]\n\n fn poll(mut self: Pin<&mut Self>, ctx: &mut Context<'_>) -> Poll<Self::Output> {\n\n Pin::new(&mut self.rx).poll(ctx)\n\n }\n\n}\n\n\n", "file_path": "src/cluster/mod.rs", "rank": 10, "score": 67885.35682837242 }, { "content": "struct PendingJoin {\n\n tx: Arc<oneshot::Sender<JoinResp>>,\n\n}\n\n\n\nimpl PendingJoin {\n\n /// Returns false if the originating task has dropped their [JoinTask].\n\n #[inline]\n\n fn task_is_waiting(&self) -> bool {\n\n Arc::weak_count(&self.tx) > 0\n\n }\n\n\n\n /// Notify the waiting task that a response is available.\n\n fn complete(self, join: JoinResp) {\n\n if !self.task_is_waiting() {\n\n return;\n\n }\n\n\n\n let _ = Arc::try_unwrap(self.tx)\n\n .expect(\"no other references to exist\")\n\n .send(join);\n\n }\n\n}\n\n\n", "file_path": "src/cluster/mod.rs", "rank": 11, "score": 67885.35682837242 }, { "content": "pub fn cfg_handle() -> (CfgHandle, CfgService) {\n\n let (tx, rx) = mpsc::channel(32);\n\n\n\n let h = CfgHandle { rx };\n\n let s = CfgService { tx };\n\n\n\n (h, s)\n\n}\n\n\n\npub struct CfgService {\n\n tx: mpsc::Sender<MultiNodeCut>,\n\n}\n\n\n\n#[blip::async_trait]\n\nimpl MeshService for CfgService {\n\n async fn accept(mut self: Box<Self>, mut cuts: Subscription) {\n\n while let Ok(cut) = cuts.recv().await {\n\n self.tx.send(cut).await.unwrap();\n\n }\n\n }\n", "file_path": "tests/shared.rs", "rank": 12, "score": 66783.17510700575 }, { "content": "#[inline]\n\nfn err_when<F: FnOnce() -> Status>(cond: bool, err: F) -> Grpc<()> {\n\n if cond {\n\n Err(err())\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl State {\n\n /// Verify that `host` is not already a member in the active configuration.\n\n fn verify_unused_host(&self, host: &Endpoint) -> Grpc<()> {\n\n err_when(self.nodes.contains(host), || {\n\n Status::already_exists(\"host already exists\")\n\n })\n\n }\n\n\n\n /// Verify that `uuid` has not yet been used by any node in the active configuration.\n\n fn verify_unused_uuid(&self, uuid: &NodeId) -> Grpc<()> {\n\n err_when(self.uuids.contains(uuid), || {\n\n Status::already_exists(\"uuid already exists\")\n", "file_path": "src/cluster/mod.rs", "rank": 13, "score": 63269.63013917473 }, { "content": "struct FnSource<F>(F);\n\n\n\n#[crate::async_trait]\n\nimpl<F> Source for FnSource<F>\n\nwhere F: 'static + Sync + Send + Fn(&[u8]) -> Vec<u8>\n\n{\n\n async fn get(&self, key: &[u8]) -> Result<Vec<u8>, Status> {\n\n Ok(self.0(key))\n\n }\n\n}\n\n\n\n/// A distributed binary cache. May be used standalone, or added to a [Mesh] to operate in\n\n/// networked mode.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use blip::service::Cache;\n\n/// use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};\n\n/// # use tonic::Status;\n\n///\n", "file_path": "src/service/cache.rs", "rank": 14, "score": 59058.62939648292 }, { "content": "#[derive(Debug, Error)]\n\nenum JoinError {\n\n #[error(\"timed out: {}\", .0)]\n\n TimedOut(#[from] Elapsed),\n\n\n\n #[error(\"endpoint resolution failed: {}\", .0)]\n\n Resolution(#[from] EndpointError),\n\n\n\n #[error(\"phase 1 failed: {}\", .0)]\n\n Phase1(GrpcError),\n\n\n\n #[error(\"phase 2 failed: {}\", .0)]\n\n Phase2(GrpcError),\n\n\n\n #[error(\"phase 2 failed: no observers\")]\n\n NoObservers,\n\n}\n\n\n", "file_path": "src/cluster/bootstrap.rs", "rank": 15, "score": 50502.928723429715 }, { "content": "#[derive(Copy, Clone, Debug, Error)]\n\nenum MemberResolutionError {\n\n #[error(\"invalid socketaddr: {}\", .0)]\n\n InvalidSocketAddr(#[from] SocketAddrError),\n\n\n\n #[error(\"missing metadata\")]\n\n MissingMetadata,\n\n}\n\n\n", "file_path": "src/cluster/mod.rs", "rank": 16, "score": 47895.085502637776 }, { "content": "struct Lazy {\n\n sem: Semaphore,\n\n val: OnceCell<Result<Bytes, Status>>,\n\n}\n\n\n", "file_path": "src/service/cache.rs", "rank": 17, "score": 44112.924769753445 }, { "content": "#[derive(PartialEq, Eq, Hash)]\n\nstruct Vote {\n\n node: Endpoint,\n\n ring: u64,\n\n}\n\n\n\npub(crate) struct State {\n\n // membership state\n\n uuid: NodeId,\n\n conf_id: u64,\n\n nodes: Tumbler<Endpoint>,\n\n uuids: BTreeSet<NodeId>,\n\n metadata: HashMap<Endpoint, Metadata>,\n\n last_cut: Option<MultiNodeCut>,\n\n\n\n // broadcast protocol state\n\n bcast_filter: EventFilter,\n\n\n\n // join protocol state\n\n join_requests: HashMap<Endpoint, PendingJoin>,\n\n\n", "file_path": "src/cluster/mod.rs", "rank": 18, "score": 44112.924769753445 }, { "content": "#[derive(Default)]\n\nstruct Remote {\n\n config: Option<MultiNodeCut>,\n\n shards: Ring<SocketAddr>,\n\n}\n\n\n\nimpl<S: ?Sized> Clone for Cache<S> {\n\n #[inline]\n\n fn clone(&self) -> Self {\n\n Self(self.0.clone())\n\n }\n\n}\n\n\n\n#[crate::async_trait]\n\nimpl MeshService for Cache {\n\n async fn accept(self: Box<Self>, mut cuts: Subscription) {\n\n while let Ok(cut) = cuts.recv().await {\n\n let mut r = self.0.remote.write().await;\n\n\n\n r.shards.clear();\n\n r.shards\n", "file_path": "src/service/cache.rs", "rank": 19, "score": 44112.924769753445 }, { "content": "struct PaxosRound {\n\n sender: Endpoint,\n\n conf_id: u64,\n\n members: usize,\n\n}\n\n\n\nimpl PaxosRound {\n\n async fn init_delay(self) -> Self {\n\n let exp = ((self.members + 1) as f64).log(2.0) * 4000.0;\n\n let ms = thread_rng().gen_range(1000..exp as u64);\n\n sleep(Duration::from_millis(ms)).await;\n\n self\n\n }\n\n}\n\n\n", "file_path": "src/cluster/mod.rs", "rank": 20, "score": 42637.719478375504 }, { "content": "#[derive(Default)]\n\nstruct AlertBatch {\n\n started: bool,\n\n conf_id: u64,\n\n edges: Vec<Edge>,\n\n}\n\n\n", "file_path": "src/cluster/mod.rs", "rank": 21, "score": 42637.719478375504 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn test_html_root_url() {\n\n version_sync::assert_html_root_url_updated!(\"src/lib.rs\");\n\n}\n\n\n\n#[macro_use]\n\nmod macros;\n\n\n\nmod collections;\n\n\n\npub mod cluster;\n\npub mod overlay;\n\npub mod service;\n\n\n\n#[doc(inline)]\n\npub use cluster::cut::{Member, MultiNodeCut, Subscription};\n\n#[doc(inline)]\n\npub use overlay::{ExposedService, Mesh, MeshService};\n\n\n\n/// A re-export of [async_trait] for convenience.\n\n///\n\n/// [async_trait]: https://docs.rs/async-trait/latest/async_trait/attr.async_trait.html\n\n#[doc(inline)]\n\npub use tonic::async_trait;\n", "file_path": "src/lib.rs", "rank": 22, "score": 41296.045335841445 }, { "content": "/// A bound around `T` that doesn't drop any `T`s.\n\nstruct UnsafeBounds<T> {\n\n s: Bound<ManuallyDrop<(u64, T)>>,\n\n e: Bound<ManuallyDrop<(u64, T)>>,\n\n}\n\n\n\nimpl<T> RangeBounds<(u64, T)> for UnsafeBounds<T> {\n\n fn start_bound(&self) -> Bound<&(u64, T)> {\n\n match &self.s {\n\n Bound::Excluded(b) => Bound::Excluded(&b),\n\n Bound::Included(b) => Bound::Included(&b),\n\n Bound::Unbounded => Bound::Unbounded,\n\n }\n\n }\n\n\n\n fn end_bound(&self) -> Bound<&(u64, T)> {\n\n match &self.e {\n\n Bound::Excluded(b) => Bound::Excluded(&b),\n\n Bound::Included(b) => Bound::Included(&b),\n\n Bound::Unbounded => Bound::Unbounded,\n\n }\n", "file_path": "src/collections/tumbler.rs", "rank": 23, "score": 39277.75205429403 }, { "content": "#[crate::async_trait]\n\npub trait MeshService: Send {\n\n /// Receive accepted view-change proposals.\n\n ///\n\n /// This method is called once for every [MeshService] added to a [Mesh], and will be\n\n /// polled for the entire period the mesh remains online. Resolving early is fine and\n\n /// does not constitute an error.\n\n ///\n\n /// Blocking (of the non-async variety) within this future should _never_ happen, and\n\n /// will starve the executor if it does.\n\n ///\n\n /// If the mesh exits (for any reason), this future will be dropped if it has not yet\n\n /// resolved (which may occur at any yield point).\n\n async fn accept(self: Box<Self>, cuts: Subscription);\n\n}\n\n\n", "file_path": "src/overlay.rs", "rank": 24, "score": 38306.64081266762 }, { "content": "struct Inner<S: ?Sized> {\n\n inflight: Mutex<HashMap<Bytes, Arc<Lazy>>>,\n\n remote: RwLock<Remote>,\n\n local_keys: Mutex<Cache2q<Bytes, Bytes>>,\n\n hot_keys: Mutex<Cache2q<Bytes, Bytes>>,\n\n source: S,\n\n}\n\n\n", "file_path": "src/service/cache.rs", "rank": 25, "score": 37663.244966837505 }, { "content": "#[crate::async_trait]\n\npub trait Source: Sync + Send + 'static {\n\n /// Retrieve a value for `key`.\n\n async fn get(&self, key: &[u8]) -> Result<Vec<u8>, Status>;\n\n}\n\n\n", "file_path": "src/service/cache.rs", "rank": 26, "score": 33544.47418639418 }, { "content": "type GrpcResponse<T> = Grpc<Response<T>>;\n\n\n\npub(crate) struct Cluster {\n\n cfg: Config,\n\n addr: SocketAddr,\n\n state: Arc<RwLock<State>>,\n\n cuts: broadcast::Sender<MultiNodeCut>,\n\n}\n\n\n\n#[crate::async_trait]\n\nimpl Membership for Arc<Cluster> {\n\n /// Handle a pre-join request (phase 1 of the join protocol).\n\n ///\n\n /// The only thing we need to do is inform the joiner about which nodes to contact in\n\n /// order to initiate phase 2 of the join protocol.\n\n async fn pre_join(&self, req: Request<PreJoinReq>) -> GrpcResponse<PreJoinResp> {\n\n let PreJoinReq { sender, uuid } = req.into_inner();\n\n sender.validate()?;\n\n\n\n let state = self.state.read().await;\n", "file_path": "src/cluster/mod.rs", "rank": 27, "score": 33193.30706013628 }, { "content": "#[derive(Debug, Error)]\n\nenum GrpcError {\n\n #[error(\"connection failed: {}\", .0)]\n\n Connect(#[from] transport::Error),\n\n\n\n #[error(\"call failed: {}\", .0)]\n\n Call(#[from] tonic::Status),\n\n}\n\n\n\nimpl Cluster {\n\n /// Handle network partitions where the local member is ejected from the cluster by rejoining\n\n /// through random healthy members (from the last-seen cut), or bootstrapping if this node is\n\n /// a seed node.\n\n pub(crate) async fn handle_parts(self: Arc<Self>, mut cuts: Subscription) -> cut::Result {\n\n self.initialize().await;\n\n\n\n loop {\n\n let cut = cuts.recv().await?;\n\n\n\n if !cut.is_degraded() {\n\n continue;\n", "file_path": "src/cluster/bootstrap.rs", "rank": 39, "score": 25255.291373432807 }, { "content": " let members: Arc<[_]> = vec![self\n\n .resolve_member_meta(self.cfg.meta.clone(), &node)\n\n .unwrap()]\n\n .into();\n\n\n\n state.join_node(node, Join { uuid, meta });\n\n\n\n let cut = MultiNodeCut {\n\n skipped: 0,\n\n local_addr: self.addr,\n\n conf_id: state.rehash_config(),\n\n degraded: false,\n\n members: members.clone(),\n\n joined: members,\n\n kicked: vec![].into(),\n\n };\n\n\n\n state.last_cut = Some(cut.clone());\n\n self.propagate_cut(cut);\n\n\n", "file_path": "src/cluster/bootstrap.rs", "rank": 40, "score": 32.48884832246627 }, { "content": "\n\n members.sort_by_key(|m| m.addr());\n\n\n\n let cut = MultiNodeCut {\n\n skipped: 0,\n\n local_addr: self.addr,\n\n degraded: !state.nodes.contains(&local_node),\n\n conf_id: state.rehash_config(),\n\n members: members.into(),\n\n joined: joined.into(),\n\n kicked: kicked.into(),\n\n };\n\n\n\n state.respond_to_joiners(&cut, local_node);\n\n state.last_cut = Some(cut.clone());\n\n self.propagate_cut(cut);\n\n }\n\n\n\n /// Resolve a [Member] by looking up its metadata in the cluster state.\n\n fn resolve_member(&self, state: &State, peer: &Endpoint) -> ResolvedMember {\n", "file_path": "src/cluster/mod.rs", "rank": 41, "score": 32.312583389246164 }, { "content": " .extend(cut.with_meta(key!(Self)).map(|m| m.0.addr()));\n\n r.config = Some(cut);\n\n }\n\n }\n\n}\n\n\n\nimpl ExposedService for Cache {\n\n #[inline]\n\n fn add_metadata<K: Extend<(String, Vec<u8>)>>(&self, keys: &mut K) {\n\n keys.extend(vec![(key!(Self).to_owned(), vec![])]);\n\n }\n\n\n\n type Service = CacheServer<Self>;\n\n\n\n #[inline]\n\n fn into_service(self) -> Self::Service {\n\n CacheServer::new(self)\n\n }\n\n}\n\n\n", "file_path": "src/service/cache.rs", "rank": 42, "score": 31.915042815252782 }, { "content": "///\n\n/// # Examples\n\n/// ```\n\n/// use blip::{MeshService, Subscription};\n\n///\n\n/// struct MySvc;\n\n///\n\n/// #[blip::async_trait]\n\n/// impl MeshService for MySvc {\n\n/// async fn accept(self: Box<Self>, mut cuts: Subscription) {\n\n/// while let Ok(cut) = cuts.recv().await {\n\n/// // handle membership change\n\n/// let _ = cut.members();\n\n/// let _ = cut.joined();\n\n/// let _ = cut.kicked();\n\n/// }\n\n/// }\n\n/// }\n\n/// ```\n\n#[crate::async_trait]\n", "file_path": "src/overlay.rs", "rank": 43, "score": 31.009198591328097 }, { "content": " pub fn from_fn<F>(max_keys: usize, source: F) -> Self\n\n where F: Sync + Send + 'static + Fn(&[u8]) -> Vec<u8> {\n\n Self::new(max_keys, FnSource(source))\n\n }\n\n\n\n /// Retrieve the value associated with `key`.\n\n pub async fn get<K: Into<Bytes>>(&self, key: K) -> Result<Bytes, Status> {\n\n let key = key.into();\n\n\n\n match self.liftoff(key.clone()).await {\n\n Flight::Leader(lazy) => {\n\n let call = self.get_inner(key.clone()).await;\n\n lazy.val.set(call).unwrap();\n\n lazy.sem.add_permits(2 ^ 24);\n\n self.0.inflight.lock().await.remove(&key);\n\n lazy.val.get().unwrap().clone()\n\n }\n\n\n\n Flight::Follower(lazy) => {\n\n drop(lazy.sem.acquire().await);\n", "file_path": "src/service/cache.rs", "rank": 44, "score": 28.78419477082359 }, { "content": " self.cfg.k = cfg.subjects_per_observer;\n\n self\n\n }\n\n\n\n /// Set a seed node to contact in order to join an existing network. If this is left\n\n /// unset (the default), a new mesh will be bootstrapped with the local node as the\n\n /// sole member.\n\n pub fn join_seed(mut self, addr: SocketAddr, use_tls: bool) -> Self {\n\n self.cfg.seed = Some((addr, use_tls).into());\n\n self\n\n }\n\n\n\n /// Add metadata to distribute to other members of the mesh.\n\n pub fn add_metadata<I: IntoIterator<Item = (String, Vec<u8>)>>(mut self, iter: I) -> Self {\n\n self.cfg.meta.extend(iter);\n\n self\n\n }\n\n\n\n /// Configure TLS for outgoing connections to mesh members that are expecting TLS.\n\n ///\n", "file_path": "src/overlay.rs", "rank": 45, "score": 28.723522061605948 }, { "content": " for node in proposal {\n\n if let Some(Join { uuid, meta }) = state.cd_joiners.remove(&node) {\n\n joined.push(self.resolve_member_meta(meta.clone(), &node).unwrap());\n\n state.join_node(node, Join { uuid, meta });\n\n } else {\n\n let meta = state.kick_node(&node);\n\n kicked.push(self.resolve_member_meta(meta, &node).unwrap());\n\n }\n\n }\n\n\n\n state.clear_consensus();\n\n\n\n let local_node = self.local_node();\n\n\n\n joined.sort_by_key(|m| m.addr());\n\n kicked.sort_by_key(|m| m.addr());\n\n\n\n let mut members: Vec<_> = (state.nodes.iter())\n\n .map(|node| self.resolve_member(state, node).unwrap())\n\n .collect();\n", "file_path": "src/cluster/mod.rs", "rank": 46, "score": 28.022204568640696 }, { "content": " }\n\n for uuid in uuids {\n\n assert!(state.uuids.insert(uuid));\n\n }\n\n\n\n joined.sort_by_key(|m| m.addr());\n\n\n\n let mut members: Vec<_> = (state.nodes.iter())\n\n .map(|node| self.resolve_member(&state, node).unwrap())\n\n .collect();\n\n\n\n members.sort_by_key(|m| m.addr());\n\n\n\n let cut = MultiNodeCut {\n\n skipped: 0,\n\n local_addr: self.addr,\n\n degraded: !state.nodes.contains(&self.local_node()),\n\n conf_id: state.rehash_config(),\n\n members: members.into(),\n\n joined: joined.into(),\n", "file_path": "src/cluster/bootstrap.rs", "rank": 47, "score": 27.76425779993856 }, { "content": " let addr: SocketAddr = peer.try_into()?;\n\n\n\n let meta = (state.metadata)\n\n .get(peer)\n\n .ok_or(MemberResolutionError::MissingMetadata)?\n\n .clone();\n\n\n\n let tls = self.get_client_tls(peer.tls);\n\n\n\n Ok(Member::new(addr, tls, meta))\n\n }\n\n\n\n /// Resolve a [Member] without performing a metadata lookup.\n\n ///\n\n /// This is useful if the endpoint has not been added to the cluster state.\n\n fn resolve_member_meta(&self, meta: Metadata, peer: &Endpoint) -> ResolvedMember {\n\n let addr: SocketAddr = peer.try_into()?;\n\n let tls = self.get_client_tls(peer.tls);\n\n\n\n Ok(Member::new(addr, tls, meta))\n", "file_path": "src/cluster/mod.rs", "rank": 48, "score": 27.671696612463208 }, { "content": " kicked: vec![].into(),\n\n };\n\n\n\n state.last_cut = Some(cut.clone());\n\n self.propagate_cut(cut);\n\n\n\n info!(\"joined: conf_id={}\", state.conf_id);\n\n Ok(())\n\n }\n\n\n\n /// Request to join the provided seed node. Returns `Ok(_)` if both phases of the join\n\n /// protocol completed successfully.\n\n async fn request_join(&self, state: &State, seed: &Endpoint) -> Result<JoinResp, JoinError> {\n\n let p1j_req = PreJoinReq {\n\n sender: self.local_node(),\n\n uuid: state.uuid.clone(),\n\n };\n\n\n\n let r1 = self.join_phase1(p1j_req, seed).await?;\n\n\n", "file_path": "src/cluster/bootstrap.rs", "rank": 49, "score": 27.658584740183684 }, { "content": "impl NodeId {\n\n #[inline]\n\n pub fn generate() -> Self {\n\n random::<u128>().into()\n\n }\n\n}\n\n\n\nimpl Deref for Metadata {\n\n type Target = HashMap<String, Vec<u8>>;\n\n\n\n #[inline]\n\n fn deref(&self) -> &Self::Target {\n\n &self.keys\n\n }\n\n}\n\n\n\nimpl DerefMut for Metadata {\n\n #[inline]\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.keys\n", "file_path": "src/cluster/proto.rs", "rank": 50, "score": 27.091336979045792 }, { "content": " let scheme = if e.tls { \"https\" } else { \"http\" };\n\n\n\n Ok(format!(\"{}://{}\", scheme, addr).try_into()?)\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, Error)]\n\npub enum SocketAddrError {\n\n #[error(\"invalid host len: {}\", .0)]\n\n InvalidLen(usize),\n\n\n\n #[error(\"invalid port: {}\", .0)]\n\n InvalidPort(u32),\n\n}\n\n\n\nimpl TryFrom<&Endpoint> for SocketAddr {\n\n type Error = SocketAddrError;\n\n\n\n fn try_from(Endpoint { host, port, .. }: &Endpoint) -> Result<Self, Self::Error> {\n\n if *port > std::u16::MAX as u32 {\n", "file_path": "src/cluster/proto.rs", "rank": 51, "score": 26.320338996657043 }, { "content": " }\n\n\n\n /// Attempt to join a cluster via the provided seed node.\n\n async fn join_via(&self, seed: &Endpoint, max_wait: Duration) -> Result<(), JoinError> {\n\n let mut state = self.state.write().await;\n\n\n\n state.uuid = NodeId::generate();\n\n\n\n info!(\"requesting join: timeout={:?}\", max_wait);\n\n let JoinResp { nodes, uuids, .. } =\n\n timeout(max_wait, self.request_join(&state, seed)).await??;\n\n\n\n state.clear_consensus();\n\n state.clear_membership();\n\n\n\n let mut joined = Vec::with_capacity(nodes.len());\n\n for NodeMetadata { node, meta } in nodes {\n\n joined.push(self.resolve_member_meta(meta.clone(), &node).unwrap());\n\n assert!(state.nodes.insert(node.clone()));\n\n assert!(state.metadata.insert(node, meta).is_none());\n", "file_path": "src/cluster/bootstrap.rs", "rank": 52, "score": 25.679293310377005 }, { "content": "}\n\n\n\npub struct CfgHandle {\n\n rx: mpsc::Receiver<MultiNodeCut>,\n\n}\n\n\n\nimpl CfgHandle {\n\n /// Blocks until a view-change proposal with `n` peers is accepted.\n\n pub async fn cfg_change(&mut self, n: usize) -> MultiNodeCut {\n\n while let Some(cut) = self.rx.recv().await {\n\n if cut.members().len() == n {\n\n return cut;\n\n }\n\n }\n\n panic!(\"cfg_handle sender closed!\");\n\n }\n\n}\n", "file_path": "tests/shared.rs", "rank": 53, "score": 25.18369641446357 }, { "content": " }\n\n\n\n #[inline]\n\n pub(crate) fn into_service(self: Arc<Self>) -> MembershipServer<Arc<Self>> {\n\n MembershipServer::new(self)\n\n }\n\n\n\n pub(crate) fn subscribe(&self) -> Subscription {\n\n let state = Arc::downgrade(&self.state);\n\n let rx = self.cuts.subscribe();\n\n Subscription::new(state, rx)\n\n }\n\n\n\n #[inline]\n\n fn local_node(&self) -> Endpoint {\n\n Endpoint::from(self.addr).tls(self.cfg.server_tls)\n\n }\n\n\n\n /// Resolve an `Endpoint` to a `transport::Endpoint`, applying the configured client TLS\n\n /// settings if specified by the endpoint.\n", "file_path": "src/cluster/mod.rs", "rank": 54, "score": 23.534253612563056 }, { "content": " fn fmt(&self, w: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match SocketAddr::try_from(self) {\n\n Ok(s) => write!(w, \"{}\", s),\n\n Err(e) => write!(w, \"{}\", e),\n\n }\n\n }\n\n}\n\n\n\nimpl Endpoint {\n\n /// Set whether tls is expected for this endpoint.\n\n pub const fn tls(mut self, tls: bool) -> Self {\n\n self.tls = tls;\n\n self\n\n }\n\n\n\n /// Verify that the ip address and port in this endpoint are valid.\n\n pub fn validate(&self) -> Result<(), Status> {\n\n self.try_into()\n\n .map_err(|_| Status::invalid_argument(\"invalid endpoint\"))\n\n .map(|_: SocketAddr| {})\n", "file_path": "src/cluster/proto.rs", "rank": 55, "score": 23.496648703303403 }, { "content": " /// use blip::service::{cache::Source, Cache};\n\n /// use tonic::Status;\n\n ///\n\n /// struct Echo;\n\n ///\n\n /// #[blip::async_trait]\n\n /// impl Source for Echo {\n\n /// async fn get(&self, key: &[u8]) -> Result<Vec<u8>, Status> {\n\n /// Ok(key.into())\n\n /// }\n\n /// }\n\n ///\n\n /// let cache = Cache::new(1024, Echo);\n\n /// ```\n\n pub fn new<S: Source>(max_keys: usize, source: S) -> Self {\n\n let max_hot = cmp::max(1, max_keys / 8);\n\n\n\n let inner = Inner {\n\n inflight: Mutex::default(),\n\n remote: RwLock::default(),\n", "file_path": "src/service/cache.rs", "rank": 56, "score": 23.30676984269386 }, { "content": " async fn lookup_shard(&self, key: &[u8]) -> Option<Channel> {\n\n let conn = self.0.remote.read().map(|r| {\n\n // if there's no configuration, we're in standalone mode or the mesh hasn't yet\n\n // bootstrapped; in either case, assume we're the owner of key.\n\n let cut = r.config.as_ref()?;\n\n\n\n match *r.shards.try_get(key)? {\n\n // if the shard's addr is our addr, it's ours.\n\n s if s == cut.local_addr() => None,\n\n // otherwise it's some other node's.\n\n s => Some(cut[s].channel()),\n\n }\n\n });\n\n\n\n Some(conn.await?)\n\n }\n\n}\n\n\n\n/// Load a key's value from the cache.\n\n#[inline]\n", "file_path": "src/service/cache.rs", "rank": 57, "score": 23.018781476981175 }, { "content": " where\n\n S: ExposedService + 'static,\n\n <<S as ExposedService>::Service as Service<HttpRequest<Body>>>::Future: Send + 'static,\n\n <<S as ExposedService>::Service as Service<HttpRequest<Body>>>::Error:\n\n Into<Box<dyn error::Error + Send + Sync>> + Send,\n\n {\n\n #[rustfmt::skip]\n\n let Mesh { mut cfg, mut grpc, svcs } = self.add_mesh_service(svc.clone());\n\n svc.add_metadata(&mut *cfg.meta);\n\n let grpc = grpc.add_service(svc.into_service());\n\n\n\n Mesh { cfg, grpc, svcs }\n\n }\n\n\n\n /// Consume this [Mesh], creating a future that will run on a tokio executor.\n\n ///\n\n /// Resolves once the mesh has exited.\n\n #[inline]\n\n pub async fn serve(self, addr: SocketAddr) -> Result {\n\n self.serve_with_shutdown(addr, pending()).await\n", "file_path": "src/overlay.rs", "rank": 58, "score": 22.611311019914226 }, { "content": "// Copyright 2020 nytopop (Eric Izoita)\n\n//\n\n// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or\n\n// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or\n\n// http://opensource.org/licenses/MIT>, at your option. This file may not be\n\n// copied, modified, or distributed except according to those terms.\n\n//! Logic for cluster initialization and partition detection.\n\nuse super::{\n\n cut::{self, MultiNodeCut, Subscription},\n\n proto::{\n\n membership_client::MembershipClient, Endpoint, EndpointError, Join, JoinReq, JoinResp,\n\n NodeId, NodeMetadata, PreJoinReq, PreJoinResp,\n\n },\n\n Cluster, State,\n\n};\n\nuse futures::{\n\n future::TryFutureExt,\n\n stream::{FuturesUnordered, StreamExt},\n\n};\n\nuse log::{info, warn};\n\nuse std::{borrow::Cow, cmp, sync::Arc, time::Duration};\n\nuse thiserror::Error;\n\nuse tokio::time::{error::Elapsed, sleep, timeout};\n\nuse tonic::transport;\n\n\n\n#[derive(Debug, Error)]\n", "file_path": "src/cluster/bootstrap.rs", "rank": 59, "score": 22.462344757500034 }, { "content": "derive_cmp_with!(Rank, r => (r.round, r.node_idx));\n\n\n\n#[derive(Debug, Error)]\n\npub enum EndpointError {\n\n #[error(\"invalid uri: {:?}\", .0)]\n\n InvalidUri(#[from] http::uri::InvalidUri),\n\n\n\n #[error(\"invalid socketaddr: {:?}\", .0)]\n\n InvalidSocketAddr(#[from] SocketAddrError),\n\n\n\n #[error(\"invalid tls conf: {:?}\", .0)]\n\n InvalidTls(#[from] transport::Error),\n\n}\n\n\n\nimpl TryFrom<&Endpoint> for transport::Endpoint {\n\n type Error = EndpointError;\n\n\n\n fn try_from(e: &Endpoint) -> Result<Self, Self::Error> {\n\n let addr: SocketAddr = e.try_into()?;\n\n\n", "file_path": "src/cluster/proto.rs", "rank": 60, "score": 22.01582766061359 }, { "content": " return;\n\n }\n\n\n\n struct Src(Mutex<HashSet<Vec<u8>>>);\n\n\n\n #[crate::async_trait]\n\n impl Source for Src {\n\n async fn get(&self, key: &[u8]) -> Result<Vec<u8>, Status> {\n\n let mut seen = self.0.lock().await;\n\n assert!(seen.insert(key.to_vec()), \"key was fetched twice :(\");\n\n Ok(key.to_vec())\n\n }\n\n }\n\n\n\n let cache = Cache::new(keys.len(), Src(Mutex::default()));\n\n\n\n for key in keys.into_iter() {\n\n assert_eq!(key, cache.get(key.clone()).await.unwrap());\n\n }\n\n }\n\n}\n", "file_path": "src/service/cache.rs", "rank": 61, "score": 21.32154032657408 }, { "content": " let conf_id = r1.conf_id;\n\n\n\n let p2j_req = |ring| JoinReq {\n\n sender: self.local_node(),\n\n ring: ring as u64,\n\n uuid: state.uuid.clone(),\n\n conf_id,\n\n meta: self.cfg.meta.clone(),\n\n };\n\n\n\n let mut joins = (r1.contact.into_iter())\n\n .enumerate()\n\n .map(move |(ring, observer)| (p2j_req(ring), observer))\n\n .map(|(req, observer)| self.join_phase2(req, observer))\n\n .collect::<FuturesUnordered<_>>();\n\n\n\n let mut e = None;\n\n while let Some(resp) = joins.next().await {\n\n match resp {\n\n Ok(resp) => return Ok(resp),\n", "file_path": "src/cluster/bootstrap.rs", "rank": 62, "score": 20.980274657070396 }, { "content": " self.last_cut = None;\n\n }\n\n\n\n /// Add `node` to the active configuration.\n\n fn join_node(&mut self, node: Endpoint, Join { uuid, meta }: Join) {\n\n assert!(self.nodes.insert(node.clone()));\n\n assert!(self.uuids.insert(uuid));\n\n assert!(self.metadata.insert(node, meta).is_none());\n\n }\n\n\n\n /// Remove `node` from the active configuration.\n\n fn kick_node(&mut self, node: &Endpoint) -> Metadata {\n\n assert!(self.nodes.remove(node));\n\n self.metadata.remove(node).unwrap()\n\n }\n\n\n\n /// Re-hash the active configuration, returning (and setting) its id.\n\n fn rehash_config(&mut self) -> u64 {\n\n let mut h = FnvHasher::default();\n\n\n", "file_path": "src/cluster/mod.rs", "rank": 63, "score": 20.84863209453111 }, { "content": "};\n\n\n\nimpl Cluster {\n\n /// Run the fault detector until the cluster is brought down.\n\n pub(crate) async fn detect_faults(self: Arc<Self>, mut cuts: Subscription) -> cut::Result {\n\n loop {\n\n select! {\n\n _ = self.spin_fd_probes() => {}\n\n cut = cuts.recv() => { cut?; }\n\n }\n\n }\n\n }\n\n\n\n /// Initialize a fault detection round and continuously probe all observed subjects. Edge\n\n /// failures are reported to the rest of the cluster if the configured number of successive\n\n /// faults is encountered.\n\n ///\n\n /// Resolves (and should be restarted) when the next view-change proposal is accepted.\n\n async fn spin_fd_probes(self: &Arc<Self>) {\n\n let (conf_id, mut subjects) = async {\n", "file_path": "src/cluster/faultdetect.rs", "rank": 64, "score": 20.333358680331784 }, { "content": "// Copyright 2020 nytopop (Eric Izoita)\n\n//\n\n// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or\n\n// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or\n\n// http://opensource.org/licenses/MIT>, at your option. This file may not be\n\n// copied, modified, or distributed except according to those terms.\n\n//! Batteries-included grpc service mesh.\n\nuse super::cluster::{\n\n cut::{Closed, Subscription},\n\n Cluster, Config,\n\n};\n\n\n\nuse futures::{\n\n future::{pending, FutureExt, TryFutureExt},\n\n stream::{FuturesUnordered, StreamExt},\n\n};\n\nuse std::{error, future::Future, net::SocketAddr, result, sync::Arc, time::Duration};\n\nuse thiserror::Error;\n\nuse tokio::select;\n\nuse tonic::{\n", "file_path": "src/overlay.rs", "rank": 65, "score": 20.240260879893267 }, { "content": " Err(SocketAddrError::InvalidPort(*port))\n\n } else if host.len() == 4 {\n\n Ok(SocketAddr::new(\n\n IpAddr::from(<[u8; 4]>::try_from(host.as_slice()).unwrap()),\n\n *port as u16,\n\n ))\n\n } else if host.len() == 16 {\n\n Ok(SocketAddr::new(\n\n IpAddr::from(<[u8; 16]>::try_from(host.as_slice()).unwrap()),\n\n *port as u16,\n\n ))\n\n } else {\n\n Err(SocketAddrError::InvalidLen(host.len()))\n\n }\n\n }\n\n}\n\n\n\nimpl From<SocketAddr> for Endpoint {\n\n fn from(addr: SocketAddr) -> Self {\n\n let host = match addr {\n", "file_path": "src/cluster/proto.rs", "rank": 66, "score": 20.021467436983187 }, { "content": "\n\n #[inline]\n\n pub async fn serve(self, addr: SocketAddr) -> Result {\n\n self.serve_with_shutdown(addr, pending()).await\n\n }\n\n\n\n pub async fn serve_with_shutdown<F>(self, addr: SocketAddr, signal: F) -> Result\n\n where F: Future<Output = ()> + Send {\n\n let cluster = Arc::new(Cluster::new(self.cfg, addr));\n\n\n\n select! {\n\n r = self.svcs.into_iter()\n\n .map(|s| s.accept(cluster.subscribe()))\n\n .collect::<FuturesUnordered<_>>()\n\n .for_each(|_| async {})\n\n .then(|_| pending()) => r,\n\n\n\n r = Arc::clone(&cluster)\n\n .detect_faults(cluster.subscribe())\n\n .err_into() => r,\n", "file_path": "src/overlay.rs", "rank": 67, "score": 19.81643256938108 }, { "content": "use cut::{Member, MultiNodeCut, Subscription};\n\nuse proto::{\n\n broadcast_req::{Broadcasted::*, *},\n\n membership_client::*,\n\n membership_server::*,\n\n *,\n\n};\n\n\n\nuse fnv::FnvHasher;\n\nuse futures::future::TryFutureExt;\n\nuse log::{error, info, warn};\n\nuse rand::{thread_rng, Rng};\n\nuse std::{\n\n collections::{hash_map::Entry, BTreeMap, BTreeSet, HashMap, HashSet},\n\n convert::{TryFrom, TryInto},\n\n future::Future,\n\n hash::{Hash, Hasher},\n\n mem,\n\n net::SocketAddr,\n\n pin::Pin,\n", "file_path": "src/cluster/mod.rs", "rank": 68, "score": 19.73642199696372 }, { "content": " #[rustfmt::skip]\n\n let JoinReq { sender, ring, uuid, conf_id, meta } = req.into_inner();\n\n sender.validate()?;\n\n let mut state = self.state.write().await;\n\n\n\n state.verify_config(conf_id)?;\n\n state.verify_unused_host(&sender)?;\n\n state.verify_unused_uuid(&uuid)?;\n\n state.verify_ring(&self.local_node(), &sender, ring)?;\n\n\n\n self.enqueue_edge(\n\n &mut state,\n\n Edge {\n\n node: sender.clone(),\n\n ring,\n\n join: Some(Join { uuid, meta }),\n\n },\n\n );\n\n\n\n match state.join_requests.get(&sender) {\n", "file_path": "src/cluster/mod.rs", "rank": 69, "score": 19.67585733847595 }, { "content": " fn resolve_endpoint(&self, e: &Endpoint) -> Result<transport::Endpoint, EndpointError> {\n\n if !e.tls {\n\n return e.try_into();\n\n }\n\n\n\n let tls = (self.cfg.client_tls)\n\n .as_deref()\n\n .cloned()\n\n .unwrap_or_else(ClientTlsConfig::new);\n\n\n\n Ok(transport::Endpoint::try_from(e)?.tls_config(tls)?)\n\n }\n\n\n\n /// Apply a view-change proposal to `state`. This will propagate the view-change to any\n\n /// subscribed tasks, and unblock any joining nodes in the proposal (if we're the ones\n\n /// handling their join request).\n\n fn apply_view_change(&self, state: &mut State, proposal: Vec<Endpoint>) {\n\n let mut joined = Vec::with_capacity(proposal.len());\n\n let mut kicked = Vec::with_capacity(proposal.len());\n\n\n", "file_path": "src/cluster/mod.rs", "rank": 70, "score": 19.11754353879861 }, { "content": " state.clear_consensus();\n\n state.clear_membership();\n\n\n\n self.bootstrap(&mut state);\n\n }\n\n }\n\n\n\n /// Boostrap a new cluster. This will not reset any membership state, and should only\n\n /// be called with a blank [State].\n\n fn bootstrap(&self, state: &mut State) {\n\n // NOTE(invariant): must only be called with completely cleared state\n\n assert!(state.nodes.is_empty());\n\n assert!(state.uuids.is_empty());\n\n assert!(state.metadata.is_empty());\n\n assert!(state.last_cut.is_none());\n\n\n\n let node = Endpoint::from(self.addr).tls(self.cfg.server_tls);\n\n let uuid = state.uuid.clone();\n\n let meta = self.cfg.meta.clone();\n\n\n", "file_path": "src/cluster/bootstrap.rs", "rank": 71, "score": 18.836142859989113 }, { "content": "/// # #[tokio::main]\n\n/// # async fn main() -> Result<(), Status> {\n\n/// let loads = AtomicUsize::new(0);\n\n/// let c = Cache::from_fn(32, move |key| {\n\n/// assert_eq!(0, loads.swap(1, SeqCst));\n\n/// key.into()\n\n/// });\n\n///\n\n/// let val = c.get(\"test key\").await?;\n\n/// assert_eq!(&*val, b\"test key\");\n\n/// let val = c.get(\"test key\").await?;\n\n/// assert_eq!(&*val, b\"test key\");\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\n///\n\n/// [Mesh]: crate::Mesh\n\npub struct Cache<S: ?Sized = dyn Source>(Arc<Inner<S>>);\n\n\n", "file_path": "src/service/cache.rs", "rank": 72, "score": 18.74039075266416 }, { "content": "use proto::{cache_client::CacheClient, cache_server::CacheServer, Key, Value};\n\nuse rand::{thread_rng, Rng};\n\nuse std::{\n\n cmp,\n\n collections::{hash_map::Entry, HashMap},\n\n net::SocketAddr,\n\n sync::Arc,\n\n};\n\nuse tokio::sync::{Mutex, RwLock, Semaphore};\n\nuse tonic::{transport::Channel, Request, Response, Status};\n\n\n\n/// A type that can produce a binary value, given a key.\n\n#[crate::async_trait]\n", "file_path": "src/service/cache.rs", "rank": 73, "score": 18.69969897500936 }, { "content": " broadcasted: Some(msg),\n\n });\n\n\n\n if let Err(e) = self.broadcast(req).await {\n\n warn!(\"broadcast failed: {}\", e);\n\n }\n\n }\n\n\n\n async fn begin_px_round(self: Arc<Self>, px: PaxosRound) {\n\n fn fnv_hash<T: Hash>(val: T) -> u64 {\n\n let mut h = FnvHasher::default();\n\n val.hash(&mut h);\n\n h.finish()\n\n }\n\n\n\n #[rustfmt::skip]\n\n let PaxosRound { sender, conf_id, .. } = px.init_delay().await;\n\n\n\n let rank = {\n\n let mut state = self.state.write().await;\n", "file_path": "src/cluster/mod.rs", "rank": 74, "score": 18.501823646125647 }, { "content": "async fn load(cache: &Mutex<Cache2q<Bytes, Bytes>>, key: &[u8]) -> Option<Bytes> {\n\n cache.lock().await.get(key).cloned()\n\n}\n\n\n\n/// Store a key/value pair in the cache.\n\n#[inline]\n\nasync fn store(cache: &Mutex<Cache2q<Bytes, Bytes>>, key: Bytes, buf: Bytes) {\n\n cache.lock().await.insert(key, buf);\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::collections::HashSet;\n\n\n\n /// Tests to make sure that values returned from a call to [Cache::get] are actually\n\n /// cached on subsequent calls.\n\n #[quickcheck_async::tokio]\n\n async fn values_are_cached(keys: HashSet<Vec<u8>>) {\n\n if keys.is_empty() {\n", "file_path": "src/service/cache.rs", "rank": 75, "score": 18.373819595421867 }, { "content": "\n\nimpl<T: Hash + Eq> FreqSet<T> {\n\n /// Insert an element into the set.\n\n ///\n\n /// Returns the updated number of times the element has been inserted.\n\n pub fn insert(&mut self, key: T) -> usize {\n\n *self.inner.entry(key).and_modify(|v| *v += 1).or_insert(1)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use quickcheck_macros::quickcheck;\n\n use std::{collections::HashSet, num::NonZeroUsize};\n\n\n\n #[quickcheck]\n\n fn counts_are_accurate(n: NonZeroUsize, input: HashSet<u32>) -> bool {\n\n let mut fs: FreqSet<u32> = input.iter().copied().collect();\n\n // let n = n.get();\n", "file_path": "src/collections/freqset.rs", "rank": 76, "score": 18.320987689029668 }, { "content": " sync::{Arc, Weak},\n\n task::{Context, Poll},\n\n time::Duration,\n\n};\n\nuse thiserror::Error;\n\nuse tokio::{\n\n sync::{broadcast, oneshot, RwLock},\n\n task,\n\n time::sleep,\n\n};\n\nuse tonic::{\n\n transport::{self, ClientTlsConfig},\n\n Code, Request, Response, Status,\n\n};\n\n\n\npub(crate) struct Config {\n\n pub lh: (usize, usize),\n\n pub k: usize,\n\n pub seed: Option<Endpoint>,\n\n pub meta: Metadata,\n\n pub server_tls: bool,\n\n pub client_tls: Option<Arc<ClientTlsConfig>>,\n\n pub fd_timeout: Duration,\n\n pub fd_strikes: usize,\n\n}\n\n\n", "file_path": "src/cluster/mod.rs", "rank": 77, "score": 18.15458262597511 }, { "content": " /// Retrieve the value associated with `key`. Unlike [Cache::get], this does _not_\n\n /// deduplicate requests for the same key.\n\n async fn get_inner(&self, key: Bytes) -> Result<Bytes, Status> {\n\n // check if key is already loaded in the hot cache.\n\n if let Some(buf) = load(&self.0.hot_keys, &key).await {\n\n return Ok(buf);\n\n }\n\n\n\n // check if key hashes onto another node.\n\n if let Some(shard) = self.lookup_shard(&key).await {\n\n let mut c = CacheClient::new(shard);\n\n\n\n let val = c.get(Key { key: key.to_vec() }).await?;\n\n let buf = Bytes::from(val.into_inner().buf);\n\n\n\n // store in the hot cache 1/8 of the time (space is limited).\n\n if thread_rng().gen_range(0, 8) == 4 {\n\n store(&self.0.hot_keys, key, buf.clone()).await;\n\n }\n\n\n", "file_path": "src/service/cache.rs", "rank": 78, "score": 17.719162647263406 }, { "content": " }\n\n\n\n if cut.members().is_empty() {\n\n self.initialize().await;\n\n continue;\n\n }\n\n\n\n self.join_via_backoff(|| Cow::Owned(cut.random_member().into()))\n\n .await;\n\n }\n\n }\n\n\n\n /// Initialize as if we just started up by attempting to join a seed node, or becoming\n\n /// a single node bootstrap cluster.\n\n async fn initialize(&self) {\n\n if let Some(seed) = self.cfg.seed.as_ref() {\n\n self.join_via_backoff(|| Cow::Borrowed(seed)).await;\n\n } else {\n\n let mut state = self.state.write().await;\n\n\n", "file_path": "src/cluster/bootstrap.rs", "rank": 79, "score": 17.4874238515157 }, { "content": " }\n\n}\n\n\n\nimpl Edge {\n\n pub const fn down(node: Endpoint, ring: u64) -> Self {\n\n Self {\n\n node,\n\n ring,\n\n join: None,\n\n }\n\n }\n\n}\n\n\n\nimpl Rank {\n\n pub const fn new(round: u32, node_idx: u64) -> Self {\n\n Self { round, node_idx }\n\n }\n\n\n\n pub const fn zero() -> Self {\n\n Self::new(0, 0)\n\n }\n\n\n\n pub const fn fast_round() -> Self {\n\n Self::new(1, 1)\n\n }\n\n}\n", "file_path": "src/cluster/proto.rs", "rank": 80, "score": 17.38266555038485 }, { "content": "\n\nimpl<T: Ord + Hash + Clone, S: BuildHasher> Tumbler<T, S> {\n\n /// Create a new tumbler with `size` rings and the provided `hasher`.\n\n ///\n\n /// # Panics\n\n /// Panics if `size == 0`.\n\n pub fn with_hasher(size: usize, hasher: S) -> Self {\n\n assert!(size >= 1);\n\n\n\n let mut rings = Vec::with_capacity(size);\n\n rings.resize_with(size, Default::default);\n\n Self { hasher, rings }\n\n }\n\n\n\n fn hash(&self, seed: usize, val: &T) -> u64 {\n\n let mut h = self.hasher.build_hasher();\n\n seed.hash(&mut h);\n\n val.hash(&mut h);\n\n h.finish()\n\n }\n", "file_path": "src/collections/tumbler.rs", "rank": 81, "score": 17.325190270417487 }, { "content": " /// Initiate phase 2 of the join protocol with the provided `observer`.\n\n async fn join_phase2(&self, req: JoinReq, observer: Endpoint) -> Result<JoinResp, JoinError> {\n\n let observer = self.resolve_endpoint(&observer)?;\n\n\n\n let mut c = MembershipClient::connect(observer)\n\n .map_err(|e| JoinError::Phase2(e.into()))\n\n .await?;\n\n\n\n (c.join(req).map_ok(|r| r.into_inner()))\n\n .map_err(|e| JoinError::Phase2(e.into()))\n\n .await\n\n }\n\n}\n", "file_path": "src/cluster/bootstrap.rs", "rank": 82, "score": 17.167881183015396 }, { "content": " #[error(\"mesh: {}\", .0)]\n\n Transport(#[from] transport::Error),\n\n\n\n /// An error encountered when an internal task exits unexpectedly.\n\n #[error(\"mesh: task closed\")]\n\n Closed(#[from] Closed),\n\n}\n\n\n\n/// Specifies observer/subject thresholds for the cut detector.\n\n#[derive(Copy, Clone, Debug)]\n\npub struct CutDetectorConfig {\n\n /// Threshold of reports required to place a subject into unstable report mode.\n\n ///\n\n /// Must be non-zero, and less than or equal to `stable_threshold`.\n\n ///\n\n /// Defaults to 4.\n\n pub unstable_threshold: usize,\n\n\n\n /// Threshold of reports required to place a subject into stable report mode.\n\n ///\n", "file_path": "src/overlay.rs", "rank": 83, "score": 17.12374293958676 }, { "content": " SocketAddr::V4(s) => s.ip().octets().to_vec(),\n\n SocketAddr::V6(s) => s.ip().octets().to_vec(),\n\n };\n\n\n\n Self {\n\n host,\n\n port: addr.port() as u32,\n\n tls: false,\n\n }\n\n }\n\n}\n\n\n\nimpl From<(SocketAddr, bool)> for Endpoint {\n\n #[inline]\n\n fn from((addr, tls): (SocketAddr, bool)) -> Self {\n\n Self::from(addr).tls(tls)\n\n }\n\n}\n\n\n\nimpl fmt::Display for Endpoint {\n", "file_path": "src/cluster/proto.rs", "rank": 84, "score": 17.08102749848353 }, { "content": " .serve(addr_in(net, 1));\n\n task::spawn(s1);\n\n\n\n let (mut h2, hs2) = cfg_handle();\n\n let s2 = Mesh::low_latency()\n\n .add_mesh_service(hs2)\n\n .add_metadata(vec![(\"this key is s2\".to_owned(), b\"s2\".to_vec())])\n\n .join_seed(addr_in(net, 1), false)\n\n .serve(addr_in(net, 2));\n\n task::spawn(s2);\n\n\n\n let (c1, c2) = join(h1.cfg_change(2), h2.cfg_change(2)).await;\n\n\n\n assert!(c1.conf_id() == c2.conf_id());\n\n\n\n for m in c1.members().into_iter().chain(c2.members().into_iter()) {\n\n if m.addr() == addr_in(net, 1) {\n\n assert_eq!(b\"s1\".as_ref(), &*m.metadata()[\"this key is s1\"]);\n\n } else {\n\n assert_eq!(b\"s2\".as_ref(), &*m.metadata()[\"this key is s2\"]);\n", "file_path": "tests/membership.rs", "rank": 85, "score": 16.78024301458806 }, { "content": "\n\nimpl Cluster {\n\n pub(crate) fn new(cfg: Config, addr: SocketAddr) -> Self {\n\n let state = Arc::new(RwLock::new(State {\n\n uuid: NodeId::generate(),\n\n conf_id: 0,\n\n nodes: Tumbler::new(cfg.k),\n\n uuids: BTreeSet::new(),\n\n metadata: HashMap::new(),\n\n last_cut: None,\n\n\n\n bcast_filter: EventFilter::new(Duration::from_secs(3600)),\n\n\n\n join_requests: HashMap::new(),\n\n\n\n cd_batch: AlertBatch::default(),\n\n cd_joiners: HashMap::new(),\n\n cd_reports: BTreeMap::new(),\n\n\n\n fpx_announced: false,\n", "file_path": "src/cluster/mod.rs", "rank": 86, "score": 16.47215255869354 }, { "content": " }\n\n\n\n /// Consume this [Mesh], creating a future that will run on a tokio executor.\n\n ///\n\n /// Shutdown will be initiated when `signal` resolves.\n\n ///\n\n /// Resolves once the mesh has exited.\n\n pub async fn serve_with_shutdown<F>(mut self, addr: SocketAddr, signal: F) -> Result\n\n where F: Future<Output = ()> + Send {\n\n let cluster = Arc::new(Cluster::new(self.cfg, addr));\n\n\n\n select! {\n\n r = self.svcs.into_iter()\n\n .map(|s| s.accept(cluster.subscribe()))\n\n .collect::<FuturesUnordered<_>>()\n\n .for_each(|_| async {})\n\n .then(|_| pending()) => r,\n\n\n\n r = Arc::clone(&cluster)\n\n .detect_faults(cluster.subscribe())\n", "file_path": "src/overlay.rs", "rank": 87, "score": 16.42639643701755 }, { "content": " Err(err) => e = Some(err),\n\n }\n\n }\n\n\n\n Err(e.unwrap_or(JoinError::NoObservers))\n\n }\n\n\n\n /// Initiate phase 1 of the join protocol via the provided endpoint.\n\n async fn join_phase1(&self, req: PreJoinReq, via: &Endpoint) -> Result<PreJoinResp, JoinError> {\n\n let seed = self.resolve_endpoint(via)?;\n\n\n\n let mut c = MembershipClient::connect(seed)\n\n .map_err(|e| JoinError::Phase1(e.into()))\n\n .await?;\n\n\n\n (c.pre_join(req).map_ok(|r| r.into_inner()))\n\n .map_err(|e| JoinError::Phase1(e.into()))\n\n .await\n\n }\n\n\n", "file_path": "src/cluster/bootstrap.rs", "rank": 88, "score": 16.340368086182885 }, { "content": "\n\n #[allow(clippy::type_complexity)]\n\n type IntoIter = Map<IntoIter<(u64, T)>, fn((u64, T)) -> T>;\n\n\n\n fn into_iter(mut self) -> Self::IntoIter {\n\n mem::replace(&mut self.rings[0], BTreeSet::new())\n\n .into_iter()\n\n .map(|(_, t)| t)\n\n }\n\n}\n\n\n\nimpl<T: Ord + Hash + Clone> Tumbler<T> {\n\n /// Create a new tumbler with `size` rings.\n\n ///\n\n /// # Panics\n\n /// Panics if `size == 0`.\n\n pub fn new(size: usize) -> Self {\n\n Self::with_hasher(size, Default::default())\n\n }\n\n}\n", "file_path": "src/collections/tumbler.rs", "rank": 89, "score": 15.984541251672786 }, { "content": " #[inline]\n\n async fn accept(self: Box<Self>, _: Subscription) {}\n\n}\n\n\n\nimpl<S> ExposedService for GrpcService<S>\n\nwhere S: Service<HttpRequest<Body>, Response = HttpResponse<BoxBody>>\n\n + NamedService\n\n + Clone\n\n + Send\n\n + 'static\n\n{\n\n type Service = S;\n\n\n\n #[inline]\n\n fn into_service(self) -> Self::Service {\n\n self.svc\n\n }\n\n}\n\n\n\n/// A service that has access to accepted membership view-change proposals.\n", "file_path": "src/overlay.rs", "rank": 90, "score": 15.920724524274627 }, { "content": " lazy.val.get().unwrap().clone()\n\n }\n\n }\n\n }\n\n\n\n /// Start a coordinated load operation.\n\n #[inline]\n\n async fn liftoff(&self, key: Bytes) -> Flight {\n\n match self.0.inflight.lock().await.entry(key) {\n\n // we must set a value and notify any followers.\n\n Entry::Vacant(v) => Flight::Leader(Arc::clone(v.insert(Arc::new(Lazy {\n\n sem: Semaphore::new(0),\n\n val: OnceCell::new(),\n\n })))),\n\n\n\n // we can read the value when leader notifies us.\n\n Entry::Occupied(o) => Flight::Follower(Arc::clone(o.get())),\n\n }\n\n }\n\n\n", "file_path": "src/service/cache.rs", "rank": 91, "score": 15.84143178116086 }, { "content": "impl Ord for Id {\n\n fn cmp(&self, other: &Self) -> Ordering {\n\n (self.unix, self.uniq)\n\n .cmp(&(other.unix, other.uniq))\n\n .reverse()\n\n }\n\n}\n\n\n\nimpl Id {\n\n /// Reconstruct an event id from a timestamp and unique identifier.\n\n pub const fn new(unix: u64, uniq: u64) -> Self {\n\n Self { unix, uniq }\n\n }\n\n\n\n /// Generates a new event id.\n\n pub fn generate() -> Self {\n\n Self::new(unixtime(), random())\n\n }\n\n\n\n /// Returns an event id that is ~always expired.\n", "file_path": "src/collections/event.rs", "rank": 92, "score": 15.787412568539331 }, { "content": " }\n\n }\n\n}\n\n\n\n/// Tests that all members in a three node configuration agree on each other's metadata, even\n\n/// if a member leaves the cluster and immediately rejoins with changed metadata.\n\n#[tokio::test]\n\nasync fn three_node_cluster_metadata_rejoin_consensus() {\n\n init_logger();\n\n let net = subnet();\n\n\n\n let (mut h1, hs1) = cfg_handle();\n\n let mut s1 = Mesh::low_latency()\n\n .add_mesh_service(hs1)\n\n .add_metadata(vec![(\"this key is s1\".to_owned(), b\"s1\".to_vec())])\n\n .serve(addr_in(net, 1))\n\n .boxed();\n\n\n\n let (mut h2, hs2) = cfg_handle();\n\n let mut s2 = Mesh::low_latency()\n", "file_path": "tests/membership.rs", "rank": 93, "score": 15.759746424822996 }, { "content": "}\n\n\n\nimpl<S> GrpcService<S>\n\nwhere\n\n S: Service<HttpRequest<Body>, Response = HttpResponse<BoxBody>>\n\n + NamedService\n\n + Clone\n\n + Send\n\n + 'static,\n\n S::Future: Send + 'static,\n\n S::Error: Into<Box<dyn error::Error + Send + Sync>> + Send,\n\n{\n\n /// Wrap a type that implements [Service].\n\n pub fn new(svc: S) -> Self {\n\n Self { svc }\n\n }\n\n}\n\n\n\n#[crate::async_trait]\n\nimpl<S: Send> MeshService for GrpcService<S> {\n", "file_path": "src/overlay.rs", "rank": 94, "score": 15.733321298280176 }, { "content": " assert!(c1.conf_id() == c2.conf_id());\n\n assert!(c2.conf_id() == c3.conf_id());\n\n\n\n for m in c1.members().into_iter()\n\n .chain(c2.members().into_iter())\n\n .chain(c3.members().into_iter())\n\n {\n\n if m.addr() == addr_in(net, 1) {\n\n assert_eq!(b\"s1\".as_ref(), &*m.metadata()[\"this key is s1\"]);\n\n } else if m.addr() == addr_in(net, 2) {\n\n assert_eq!(b\"s2\".as_ref(), &*m.metadata()[\"this key is s2\"]);\n\n } else {\n\n assert_eq!(b\"s3-original\".as_ref(), &*m.metadata()[\"this key is s3\"]);\n\n }\n\n }\n\n }\n\n }\n\n\n\n // re-create s3 from scratch, with a new uuid and different metadata\n\n let (mut h3, hs3) = cfg_handle();\n", "file_path": "tests/membership.rs", "rank": 95, "score": 15.63680147528283 }, { "content": "\n\n/// Tests that key distribution amongst multiple nodes is stable when individual cache\n\n/// nodes evict keys.\n\n#[tokio::test]\n\nasync fn key_placement_is_stable() {\n\n init_logger();\n\n let net = subnet();\n\n\n\n let a = Cache::from_fn(1, |_| b\"a\".to_vec());\n\n let (mut ha, hsa) = cfg_handle();\n\n let af = Mesh::low_latency()\n\n .add_mesh_service(hsa)\n\n .add_service(a.clone())\n\n .serve(addr_in(net, 1));\n\n\n\n let b = Cache::from_fn(1, |_| b\"b\".to_vec());\n\n let (mut hb, hsb) = cfg_handle();\n\n let bf = Mesh::low_latency()\n\n .add_mesh_service(hsb)\n\n .add_service(b.clone())\n", "file_path": "tests/cache.rs", "rank": 96, "score": 15.626344100500987 }, { "content": " body::BoxBody,\n\n codegen::{\n\n http::{HeaderMap, Request as HttpRequest, Response as HttpResponse},\n\n Service,\n\n },\n\n transport::{\n\n self,\n\n server::{Router, Unimplemented},\n\n Body, ClientTlsConfig, NamedService, Server, ServerTlsConfig,\n\n },\n\n};\n\nuse tracing::Span;\n\n\n\n/// A `Result<(), Error>`.\n\npub type Result = result::Result<(), Error>;\n\n\n\n/// An error which causes a mesh to exit.\n\n#[derive(Debug, Error)]\n\npub enum Error {\n\n /// An error encountered in the grpc transport.\n", "file_path": "src/overlay.rs", "rank": 97, "score": 15.528629679182734 }, { "content": " A::Future: Send + 'static,\n\n A::Error: Into<Box<dyn error::Error + Send + Sync>> + Send,\n\n B: Service<HttpRequest<Body>, Response = HttpResponse<BoxBody>> + Clone + Send + 'static,\n\n B::Future: Send + 'static,\n\n B::Error: Into<Box<dyn error::Error + Send + Sync>> + Send,\n\n{\n\n pub fn add_service<S>(self, svc: S) -> Mesh<Router<S::Service, impl Service<HttpRequest<Body>>>>\n\n where\n\n S: ExposedService + 'static,\n\n <<S as ExposedService>::Service as Service<HttpRequest<Body>>>::Future: Send + 'static,\n\n <<S as ExposedService>::Service as Service<HttpRequest<Body>>>::Error:\n\n Into<Box<dyn error::Error + Send + Sync>> + Send,\n\n {\n\n #[rustfmt::skip]\n\n let Mesh { mut cfg, grpc, svcs } = self.add_mesh_service(svc.clone());\n\n svc.add_metadata(&mut *cfg.meta);\n\n let grpc = grpc.add_service(svc.into_service());\n\n\n\n Mesh { cfg, grpc, svcs }\n\n }\n", "file_path": "src/overlay.rs", "rank": 98, "score": 15.527036807507978 }, { "content": "\n\n state.verify_unused_host(&sender)?;\n\n state.verify_unused_uuid(&uuid)?;\n\n\n\n let resp = PreJoinResp {\n\n sender: self.local_node(),\n\n conf_id: state.conf_id,\n\n contact: state.nodes.predecessors(&sender).cloned().collect(),\n\n };\n\n\n\n Ok(Response::new(resp))\n\n }\n\n\n\n /// Handle a join request (phase 2 of the join protocol).\n\n ///\n\n /// The sender has (presumably) already performed a pre-join request, and received a\n\n /// list of observers to contact (of which we are ~one (presumably)).\n\n ///\n\n /// Our role here is to notify the rest of the cluster that sender wants to join.\n\n async fn join(&self, req: Request<JoinReq>) -> GrpcResponse<JoinResp> {\n", "file_path": "src/cluster/mod.rs", "rank": 99, "score": 15.250986772799893 } ]
Rust
src/interpreter/internal.rs
wareya/gammakit
63d217f8ebc291844b8df69db12a03b9c906bef4
use crate::interpreter::*; impl Interpreter { pub (super) fn stack_len(&mut self) -> usize { self.top_frame.len() } pub (super) fn stack_pop_val(&mut self) -> Option<Value> { self.top_frame.pop_val() } pub (super) fn stack_pop_var(&mut self) -> Option<Variable> { self.top_frame.pop_var() } pub (super) fn stack_pop(&mut self) -> Option<StackValue> { self.top_frame.pop() } pub (super) fn stack_pop_as_val(&mut self) -> Option<Value> { match self.top_frame.pop() { Some(StackValue::Var(x)) => self.evaluate_value(x).ok(), Some(StackValue::Val(x)) => Some(x), _ => None } } pub (super) fn stack_push_val(&mut self, value : Value) { self.top_frame.push_val(value) } pub (super) fn stack_push_var(&mut self, variable : Variable) { self.top_frame.push_var(variable) } pub (super) fn stack_push(&mut self, stackvalue : StackValue) { self.top_frame.push(stackvalue) } fn call_arrow_function(&mut self, subfuncval : SubFuncVal, args : Vec<Value>, isexpr : bool) -> Result<(), String> { if let Some(binding) = self.get_trivial_arrow_binding(subfuncval.name) { match subfuncval.source { StackValue::Val(val) => { let ret = binding(ValueLoc::Static(val), args)?; if isexpr { self.stack_push_val(ret); } } StackValue::Var(source) => { let val = self.evaluate(source)?; let ret = binding(val, args)?; if isexpr { self.stack_push_val(ret); } } }; } else if let Some(binding_wrapper) = self.get_arrow_binding(subfuncval.name) { let binding = &mut *binding_wrapper.try_borrow_mut().or_else(|_| plainerr("error: tried to borrow internal function while it was borrowed elsewhere"))?; match subfuncval.source { StackValue::Val(val) => { let ret = binding(ValueLoc::Static(val), args)?; if isexpr { self.stack_push_val(ret); } } StackValue::Var(source) => { let val = self.evaluate(source)?; let ret = binding(val, args)?; if isexpr { self.stack_push_val(ret); } } }; } else { return Err(format!("error: no such arrow function `{}`", subfuncval.name)) } Ok(()) } pub (super) fn handle_func_call_or_expr(&mut self, isexpr : bool) -> Result<(), String> { let argcount = self.read_usize(); if cfg!(stack_len_debugging) && argcount+1 > self.stack_len() { return plainerr("internal error: fewer values on stack than expected in FUNCEXPR/FUNCCALL"); } let mut args = vec!(Value::Null; argcount); for i in argcount-1..=0 { args[i] = self.stack_pop_val().ok_or_else(|| minierr("internal error: expected values, got variable on stack in FUNCEXPR/FUNCCALL"))?; } let funcdata = self.stack_pop_as_val().ok_or_else(|| minierr("internal error: not enough values on stack to run instruction FUNCEXPR/FUNCCALL (after args)"))?; match funcdata { Value::Func(funcdata) => self.call_function(funcdata, args, isexpr)?, Value::InternalFunc(funcdata) => self.call_internal_function(funcdata, args, isexpr)?, Value::SubFunc(subfuncval) => self.call_arrow_function(*subfuncval, args, isexpr)?, _ => return Err(format!("internal error: value meant to hold function data in FUNCEXPR/FUNCCALL was not holding function data; {:?}", funcdata)) } Ok(()) } }
use crate::interpreter::*; impl Interpreter { pub (super) fn stack_len(&mut self) -> usize { self.top_frame.len() } pub (super) fn stack_pop_val(&mut self) -> Option<Value> { self.top_frame.pop_val() } pub (super) fn stack_pop_var(&mut self) -> Option<Variable> { self.top_frame.pop_var() } pub (super) fn stack_pop(&mut self) -> Option<StackValue> { self.top_frame.pop() } pub (super) fn stack_pop_as_val(&mut self) -> Option<Value> { match self.top_frame.pop() { Some(StackValue::Var(x)) => self.evaluate_value(x).ok(), Some(StackValue::Val(x)) => Some(x), _ => None } } pub (super) fn stack_push_val(&mut self, value : Value) { self.top_frame.push_val(value) } pub (super) fn stack_push_var(&mut self, variable : Variable) { self.top_frame.push_var(variable) } pub (super) fn stack_push(&mut self, stackvalue : StackValue) { self.top_frame.push(stackvalue) } fn call_arrow_function(&mut self, subfuncval : SubFuncVal, args : Vec<Value>, isexpr : bool) -> Result<(), String> { if let Some(binding) = self.get_trivial_arrow_binding(subfuncval.name) { match subfuncval.source { StackValue::Val(val) => { let ret = binding(ValueLoc::Static(val), args)?; if isexpr { self.stack_push_val(ret); } } StackValue::Var(source) => { let val = self.evaluate(source)?; let ret = binding(val, args)?; if isexpr { self.stack_push_val(ret); } } }; } else if let Some(binding_wrapper) = self.get_arrow_binding(subfuncval.name) { let binding = &mut *binding_wrapper.try_borrow_mut().or_else(|_| plainerr("error: tried to borrow internal function while it was borrowed elsewhere"))?; match subfuncval.source { StackValue::Val(val) => { let ret = binding(ValueLoc::Static(val), args)?; if isexpr { self.stack_push_val(ret); } } StackValue::Var(source) => { let val = self.evaluate(source)?; let ret = binding(val, args)?; if isexpr { self.stack_push_val(ret); } } }; } else { return Err(format!("error: no such arrow function `{}`", subfuncval.name)) }
s not holding function data; {:?}", funcdata)) } Ok(()) } }
Ok(()) } pub (super) fn handle_func_call_or_expr(&mut self, isexpr : bool) -> Result<(), String> { let argcount = self.read_usize(); if cfg!(stack_len_debugging) && argcount+1 > self.stack_len() { return plainerr("internal error: fewer values on stack than expected in FUNCEXPR/FUNCCALL"); } let mut args = vec!(Value::Null; argcount); for i in argcount-1..=0 { args[i] = self.stack_pop_val().ok_or_else(|| minierr("internal error: expected values, got variable on stack in FUNCEXPR/FUNCCALL"))?; } let funcdata = self.stack_pop_as_val().ok_or_else(|| minierr("internal error: not enough values on stack to run instruction FUNCEXPR/FUNCCALL (after args)"))?; match funcdata { Value::Func(funcdata) => self.call_function(funcdata, args, isexpr)?, Value::InternalFunc(funcdata) => self.call_internal_function(funcdata, args, isexpr)?, Value::SubFunc(subfuncval) => self.call_arrow_function(*subfuncval, args, isexpr)?, _ => return Err(format!("internal error: value meant to hold function data in FUNCEXPR/FUNCCALL wa
random
[ { "content": "type CompilerBinding<'a> = fn(&mut CompilerState<'a>, &ASTNode) -> Result<(), String>;\n\n\n", "file_path": "src/compiler.rs", "rank": 0, "score": 132890.84490723212 }, { "content": "pub fn default_step_result() -> StepResult\n\n{\n\n Ok(())\n\n}\n\n/// Type signature of functions to be registered as bindings.\n\npub type Binding = dyn FnMut(&mut Interpreter, Vec<Value>) -> Result<Value, String>;\n\n/// For trivial bindings.\n\npub type TrivialBinding = fn(&mut Interpreter, Vec<Value>) -> Result<Value, String>;\n\n/// For simple bindings.\n\npub type SimpleBinding = dyn FnMut(Vec<Value>) -> Result<Value, String>;\n\n/// For trivial simple bindings.\n\npub type TrivialSimpleBinding = fn(Vec<Value>) -> Result<Value, String>;\n\n/// For arrow bindings.\n\npub type ArrowBinding = dyn FnMut(ValueLoc, Vec<Value>) -> Result<Value, String>;\n\n/// For trivial arrow bindings.\n\npub type TrivialArrowBinding = fn(ValueLoc, Vec<Value>) -> Result<Value, String>;\n\n\n", "file_path": "src/interpreter.rs", "rank": 1, "score": 132565.79911067852 }, { "content": "fn main() -> Result<(), String>\n\n{\n\n use std::time::Instant;\n\n let mut interpreter = Interpreter::new(Parser::new_from_default()?);\n\n interpreter.insert_default_bindings();\n\n interpreter.restart_full_of_nops(100_000_000);\n\n \n\n let start_time = Instant::now();\n\n \n\n let steps = interpreter.step_cached_until_error_or_exit().unwrap_or(0);\n\n if let Some(err) = &interpreter.last_error\n\n {\n\n panic!(\"{}\", err);\n\n }\n\n \n\n let duration = Instant::now().duration_since(start_time);\n\n println!(\"simulation took {:?}\", duration);\n\n println!(\"steps {:?}\", steps);\n\n println!(\"{:?} steps per second\", steps as f64 / (duration.as_micros() as f64 / 1000_000.0));\n\n println!(\"{:?} nanoseconds per step\", duration.as_micros() as f64 * 1000.0 / steps as f64);\n", "file_path": "examples/nop.rs", "rank": 2, "score": 128089.85693521783 }, { "content": "fn main() -> Result<(), String>\n\n{\n\n let mut interpreter = Interpreter::new(Parser::new_from_default()?);\n\n interpreter.insert_default_bindings();\n\n\n\n let program = include_str!(\"general.txt\").to_string();\n\n \n\n interpreter.restart_into_string(&program)?;\n\n \n\n File::create(\"bytecode_dump_general.bin\").unwrap().write_all(&interpreter.dump_code()).unwrap();\n\n \n\n interpreter.step_until_error_or_exit().ok();\n\n if let Some(err) = &interpreter.last_error\n\n {\n\n panic!(\"{}\", err);\n\n }\n\n \n\n Ok(())\n\n}", "file_path": "examples/general.rs", "rank": 3, "score": 128089.85693521783 }, { "content": "fn main() -> Result<(), String>\n\n{\n\n let mut interpreter = Interpreter::new(Parser::new_from_default()?);\n\n interpreter.insert_default_bindings();\n\n\n\n let program = include_str!(\"generator.txt\").to_string();\n\n \n\n interpreter.restart_into_string(&program)?;\n\n \n\n File::create(\"bytecode_dump_generator.bin\").unwrap().write_all(&interpreter.dump_code()).unwrap();\n\n \n\n interpreter.step_until_error_or_exit().ok();\n\n if let Some(err) = &interpreter.last_error\n\n {\n\n panic!(\"{}\", err);\n\n }\n\n \n\n Ok(())\n\n}", "file_path": "examples/generator.rs", "rank": 4, "score": 128089.85693521783 }, { "content": "fn main() -> Result<(), String>\n\n{\n\n use std::time::Instant;\n\n let mut interpreter = Interpreter::new(Parser::new_from_default()?);\n\n interpreter.insert_default_bindings();\n\n\n\n let program = include_str!(\"nbody.txt\").to_string();\n\n \n\n let start_time = Instant::now();\n\n interpreter.restart_into_string(&program)?;\n\n let duration = Instant::now().duration_since(start_time);\n\n println!(\"Compilation took {:?}\", duration);\n\n \n\n File::create(\"bytecode_dump_nbodies.bin\").unwrap().write_all(&interpreter.dump_code()).unwrap();\n\n \n\n let start_time = Instant::now();\n\n \n\n //let steps = interpreter.step_cached_until_error_or_exit().unwrap_or(0);\n\n let steps = interpreter.step_until_error_or_exit().unwrap_or(0);\n\n if let Some(err) = &interpreter.last_error\n", "file_path": "examples/nbody.rs", "rank": 5, "score": 128089.85693521783 }, { "content": "pub fn compile_bytecode<'a>(ast : &ASTNode, global : &'a mut GlobalState) -> Result<Code, String>\n\n{\n\n let mut state = CompilerState::new(global);\n\n let signal = state.compile_any(ast);\n\n state.trap_error(signal)?;\n\n Ok(state.code)\n\n}\n", "file_path": "src/compiler.rs", "rank": 6, "score": 123686.50954252025 }, { "content": "type OpFunc = fn(&mut Interpreter) -> StepResult;\n\n\n\n\n\n// interpreter state\n\n/// Interprets compiled bytecode.\n\npub struct Interpreter {\n\n top_frame: Frame,\n\n frames: Vec<Frame>,\n\n global: GlobalState,\n\n /// Last error returned by step(). Gets cleared (reset to None) when step() runs without returning an error.\n\n pub last_error: Option<String>,\n\n}\n\n\n\n#[cfg(feature = \"track_op_performance\")]\n\nstatic mut OP_MAP_HITS : [u64; 256] = [0; 256];\n\n#[cfg(feature = \"track_op_performance\")]\n\nstatic mut OP_MAP : [u64; 256] = [0; 256];\n\n#[cfg(feature = \"track_op_performance\")]\n\nstatic mut LAST_TIME : u64 = 0;\n\n\n", "file_path": "src/interpreter.rs", "rank": 7, "score": 120698.47435336518 }, { "content": "pub trait VecHelpers<Value> {\n\n /// Slow; use extract() instead\n\n fn pop_front(&mut self) -> Option<Value>;\n\n /// If the given element exists, extracts it by value, replacing what was there with Value::default()\n\n /// Otherwise returns None\n\n fn extract(&mut self, index : usize) -> Option<Value>;\n\n /// For numbers.\n\n fn extract_num(&mut self, index : usize) -> Result<f64, String>;\n\n /// Same as extract(), but returns Err(...message that the error should be unreachable...) on out-of-range.\n\n fn expect_extract(&mut self, index : usize) -> Result<Value, String>;\n\n}\n\n\n\nimpl VecHelpers<Value> for Vec<Value> {\n\n fn pop_front(&mut self) -> Option<Value>\n\n {\n\n if self.is_empty() { None } else { Some(self.remove(0)) }\n\n }\n\n fn extract(&mut self, index : usize) -> Option<Value>\n\n {\n\n if index < self.len()\n", "file_path": "src/interpreter/bindings.rs", "rank": 8, "score": 112010.01034648053 }, { "content": "#[inline]\n\nfn strange_err_plain<A, S : ToString>(text : S) -> Result<A, String>\n\n{\n\n #[cfg(feature = \"broken_compiler_debugging\")]\n\n {\n\n return Err(text.to_string());\n\n }\n\n panic!(text.to_string())\n\n}\n", "file_path": "src/interpreter/simulation.rs", "rank": 9, "score": 111892.66753965733 }, { "content": "fn plainerr<T>(mystr : &'static str) -> Result<T, String>\n\n{\n\n Err(minierr(mystr))\n\n}\n", "file_path": "src/interpreter.rs", "rank": 10, "score": 110794.69090204814 }, { "content": "#[inline]\n\nfn stack_access_err_err<A, S : ToString>(text : S) -> Result<A, String>\n\n{\n\n #[cfg(feature = \"stack_access_debugging\")]\n\n {\n\n return Err(text.to_string());\n\n }\n\n panic!(text.to_string())\n\n}\n", "file_path": "src/interpreter/simulation.rs", "rank": 11, "score": 108795.96901661297 }, { "content": "fn plainerr<T>(mystr : &str) -> Result<T, String>\n\n{\n\n Err(mystr.to_string())\n\n}\n\n\n", "file_path": "src/compiler.rs", "rank": 12, "score": 97222.27532114275 }, { "content": "fn plainerr<T>(mystr : &str) -> Result<T, String>\n\n{\n\n Err(minierr(mystr))\n\n}\n\n\n\nimpl GrammarForm\n\n{\n\n #[allow(clippy::new_ret_no_self)]\n\n pub (crate) fn new(line : &str, parser : &mut Parser, intoken : bool) -> Result<GrammarForm, String>\n\n {\n\n let re = &mut parser.internal_regexes;\n\n let mut ret = GrammarForm { tokens : Vec::new() };\n\n let tokens : Vec<&str> = line.split(' ').collect();\n\n for token in &tokens\n\n {\n\n if *token == \"\"\n\n {\n\n continue;\n\n }\n\n if *token == \">>?\"\n", "file_path": "src/grammar.rs", "rank": 13, "score": 97222.27532114275 }, { "content": "fn plainerr<T>(mystr : &str) -> Result<T, String>\n\n{\n\n Err(minierr(mystr))\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 14, "score": 97222.27532114275 }, { "content": "fn minierr(mystr : &'static str) -> String\n\n{\n\n mystr.to_string()\n\n}\n", "file_path": "src/interpreter.rs", "rank": 15, "score": 95005.44739312117 }, { "content": "#[inline]\n\nfn strange_err<S : ToString>(text : S) -> String\n\n{\n\n #[cfg(feature = \"broken_compiler_debugging\")]\n\n {\n\n return text.to_string();\n\n }\n\n panic!(text.to_string())\n\n}\n\n\n\npub (crate) static mut OPTABLE : [OpFunc; 256] = [Interpreter::sim_INVALID as OpFunc; 256];\n\npub (crate) static mut REVERSE_OPTABLE : Option<BTreeMap<usize, u8>> = None;\n\n\n\npub (crate) fn build_opfunc_table()\n\n{\n\n macro_rules! set { ( $x:ident, $y:ident ) => { unsafe { OPTABLE[$x as usize] = Interpreter::$y; } } }\n\n \n\n set!(NOP, sim_NOP);\n\n set!(PUSHFLT, sim_PUSHFLT);\n\n set!(PUSHSTR, sim_PUSHSTR);\n\n set!(PUSHNULL, sim_PUSHNULL);\n", "file_path": "src/interpreter/simulation.rs", "rank": 16, "score": 93384.76341634025 }, { "content": "#[inline]\n\nfn stack_access_err<S : ToString>(text : S) -> String\n\n{\n\n #[cfg(feature = \"stack_access_debugging\")]\n\n {\n\n return text.to_string();\n\n }\n\n panic!(text.to_string())\n\n}\n", "file_path": "src/interpreter/simulation.rs", "rank": 17, "score": 90681.9951221077 }, { "content": "fn minierr(mystr : &str) -> String\n\n{\n\n mystr.to_string()\n\n}\n\n\n", "file_path": "src/grammar.rs", "rank": 18, "score": 78594.24904081077 }, { "content": "fn minierr(mystr : &str) -> String\n\n{\n\n mystr.to_string()\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 19, "score": 78594.24904081077 }, { "content": "fn minierr(mystr : &str) -> String\n\n{\n\n mystr.to_string()\n\n}\n\n\n", "file_path": "src/compiler.rs", "rank": 20, "score": 78594.24904081077 }, { "content": "fn trim_at_null(mystr : &[u8]) -> &[u8]\n\n{\n\n let mut nullpos = 0usize;\n\n while nullpos < mystr.len() && mystr[nullpos] != 0\n\n {\n\n nullpos += 1\n\n }\n\n &mystr[..nullpos]\n\n}\n\n\n\n#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Debug)]\n\npub (crate) enum MiniStr {\n\n Short([u8; 8]),\n\n Long(String)\n\n}\n\n\n\nimpl MiniStr {\n\n pub (crate) fn from(text : &str) -> MiniStr\n\n {\n\n if text.len() <= 8\n", "file_path": "src/strings.rs", "rank": 21, "score": 62549.275918663385 }, { "content": "fn fat_vec<T>() -> Vec<T>\n\n{\n\n Vec::with_capacity(4)\n\n}\n\n\n\n// global interpreter data\n\npub struct GlobalState {\n\n instance_id: usize,\n\n pub (crate) instances: BTreeMap<usize, Instance>,\n\n pub (crate) instances_by_type: Box<BTreeMap<usize, BTreeSet<usize>>>,\n\n \n\n pub (crate) objects: Box<BTreeMap<usize, ObjSpec>>,\n\n pub (crate) variables: BTreeMap<usize, Value>, // accessed as global.varname\n\n pub (crate) barevariables: BTreeMap<usize, Value>, // accessed as varname\n\n pub (crate) functions: BTreeMap<usize, Value>, // accessed as funcname\n\n \n\n // TODO: same map\n\n pub (crate) bindings: Box<BTreeMap<usize, Rc<RefCell<Binding>>>>,\n\n pub (crate) trivial_bindings: Box<BTreeMap<usize, TrivialBinding>>,\n\n pub (crate) simple_bindings: Box<BTreeMap<usize, Rc<RefCell<SimpleBinding>>>>,\n", "file_path": "src/interpreter.rs", "rank": 22, "score": 59668.23878202894 }, { "content": " {\n\n self.global.trivial_simple_bindings.get(&name).copied()\n\n }\n\n pub (crate) fn get_arrow_binding(&self, name : usize) -> Option<Rc<RefCell<ArrowBinding>>>\n\n {\n\n match_or_none!(self.global.arrow_bindings.get(&name), Some(f) => Rc::clone(f))\n\n }\n\n pub (crate) fn get_trivial_arrow_binding(&self, name : usize) -> Option<TrivialArrowBinding>\n\n {\n\n self.global.trivial_arrow_bindings.get(&name).copied()\n\n }\n\n pub (crate) fn sim_func_print(mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n for arg in args.drain(..)\n\n {\n\n println!(\"{}\", format_val(&arg).ok_or_else(|| minierr(\"error: tried to print unprintable value\"))?);\n\n }\n\n Ok(Value::default())\n\n }\n\n pub (crate) fn sim_func_printraw(mut args : Vec<Value>) -> Result<Value, String>\n", "file_path": "src/interpreter/bindings.rs", "rank": 30, "score": 58132.4610893501 }, { "content": " \n\n let object_id = self.vec_pop_front_object(&mut args).ok_or_else(|| minierr(\"error: first argument to object_has_variable() must be an object\"))?;\n\n let text = self.vec_pop_front_text(&mut args).ok_or_else(|| minierr(\"error: second argument to object_has_variable() must be a string\"))?;\n\n let text_id = self.get_string_index(&text);\n\n \n\n let object = self.global.objects.get(&object_id).ok_or_else(|| format!(\"error: tried to use non-extant object type {}\", object_id))?;\n\n Ok(Value::Number(bool_floaty(object.variables.contains_key(&text_id))))\n\n }\n\n pub (crate) fn sim_func_object_has_function(&mut self, mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n if args.len() != 2\n\n {\n\n return Err(format!(\"error: wrong number of arguments to object_has_function(); expected 2, got {}\", args.len()));\n\n }\n\n \n\n let object_id = self.vec_pop_front_object(&mut args).ok_or_else(|| minierr(\"error: first argument to object_has_function() must be an object\"))?;\n\n let text = self.vec_pop_front_text(&mut args).ok_or_else(|| minierr(\"error: second argument to object_has_function() must be a string\"))?;\n\n let text_id = self.get_string_index(&text);\n\n \n\n let object = self.global.objects.get(&object_id).ok_or_else(|| format!(\"error: tried to use non-extant object type {}\", object_id))?;\n", "file_path": "src/interpreter/bindings.rs", "rank": 31, "score": 58122.037050713385 }, { "content": " {\n\n let mut val = Value::default();\n\n std::mem::swap(&mut self[index], &mut val);\n\n Some(val)\n\n }\n\n else\n\n {\n\n None\n\n }\n\n }\n\n fn extract_num(&mut self, index : usize) -> Result<f64, String>\n\n {\n\n let val = self.extract(index).ok_or_else(|| format!(\"error: wrong number of arguments; expected at least {}\", index+1))?;\n\n match_or_err!(val, Value::Number(num) => num, minierr(\"error: expected a number, got something else\"))\n\n }\n\n fn expect_extract(&mut self, index : usize) -> Result<Value, String>\n\n {\n\n self.extract(index).ok_or_else(|| minierr(\"internal error: error that should be unreachable in expect_extract\"))\n\n }\n\n}\n", "file_path": "src/interpreter/bindings.rs", "rank": 32, "score": 58121.31673925127 }, { "content": " let key = args.expect_extract(0)?;\n\n Ok(match myself.as_ref()\n\n {\n\n Value::Dict(ref dict) => Value::Number(bool_floaty(dict.contains_key(&val_to_hashval(key)?))),\n\n Value::Set (ref set ) => Value::Number(bool_floaty(set .contains (&val_to_hashval(key)?))),\n\n _ => return plainerr(\"error: remove() must be called with an array, dictionary, or set as its argument\")\n\n })\n\n }\n\n pub (crate) fn sim_subfunc_insert(mut myself : ValueLoc, mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n match myself.as_mut()?\n\n {\n\n Value::Text(ref mut string) =>\n\n {\n\n if args.len() != 2\n\n {\n\n return Err(format!(\"error: wrong number of arguments to insert() on a string; expected 2, got {}\", args.len()));\n\n }\n\n let key = args.expect_extract(0)?;\n\n if let Value::Text(value) = args.expect_extract(1)?\n", "file_path": "src/interpreter/bindings.rs", "rank": 33, "score": 58121.27884203025 }, { "content": " pub (crate) fn sim_subfunc_typeof_num(myself : ValueLoc, _args : Vec<Value>) -> Result<Value, String>\n\n {\n\n Ok(Value::Number(match myself.as_ref()\n\n {\n\n Value::Null => 0,\n\n Value::Number(_) => 1,\n\n Value::Text(_) => 2,\n\n Value::Array(_) => 3,\n\n Value::Dict(_) => 4,\n\n Value::Set(_) => 5,\n\n Value::Instance(_) => 6,\n\n Value::Object(_) => 7,\n\n Value::Func(_) => 8,\n\n Value::InternalFunc(_) => 9,\n\n Value::Generator(_) => 10,\n\n Value::Custom(_) => 11,\n\n Value::SubFunc(_) => 12,\n\n } as f64))\n\n }\n\n pub (crate) fn sim_subfunc_discriminator(myself : ValueLoc, _args : Vec<Value>) -> Result<Value, String>\n\n {\n\n match myself.as_ref()\n\n {\n\n Value::Custom(val) => Ok(Value::Number(val.discrim as f64)),\n\n _ => Err(\"error: used ->discriminator() on a value that was not a `Custom` value (used as a typed opaque pointer by applications that embed gammakit)\".to_string()),\n\n }\n\n }\n\n}", "file_path": "src/interpreter/bindings.rs", "rank": 34, "score": 58120.97824641162 }, { "content": " _ => plainerr(\"error: push() must be called with an array or string as the first argument\")\n\n }\n\n }\n\n pub (crate) fn sim_subfunc_remove(mut myself : ValueLoc, mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n if args.len() != 1\n\n {\n\n return Err(format!(\"error: wrong number of arguments to remove(); expected 1, got {}\", args.len()));\n\n }\n\n let key = args.expect_extract(0)?;\n\n match myself.as_mut()?\n\n {\n\n Value::Text(ref mut string) =>\n\n {\n\n let mut chars : Vec<char> = string.chars().collect();\n\n \n\n let index = match_or_err!(key, Value::Number(index) => index.round() as isize, minierr(\"error: tried to remove from a string with a non-number index\"))?;\n\n let index = if index < 0 {chars.len() - (-index as usize)} else {index as usize} as usize;\n\n \n\n let mid = chars.get(index..=index).ok_or_else(|| minierr(\"error: tried to remove from a string at an out-of-range index\"))?.iter().collect::<String>();\n", "file_path": "src/interpreter/bindings.rs", "rank": 35, "score": 58120.83489312479 }, { "content": " {\n\n for arg in args.drain(..)\n\n {\n\n print!(\"{}\", format_val(&arg).ok_or_else(|| minierr(\"error: tried to print unprintable value\"))?);\n\n }\n\n Ok(Value::default())\n\n }\n\n pub (crate) fn sim_func_string(args : Vec<Value>) -> Result<Value, String>\n\n {\n\n if args.len() != 1\n\n {\n\n return Err(format!(\"error: wrong number of arguments to string(); expected 1, got {}\", args.len()));\n\n }\n\n Ok(Value::Text(format_val(&args[0]).ok_or_else(|| minierr(\"error: tried to stringify an unprintable value\"))?))\n\n }\n\n pub (crate) fn sim_func_round(mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n let num = args.extract_num(0)?;\n\n Ok(Value::Number(num.round()))\n\n }\n", "file_path": "src/interpreter/bindings.rs", "rank": 36, "score": 58120.46225301376 }, { "content": " Ok(Value::default())\n\n }\n\n pub (crate) fn sim_func_object_count(&mut self, mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n if args.len() != 1\n\n {\n\n return Err(format!(\"error: wrong number of arguments to object_count(); expected 1, got {}\", args.len()));\n\n }\n\n \n\n let object_id = self.vec_pop_front_object(&mut args).ok_or_else(|| minierr(\"error: first argument to object_count() must be an object\"))?;\n\n \n\n let instance_list = self.global.instances_by_type.get(&object_id).ok_or_else(|| format!(\"error: tried to use non-extant object type {}\", object_id))?;\n\n Ok(Value::Number(instance_list.len() as f64))\n\n }\n\n pub (crate) fn sim_func_object_has_variable(&mut self, mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n if args.len() != 2\n\n {\n\n return Err(format!(\"error: wrong number of arguments to object_has_variable(); expected 2, got {}\", args.len()));\n\n }\n", "file_path": "src/interpreter/bindings.rs", "rank": 37, "score": 58120.043565217115 }, { "content": " \n\n Ok(Value::default())\n\n }\n\n pub (crate) fn sim_func_instance_has_function(&mut self, mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n if args.len() != 2\n\n {\n\n return Err(format!(\"error: wrong number of arguments to instance_has_function(); expected 2, got {}\", args.len()));\n\n }\n\n \n\n let instance_id = self.vec_pop_front_instance(&mut args).ok_or_else(|| minierr(\"error: first argument to instance_has_function() must be an instance\"))?;\n\n let text = self.vec_pop_front_text(&mut args).ok_or_else(|| minierr(\"error: second argument to instance_has_function() must be a string\"))?;\n\n let text_id = self.get_string_index(&text);\n\n \n\n if let Some(inst) = self.global.instances.get(&instance_id)\n\n {\n\n let object = self.global.objects.get(&inst.objtype).ok_or_else(|| format!(\"error: tried to use instance of non-extant object type {}\", inst.objtype))?;\n\n return Ok(Value::Number(bool_floaty(object.functions.contains_key(&text_id))));\n\n }\n\n \n", "file_path": "src/interpreter/bindings.rs", "rank": 38, "score": 58119.69986740487 }, { "content": " })\n\n }\n\n pub (crate) fn sim_subfunc_keys(myself : ValueLoc, args : Vec<Value>) -> Result<Value, String>\n\n {\n\n if !args.is_empty()\n\n {\n\n return Err(format!(\"error: wrong number of arguments to keys(); expected 0, got {}\", args.len()));\n\n }\n\n \n\n Ok(match myself.as_ref()\n\n {\n\n Value::Array(ref array) => Value::Array((0..array.len()).map(|i| Value::Number(i as f64)).collect()),\n\n Value::Dict(ref dict) => Value::Array(dict.iter().map(|(key, _)| hashval_to_val(key.clone())).collect()),\n\n _ => return plainerr(\"error: tried to take length of lengthless type\")\n\n })\n\n }\n\n pub (crate) fn sim_subfunc_slice(myself : ValueLoc, mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n if args.len() != 2\n\n {\n", "file_path": "src/interpreter/bindings.rs", "rank": 39, "score": 58118.97545114134 }, { "content": " {\n\n if !args.is_empty()\n\n {\n\n return Err(format!(\"error: wrong number of arguments to pop(); expected 0, got {}\", args.len()));\n\n }\n\n match myself.as_mut()?\n\n {\n\n Value::Array(ref mut array) =>\n\n {\n\n let ret = array.pop().ok_or_else(|| minierr(\"error: tried to call pop() on an empty array\"))?;\n\n Ok(ret)\n\n }\n\n _ => plainerr(\"error: pop() must be called with an array as the first argument\")\n\n }\n\n }\n\n pub (crate) fn sim_subfunc_replace_char(mut myself : ValueLoc, mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n if args.len() != 2\n\n {\n\n return Err(format!(\"error: wrong number of arguments to replace_char(); expected 2, got {}\", args.len()));\n", "file_path": "src/interpreter/bindings.rs", "rank": 40, "score": 58118.89815842667 }, { "content": " pub (crate) fn sim_func_ceil(mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n let num = args.extract_num(0)?;\n\n Ok(Value::Number(num.ceil()))\n\n }\n\n pub (crate) fn sim_func_floor(mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n let num = args.extract_num(0)?;\n\n Ok(Value::Number(num.floor()))\n\n }\n\n pub (crate) fn sim_func_sqrt(mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n let num = args.extract_num(0)?;\n\n Ok(Value::Number(num.sqrt()))\n\n }\n\n pub (crate) fn sim_func_pow(mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n let num1 = args.extract_num(0)?;\n\n let num2 = args.extract_num(1)?;\n\n Ok(Value::Number(num1.powf(num2)))\n", "file_path": "src/interpreter/bindings.rs", "rank": 41, "score": 58117.33383558617 }, { "content": " return Ok(Value::Object(inst.objtype));\n\n }\n\n \n\n Ok(Value::default())\n\n }\n\n pub (crate) fn sim_func_instance_has_variable(&mut self, mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n if args.len() != 2\n\n {\n\n return Err(format!(\"error: wrong number of arguments to instance_has_variable(); expected 2, got {}\", args.len()));\n\n }\n\n \n\n let instance_id = self.vec_pop_front_instance(&mut args).ok_or_else(|| minierr(\"error: first argument to instance_has_variable() must be an instance\"))?;\n\n let text = self.vec_pop_front_text(&mut args).ok_or_else(|| minierr(\"error: second argument to instance_has_variable() must be a string\"))?;\n\n let text_id = self.get_string_index(&text);\n\n \n\n if let Some(inst) = self.global.instances.get(&instance_id)\n\n {\n\n return Ok(Value::Number(bool_floaty(inst.variables.contains_key(&text_id))));\n\n }\n", "file_path": "src/interpreter/bindings.rs", "rank": 42, "score": 58117.2912669526 }, { "content": " }\n\n pub (crate) fn sim_subfunc_typeof_str(myself : ValueLoc, _args : Vec<Value>) -> Result<Value, String>\n\n {\n\n Ok(Value::Text(match myself.as_ref()\n\n {\n\n Value::Null => \"null\",\n\n Value::Number(_) => \"number\",\n\n Value::Text(_) => \"string\",\n\n Value::Array(_) => \"array\",\n\n Value::Dict(_) => \"dict\",\n\n Value::Set(_) => \"set\",\n\n Value::Instance(_) => \"instance\",\n\n Value::Object(_) => \"object\",\n\n Value::Func(_) => \"function\",\n\n Value::InternalFunc(_) => \"internal function\",\n\n Value::Generator(_) => \"generator state\",\n\n Value::Custom(_) => \"custom\",\n\n Value::SubFunc(_) => \"arrow function\",\n\n }.to_string()))\n\n }\n", "file_path": "src/interpreter/bindings.rs", "rank": 43, "score": 58116.809025427705 }, { "content": " }\n\n pub (crate) fn sim_func_log(mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n let num1 = args.extract_num(0)?;\n\n let num2 = args.extract_num(1)?;\n\n Ok(Value::Number(num1.log(num2)))\n\n }\n\n pub (crate) fn sim_func_ln(mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n let num = args.extract_num(0)?;\n\n Ok(Value::Number(num.ln()))\n\n }\n\n pub (crate) fn sim_func_instance_create(&mut self, mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n if args.len() != 1\n\n {\n\n return Err(format!(\"error: wrong number of arguments to instance_create(); expected 1, got {}\", args.len()));\n\n }\n\n let create_index = self.get_string_index(&\"create\".to_string());\n\n let id_index = self.get_string_index(&\"id\".to_string());\n", "file_path": "src/interpreter/bindings.rs", "rank": 44, "score": 58116.78269251574 }, { "content": " else\n\n {\n\n plainerr(\"error: tried to remove non-extant key from dict\")\n\n }\n\n }\n\n Value::Set(ref mut set) =>\n\n {\n\n if set.remove(&val_to_hashval(key.clone())?)\n\n {\n\n Ok(Value::default())\n\n }\n\n else\n\n {\n\n plainerr(\"error: tried to remove non-extant value from set\")\n\n }\n\n }\n\n _ => plainerr(\"error: remove() must be called with an array, dictionary, or set as its argument\")\n\n }\n\n }\n\n pub (crate) fn sim_subfunc_pop(mut myself : ValueLoc, args : Vec<Value>) -> Result<Value, String>\n", "file_path": "src/interpreter/bindings.rs", "rank": 45, "score": 58116.6953488553 }, { "content": " let value = args.expect_extract(1)?;\n\n dict.insert(val_to_hashval(key)?, value);\n\n Ok(Value::default())\n\n }\n\n Value::Set(ref mut set) =>\n\n {\n\n if args.len() != 1\n\n {\n\n return Err(format!(\"error: wrong number of arguments to insert() on a set; expected 1, got {}\", args.len()));\n\n }\n\n let key = args.expect_extract(0)?;\n\n set.insert(val_to_hashval(key)?);\n\n Ok(Value::default())\n\n }\n\n _ => plainerr(\"error: insert() must be called with an array, dictionary, set, or string as the first argument\")\n\n }\n\n }\n\n pub (crate) fn sim_subfunc_push(mut myself : ValueLoc, mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n if args.len() != 1\n", "file_path": "src/interpreter/bindings.rs", "rank": 46, "score": 58116.096777428385 }, { "content": " {\n\n // FIXME use codepoint indexes for this\n\n let chars : Vec<char> = string.chars().collect();\n\n \n\n let index = match_or_err!(key, Value::Number(index) => index.round() as isize, minierr(\"error: tried to insert into a string with a non-number index\"))?;\n\n let index = if index < 0 {chars.len() - (-index as usize)} else {index as usize} as usize;\n\n \n\n let left = chars.get(0..index).ok_or_else(|| minierr(\"error: tried to insert into a string at an out-of-range index\"))?.iter().collect::<String>();\n\n let right = chars.get(index..chars.len()).ok_or_else(|| minierr(\"error: tried to insert into a string at an out-of-range index\"))?.iter().collect::<String>();\n\n \n\n let newstr = format!(\"{}{}{}\", left, value, right);\n\n *string = newstr;\n\n \n\n return Ok(Value::default());\n\n }\n\n plainerr(\"error: tried to insert a non-string into a string with insert()\")\n\n }\n\n Value::Array(ref mut array) =>\n\n {\n\n if args.len() != 2\n", "file_path": "src/interpreter/bindings.rs", "rank": 47, "score": 58115.8401288224 }, { "content": " while self.global.instances.contains_key(&self.global.instance_id)\n\n {\n\n self.global.instance_id += 1;\n\n }\n\n \n\n Ok(Value::Instance(instance_id))\n\n }\n\n pub (crate) fn sim_func_instance_exists(&mut self, mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n if args.len() != 1\n\n {\n\n return Err(format!(\"error: wrong number of arguments to instance_create(); expected 1, got {}\", args.len()));\n\n }\n\n \n\n let instance_id = self.vec_pop_front_instance(&mut args).ok_or_else(|| minierr(\"error: first argument to instance_exists() must be an instance\"))?;\n\n \n\n Ok(Value::Number(bool_floaty(self.global.instances.contains_key(&instance_id))))\n\n }\n\n pub (crate) fn sim_func_instance_kill(&mut self, mut args : Vec<Value>) -> Result<Value, String>\n\n {\n", "file_path": "src/interpreter/bindings.rs", "rank": 48, "score": 58115.83170986348 }, { "content": " Ok(Value::Number(bool_floaty(object.functions.contains_key(&text_id))))\n\n }\n\n pub (crate) fn sim_func_parse_text(&mut self, mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n if args.len() != 1\n\n {\n\n return Err(format!(\"error: wrong number of arguments to parse_text(); expected 1, got {}\", args.len()));\n\n }\n\n \n\n let text = self.vec_pop_front_text(&mut args).ok_or_else(|| minierr(\"error: first argument to parse_text() must be a string\"))?;\n\n let parser = &mut self.global.parser;\n\n \n\n let program_lines : Vec<String> = text.lines().map(|x| x.to_string()).collect();\n\n let tokens = parser.tokenize(&program_lines, true)?;\n\n \n\n let ast = parser.parse_program(&tokens, &program_lines, true)?.ok_or_else(|| minierr(\"error: string failed to parse\"))?;\n\n \n\n Ok(ast_to_dict(&ast))\n\n }\n\n pub (crate) fn sim_func_parse_text_with_grammar(&mut self, mut args : Vec<Value>) -> Result<Value, String>\n", "file_path": "src/interpreter/bindings.rs", "rank": 49, "score": 58115.49213189613 }, { "content": " \n\n insert_arrow!(\"replace_char\" , sim_subfunc_replace_char );\n\n \n\n insert_arrow!(\"typeof_str\" , sim_subfunc_typeof_str );\n\n insert_arrow!(\"typeof_num\" , sim_subfunc_typeof_num );\n\n insert_arrow!(\"discriminator\" , sim_subfunc_discriminator );\n\n }\n\n pub (crate) fn get_binding(&self, name : usize) -> Option<Rc<RefCell<Binding>>>\n\n {\n\n match_or_none!(self.global.bindings.get(&name), Some(f) => Rc::clone(f))\n\n }\n\n pub (crate) fn get_trivial_binding(&self, name : usize) -> Option<TrivialBinding>\n\n {\n\n self.global.trivial_bindings.get(&name).copied()\n\n }\n\n pub (crate) fn get_simple_binding(&self, name : usize) -> Option<Rc<RefCell<SimpleBinding>>>\n\n {\n\n match_or_none!(self.global.simple_bindings.get(&name), Some(f) => Rc::clone(f))\n\n }\n\n pub (crate) fn get_trivial_simple_binding(&self, name : usize) -> Option<TrivialSimpleBinding>\n", "file_path": "src/interpreter/bindings.rs", "rank": 50, "score": 58115.448139408785 }, { "content": " }\n\n let indexnum = match_or_err!(args.expect_extract(0)?, Value::Number(indexnum) => indexnum, minierr(\"error: argument 1 to replace_char must be a number\"))?.round() as usize;\n\n let insert = match_or_err!(args.expect_extract(1)?, Value::Text(text) => text, minierr(\"error: argument 2 to replace_char must be text\"))?;\n\n \n\n match myself.as_mut()?\n\n {\n\n Value::Text(ref mut string) =>\n\n {\n\n if let Some((i, c)) = string.char_indices().nth(indexnum)\n\n {\n\n string.replace_range(i..i+c.len_utf8(), &insert);\n\n Ok(Value::default())\n\n }\n\n else\n\n {\n\n plainerr(\"error: tried to access past the end of a string with replace_char\")\n\n }\n\n }\n\n _ => plainerr(\"error: replace_char() must be called on a string\")\n\n }\n", "file_path": "src/interpreter/bindings.rs", "rank": 51, "score": 58115.438597054024 }, { "content": " {\n\n return Err(format!(\"error: wrong number of arguments to push(); expected 1, got {}\", args.len()));\n\n }\n\n let value = args.expect_extract(0)?;\n\n match myself.as_mut()?\n\n {\n\n Value::Text(ref mut string) =>\n\n {\n\n if let Value::Text(value) = value\n\n {\n\n *string = format!(\"{}{}\", string, value);\n\n return Ok(Value::default());\n\n }\n\n plainerr(\"error: tried to concatenate a non-string to a string with push()\")\n\n }\n\n Value::Array(ref mut array) =>\n\n {\n\n array.push(value);\n\n Ok(Value::default())\n\n }\n", "file_path": "src/interpreter/bindings.rs", "rank": 52, "score": 58114.96859190583 }, { "content": " pub fn insert_arrow_binding(&mut self, funcname : String, func : Rc<RefCell<ArrowBinding>>)\n\n {\n\n let index = self.get_string_index(&funcname);\n\n self.global.arrow_bindings.insert(index, func);\n\n }\n\n pub fn insert_trivial_arrow_binding(&mut self, funcname : String, func : TrivialArrowBinding)\n\n {\n\n let index = self.get_string_index(&funcname);\n\n self.global.trivial_arrow_bindings.insert(index, func);\n\n }\n\n /// Inserts or reinserts the default bindings. These SHOULD be safe, but if you're paranoid or you're making a very restrictive implementation of gammakit, you can feel free not to call this after initializing the interpreter.\n\n pub fn insert_default_bindings(&mut self)\n\n {\n\n macro_rules! insert { ( $x:expr, $y:ident ) => { self.insert_trivial_binding($x.to_string(), Interpreter::$y); } }\n\n \n\n insert!(\"parse_text\" , sim_func_parse_text );\n\n insert!(\"parse_text_with_grammar\", sim_func_parse_text_with_grammar );\n\n \n\n insert!(\"compile_text\" , sim_func_compile_text );\n\n insert!(\"compile_ast\" , sim_func_compile_ast );\n", "file_path": "src/interpreter/bindings.rs", "rank": 53, "score": 58114.96212046055 }, { "content": " forcecontext : 0,\n\n generator : false,\n\n }\n\n ) )\n\n }\n\n \n\n pub (crate) fn sim_subfunc_len(myself : ValueLoc, args : Vec<Value>) -> Result<Value, String>\n\n {\n\n if !args.is_empty()\n\n {\n\n return Err(format!(\"error: wrong number of arguments to len(); expected 0, got {}\", args.len()));\n\n }\n\n \n\n Ok(match myself.as_ref()\n\n {\n\n Value::Text(ref string) => Value::Number(string.chars().count() as f64),\n\n Value::Array(ref array) => Value::Number(array.len() as f64),\n\n Value::Dict(ref dict) => Value::Number(dict.keys().len() as f64),\n\n Value::Set(ref set) => Value::Number(set.len() as f64),\n\n _ => return plainerr(\"error: tried to take length of lengthless type\")\n", "file_path": "src/interpreter/bindings.rs", "rank": 54, "score": 58113.65580532151 }, { "content": " return Err(format!(\"error: wrong number of arguments to slice(); expected 2, got {}\", args.len()));\n\n }\n\n let start = args.expect_extract(0)?;\n\n let end = args.expect_extract(1)?;\n\n let start = match_or_err!(start, Value::Number(start) => start.round() as i64, minierr(\"error: start and end indexes passed to slice() must be numbers\"))?;\n\n let end = match_or_err!(end, Value::Number(end) => end.round() as i64, minierr(\"error: start and end indexes passed to slice() must be numbers\"))?;\n\n \n\n Ok(match myself.as_ref()\n\n {\n\n Value::Text(ref string) => slice_any(&string.chars().collect::<Vec<char>>(), start, end).map(|array| Value::Text(array.iter().cloned().collect())).ok_or_else(|| minierr(\"error: slice() on string went out of range\"))?,\n\n Value::Array(ref array) => slice_any(&array, start, end).map(|array| Value::Array(array.to_vec())).ok_or_else(|| minierr(\"error: slice() on array went out of range\"))?,\n\n _ => return plainerr(\"error: tried to slice lengthless type\")\n\n })\n\n }\n\n pub (crate) fn sim_subfunc_contains(myself : ValueLoc, mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n if args.len() != 1\n\n {\n\n return Err(format!(\"error: wrong number of arguments to contains(); expected 1, got {}\", args.len()));\n\n }\n", "file_path": "src/interpreter/bindings.rs", "rank": 55, "score": 58113.49938634782 }, { "content": " chars.drain(index..=index);\n\n *string = chars.iter().collect();\n\n Ok(Value::Text(mid))\n\n }\n\n Value::Array(ref mut array) =>\n\n {\n\n let index = match_or_err!(key, Value::Number(index) => index.round() as isize, minierr(\"error: tried to remove from an array with a non-number index\"))?;\n\n if index < 0 || index as usize > array.len()\n\n {\n\n return plainerr(\"error: tried to remove from an array at an out-of-range index\");\n\n }\n\n let removed = array.remove(index as usize);\n\n Ok(removed)\n\n }\n\n Value::Dict(ref mut dict) =>\n\n {\n\n if let Some(removed) = dict.remove(&val_to_hashval(key)?)\n\n {\n\n Ok(removed)\n\n }\n", "file_path": "src/interpreter/bindings.rs", "rank": 56, "score": 58113.42017232631 }, { "content": " \n\n Value::Dict(Box::new(astdict))\n\n}\n\n\n\npub (crate) fn dict_to_ast(dict : &HashMap<HashableValue, Value>) -> Result<ASTNode, String>\n\n{\n\n let mut ast = dummy_astnode();\n\n \n\n macro_rules! get { ( $as:ident, $dict:expr, $str:expr ) =>\n\n {\n\n match $dict.get(&HashableValue::Text($str.to_string()))\n\n {\n\n Some(Value::$as(this)) => Ok(this),\n\n Some(_) => Err(format!(\"error: tried to turn dict into ast, but dict's {} field was of the wrong type\", $str)),\n\n _ => Err(format!(\"error: tried to turn dict into ast, but dict lacked {} field\", $str))\n\n }\n\n } }\n\n \n\n ast.text = get!(Text, dict, \"text\")?.clone();\n\n ast.line = get!(Number, dict, \"line\")?.round() as usize;\n", "file_path": "src/interpreter/bindings.rs", "rank": 57, "score": 58113.36458731775 }, { "content": " forcecontext : 0,\n\n generator : false,\n\n }\n\n ) )\n\n }\n\n pub (crate) fn sim_func_compile_ast_generator(&mut self, mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n if args.len() != 1\n\n {\n\n return Err(format!(\"error: wrong number of arguments to compile_ast_generator(); expected 1, got {}\", args.len()));\n\n }\n\n \n\n let dict = self.vec_pop_front_dict(&mut args).ok_or_else(|| minierr(\"error: first argument to compile_ast_generator() must be a dictionary\"))?;\n\n let ast = dict_to_ast(&dict)?;\n\n let code = compile_bytecode(&ast, &mut self.global)?;\n\n \n\n // endaddr at the start because Rc::new() moves `code`\n\n Ok\n\n ( Value::new_funcval\n\n ( None,\n", "file_path": "src/interpreter/bindings.rs", "rank": 58, "score": 58113.12308405193 }, { "content": " }\n\n /// Insert a normal binding that does not need access to the interpreter.\n\n pub fn insert_simple_binding(&mut self, funcname : String, func : Rc<RefCell<SimpleBinding>>)\n\n {\n\n let index = self.get_string_index(&funcname);\n\n self.global.bindings.remove(&index);\n\n self.global.trivial_bindings.remove(&index);\n\n self.global.trivial_simple_bindings.remove(&index);\n\n self.global.simple_bindings.insert(index, func);\n\n }\n\n /// Insert a normal binding that does not need access to the interpreter or indeed any other external state.\n\n pub fn insert_trivial_simple_binding(&mut self, funcname : String, func : TrivialSimpleBinding)\n\n {\n\n let index = self.get_string_index(&funcname);\n\n self.global.bindings.remove(&index);\n\n self.global.trivial_bindings.remove(&index);\n\n self.global.simple_bindings.remove(&index);\n\n self.global.trivial_simple_bindings.insert(index, func);\n\n }\n\n /// Insert an associated function (\"arrow\" function) binding.\n", "file_path": "src/interpreter/bindings.rs", "rank": 59, "score": 58112.28509046886 }, { "content": " {\n\n return Err(format!(\"error: wrong number of arguments to insert() on an array; expected 2, got {}\", args.len()));\n\n }\n\n let key = args.expect_extract(0)?;\n\n let value = args.expect_extract(1)?;\n\n let index = match_or_err!(key, Value::Number(index) => index.round() as isize, minierr(\"error: tried to insert into an array with a non-number index\"))?;\n\n if index < 0 || index as usize > array.len()\n\n {\n\n return plainerr(\"error: tried to insert into an array at an out-of-range index\");\n\n }\n\n array.insert(index as usize, value);\n\n Ok(Value::default())\n\n }\n\n Value::Dict(ref mut dict) =>\n\n {\n\n if args.len() != 2\n\n {\n\n return Err(format!(\"error: wrong number of arguments to insert() on a dict; expected 2, got {}\", args.len()));\n\n }\n\n let key = args.expect_extract(0)?;\n", "file_path": "src/interpreter/bindings.rs", "rank": 60, "score": 58112.07430223389 }, { "content": " FuncSpec\n\n { endaddr : code.len(), // must be before code : Rc::new(code)\n\n argcount : 0,\n\n code,\n\n startaddr : 0,\n\n fromobj : false,\n\n parentobj : 0,\n\n forcecontext : 0,\n\n generator : true,\n\n }\n\n ) )\n\n }\n\n\n\n pub (crate) fn sim_func_compile_text(&mut self, mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n if args.len() != 1\n\n {\n\n return Err(format!(\"error: wrong number of arguments to compile_text(); expected 1, got {}\", args.len()));\n\n }\n\n let text = self.vec_pop_front_text(&mut args).ok_or_else(|| minierr(\"error: first argument to compile_text() must be a string\"))?;\n", "file_path": "src/interpreter/bindings.rs", "rank": 61, "score": 58111.778301846476 }, { "content": " {\n\n if let Some(ref mut instance_list) = self.global.instances_by_type.get_mut(&inst.objtype)\n\n {\n\n instance_list.remove(&instance_id);\n\n }\n\n }\n\n \n\n Ok(Value::default())\n\n }\n\n pub (crate) fn sim_func_instance_object(&mut self, mut args : Vec<Value>) -> Result<Value, String>\n\n {\n\n if args.len() != 1\n\n {\n\n return Err(format!(\"error: wrong number of arguments to instance_object(); expected 1, got {}\", args.len()));\n\n }\n\n \n\n let instance_id = self.vec_pop_front_instance(&mut args).ok_or_else(|| minierr(\"error: first argument to instance_kill() must be an instance\"))?;\n\n \n\n if let Some(inst) = self.global.instances.get(&instance_id)\n\n {\n", "file_path": "src/interpreter/bindings.rs", "rank": 62, "score": 58111.31616470902 }, { "content": " {\n\n if args.len() != 2\n\n {\n\n return Err(format!(\"error: wrong number of arguments to parse_text_with_grammar(); expected 2, got {}\", args.len()));\n\n }\n\n \n\n let text = self.vec_pop_front_text(&mut args).ok_or_else(|| minierr(\"error: first argument to parse_text_with_grammar() must be a string\"))?;\n\n let grammar = self.vec_pop_front_text(&mut args).ok_or_else(|| minierr(\"error: second argument to parse_text_with_grammar() must be a string\"))?;\n\n let mut parser = Parser::new_from_grammar(&grammar)?;\n\n \n\n let program_lines : Vec<String> = text.lines().map(|x| x.to_string()).collect();\n\n let tokens = parser.tokenize(&program_lines, true)?;\n\n \n\n let ast = parser.parse_program(&tokens, &program_lines, true)?.ok_or_else(|| minierr(\"error: string failed to parse\"))?;\n\n \n\n Ok(ast_to_dict(&ast))\n\n }\n\n\n\n pub (crate) fn sim_func_compile_ast(&mut self, mut args : Vec<Value>) -> Result<Value, String>\n\n {\n", "file_path": "src/interpreter/bindings.rs", "rank": 63, "score": 58110.89779678197 }, { "content": "#![allow(clippy::type_complexity)]\n\n#![allow(clippy::cast_lossless)]\n\n\n\nuse super::*;\n\nuse super::variableaccess::ValueLoc;\n\nuse super::types::ops::{float_booly, bool_floaty};\n\n\n", "file_path": "src/interpreter/bindings.rs", "rank": 64, "score": 58110.306969466685 }, { "content": " ast.position = get!(Number, dict, \"position\")?.round() as usize;\n\n ast.isparent = float_booly(*get!(Number, dict, \"isparent\")?);\n\n \n\n // ast.children from dummy_astnode() starts out extant but empty\n\n \n\n for child in get!(Array, dict, \"children\")?.iter()\n\n {\n\n let subnode = match_or_err!(child, Value::Dict(dict) => dict, minierr(\"error: values in list of children in ast node must be dictionaries that are themselves ast nodes\"))?;\n\n ast.children.push(dict_to_ast(subnode)?);\n\n }\n\n \n\n // FIXME make this not use get! or something\n\n ast.precedence = get!(Number, dict, \"precedence\").map(|x| x.round() as u64).ok();\n\n \n\n Ok(ast)\n\n}\n\n\n\nimpl Interpreter\n\n{\n\n /// Insert a normal binding that needs access to the interpreter.\n", "file_path": "src/interpreter/bindings.rs", "rank": 65, "score": 58110.23956350544 }, { "content": " ///\n\n /// The reference-counter wrapping is required to pass functions.\n\n ///\n\n /// The reference cell wrapping is required to support lambdas that have closure over mutable references, because that closure may indirectly include the interpreter itself. See magmakit for examples.\n\n pub fn insert_binding(&mut self, funcname : String, func : Rc<RefCell<Binding>>)\n\n {\n\n let index = self.get_string_index(&funcname);\n\n self.global.trivial_bindings.remove(&index);\n\n self.global.simple_bindings.remove(&index);\n\n self.global.trivial_simple_bindings.remove(&index);\n\n self.global.bindings.insert(index, func);\n\n }\n\n /// Insert a normal binding that does not need access to the interpreter or indeed any other external state.\n\n pub fn insert_trivial_binding(&mut self, funcname : String, func : TrivialBinding)\n\n {\n\n let index = self.get_string_index(&funcname);\n\n self.global.bindings.remove(&index);\n\n self.global.simple_bindings.remove(&index);\n\n self.global.trivial_simple_bindings.remove(&index);\n\n self.global.trivial_bindings.insert(index, func);\n", "file_path": "src/interpreter/bindings.rs", "rank": 66, "score": 58110.04178394237 }, { "content": " if args.len() != 1\n\n {\n\n return Err(format!(\"error: wrong number of arguments to instance_create(); expected 1, got {}\", args.len()));\n\n }\n\n \n\n let destroy_index = self.get_string_index(&\"destroy\".to_string());\n\n let instance_id = self.vec_pop_front_instance(&mut args).ok_or_else(|| minierr(\"error: first argument to instance_kill() must be an instance\"))?;\n\n \n\n if let Some(inst) = self.global.instances.get(&instance_id)\n\n {\n\n let object = self.global.objects.get(&inst.objtype).ok_or_else(|| format!(\"error: tried to kill instance of non-extant object type {}\", inst.objtype))?;\n\n if let Some(function) = object.functions.get(&destroy_index)\n\n {\n\n let mut mydata = function.clone();\n\n mydata.forcecontext = instance_id;\n\n let pseudo_funcvar = Box::new(FuncVal{predefined : None, userdefdata : mydata});\n\n self.call_function(pseudo_funcvar, Vec::new(), false)?;\n\n }\n\n }\n\n if let Some(inst) = self.global.instances.remove(&instance_id)\n", "file_path": "src/interpreter/bindings.rs", "rank": 67, "score": 58109.658463744345 }, { "content": " \n\n let object_id = self.vec_pop_front_object(&mut args).ok_or_else(|| minierr(\"error: first argument to instance_create() must be an object\"))?;\n\n \n\n let instance_id = self.global.instance_id as usize;\n\n if self.global.instances.len() == !0usize\n\n {\n\n return plainerr(\"error: ran out of instance id space\");\n\n }\n\n let object = self.global.objects.get(&object_id).ok_or_else(|| format!(\"error: tried to create instance of non-extant object type {}\", object_id))?;\n\n \n\n let mut variables = BTreeMap::new();\n\n variables.insert(id_index, Value::Instance(instance_id));\n\n for var in object.variables.keys() // FIXME make this stuff use exact index somehow\n\n {\n\n if *var != id_index\n\n {\n\n variables.insert(*var, Value::default());\n\n }\n\n }\n\n self.global.instances.insert(instance_id, Instance { objtype : object_id, ident : instance_id, variables });\n", "file_path": "src/interpreter/bindings.rs", "rank": 68, "score": 58108.108068889094 }, { "content": " insert!(\"compile_ast_generator\" , sim_func_compile_ast_generator );\n\n \n\n insert!(\"instance_create\" , sim_func_instance_create );\n\n insert!(\"instance_exists\" , sim_func_instance_exists );\n\n insert!(\"instance_kill\" , sim_func_instance_kill );\n\n insert!(\"instance_object\" , sim_func_instance_object );\n\n insert!(\"instance_has_variable\" , sim_func_instance_has_variable );\n\n insert!(\"instance_has_function\" , sim_func_instance_has_function );\n\n \n\n insert!(\"object_count\" , sim_func_object_count );\n\n insert!(\"object_has_variable\" , sim_func_object_has_variable );\n\n insert!(\"object_has_function\" , sim_func_object_has_function );\n\n \n\n macro_rules! insert_simple { ( $x:expr, $y:ident ) => { self.insert_trivial_simple_binding($x.to_string(), Interpreter::$y); } }\n\n \n\n insert_simple!(\"print\" , sim_func_print );\n\n insert_simple!(\"printraw\" , sim_func_printraw );\n\n insert_simple!(\"string\" , sim_func_string );\n\n \n\n insert_simple!(\"round\" , sim_func_round );\n", "file_path": "src/interpreter/bindings.rs", "rank": 69, "score": 58108.0238449229 }, { "content": "\n\npub (crate) fn ast_to_dict(ast : &ASTNode) -> Value\n\n{\n\n let mut astdict = HashMap::<HashableValue, Value>::new();\n\n \n\n macro_rules! to_key { ( $str:expr ) => { HashableValue::Text($str.to_string()) } }\n\n \n\n astdict.insert(to_key!(\"text\"), Value::Text(ast.text.clone()));\n\n astdict.insert(to_key!(\"line\"), Value::Number(ast.line as f64));\n\n astdict.insert(to_key!(\"position\"), Value::Number(ast.line as f64));\n\n astdict.insert(to_key!(\"isparent\"), Value::Number(bool_floaty(ast.isparent)));\n\n \n\n let children : Vec<Value> = ast.children.iter().map(|child| ast_to_dict(child)).collect();\n\n \n\n astdict.insert(to_key!(\"children\"), Value::Array(children));\n\n \n\n if let Some(precedence) = ast.precedence\n\n {\n\n astdict.insert(to_key!(\"precedence\"), Value::Number(precedence as f64));\n\n }\n", "file_path": "src/interpreter/bindings.rs", "rank": 70, "score": 58105.60165312194 }, { "content": " if args.len() != 1\n\n {\n\n return Err(format!(\"error: wrong number of arguments to compile_ast(); expected 1, got {}\", args.len()));\n\n }\n\n \n\n let dict = self.vec_pop_front_dict(&mut args).ok_or_else(|| minierr(\"error: first argument to compile_ast() must be a dictionary\"))?;\n\n let ast = dict_to_ast(&dict)?;\n\n let code = compile_bytecode(&ast, &mut self.global)?;\n\n \n\n // endaddr at the start because Rc::new() moves `code`\n\n Ok\n\n ( Value::new_funcval\n\n ( None,\n\n FuncSpec\n\n { endaddr : code.len(), // must be before code : Rc::new(code)\n\n argcount : 0,\n\n code,\n\n startaddr : 0,\n\n fromobj : false,\n\n parentobj : 0,\n", "file_path": "src/interpreter/bindings.rs", "rank": 71, "score": 58104.972039709144 }, { "content": " \n\n let program_lines : Vec<String> = text.lines().map(|x| x.to_string()).collect();\n\n let parser = &mut self.global.parser;\n\n \n\n let tokens = parser.tokenize(&program_lines, true)?;\n\n let ast = parser.parse_program(&tokens, &program_lines, true)?.ok_or_else(|| minierr(\"error: string failed to parse\"))?;\n\n \n\n let code = compile_bytecode(&ast, &mut self.global)?;\n\n \n\n // endaddr at the start because Rc::new() moves `code`\n\n Ok\n\n ( Value::new_funcval\n\n ( None,\n\n FuncSpec\n\n { endaddr : code.len(), // must be before code : Rc::new(code)\n\n argcount : 0,\n\n code,\n\n startaddr : 0,\n\n fromobj : false,\n\n parentobj : 0,\n", "file_path": "src/interpreter/bindings.rs", "rank": 72, "score": 58104.679536403346 }, { "content": " \n\n if let Some(ref mut instance_list) = self.global.instances_by_type.get_mut(&object_id)\n\n {\n\n instance_list.insert(instance_id);\n\n }\n\n else\n\n {\n\n let mut instance_list = BTreeSet::new();\n\n instance_list.insert(instance_id);\n\n self.global.instances_by_type.insert(object_id, instance_list);\n\n }\n\n \n\n if let Some(function) = object.functions.get(&create_index)\n\n {\n\n let mut mydata = function.clone();\n\n mydata.forcecontext = instance_id;\n\n let pseudo_funcvar = Box::new(FuncVal{predefined : None, userdefdata : mydata});\n\n self.call_function(pseudo_funcvar, Vec::new(), false)?;\n\n }\n\n \n", "file_path": "src/interpreter/bindings.rs", "rank": 73, "score": 58104.606474651264 }, { "content": " insert_simple!(\"floor\" , sim_func_floor );\n\n insert_simple!(\"ceil\" , sim_func_ceil );\n\n \n\n insert_simple!(\"sqrt\" , sim_func_sqrt );\n\n insert_simple!(\"pow\" , sim_func_pow );\n\n insert_simple!(\"log\" , sim_func_log );\n\n insert_simple!(\"ln\" , sim_func_ln );\n\n \n\n macro_rules! insert_arrow { ( $x:expr, $y:ident ) => { self.insert_trivial_arrow_binding($x.to_string(), Interpreter::$y); } }\n\n \n\n insert_arrow!(\"len\" , sim_subfunc_len );\n\n insert_arrow!(\"keys\" , sim_subfunc_keys );\n\n insert_arrow!(\"slice\" , sim_subfunc_slice );\n\n insert_arrow!(\"contains\" , sim_subfunc_contains );\n\n \n\n insert_arrow!(\"insert\" , sim_subfunc_insert );\n\n insert_arrow!(\"remove\" , sim_subfunc_remove );\n\n \n\n insert_arrow!(\"push\" , sim_subfunc_push );\n\n insert_arrow!(\"pop\" , sim_subfunc_pop );\n", "file_path": "src/interpreter/bindings.rs", "rank": 74, "score": 58104.46766169461 }, { "content": "\n\nmacro_rules! match_or_none { ( $expr:expr, $as:pat => $ok:expr ) =>\n\n{\n\n match $expr\n\n {\n\n $as => Some($ok),\n\n _ => None\n\n }\n\n} }\n", "file_path": "src/matches.rs", "rank": 75, "score": 33468.06656293666 }, { "content": "// This is a braindead version of the matches! macro in SimonSapin(?)'s rust-std-candidates repository: https://github.com/SimonSapin/rust-std-candidates#the-matches-macro\n\nmacro_rules! matches { ( $x:expr , $( $p:pat )|+ ) =>\n\n{\n\n match $x\n\n {\n\n $($p)|+ => true,\n\n _ => false\n\n }\n\n} }\n\n\n\n\n\nmacro_rules! match_or_err { ( $expr:expr, $as:pat => $ok:expr, $err:expr ) =>\n\n{\n\n match $expr\n\n {\n\n $as => Ok($ok),\n\n _ => Err($err)\n\n }\n\n} }\n\n\n", "file_path": "src/matches.rs", "rank": 76, "score": 33463.82886285447 }, { "content": " {\n\n let mut ret : [u8; 8] = [0,0,0,0,0,0,0,0];\n\n for (i, c) in text.bytes().enumerate()\n\n {\n\n ret[i] = c;\n\n }\n\n return MiniStr::Short(ret);\n\n }\n\n MiniStr::Long(text.to_string())\n\n }\n\n #[allow(clippy::wrong_self_convention)]\n\n pub (crate) fn into_string(self) -> String\n\n {\n\n match self\n\n {\n\n MiniStr::Short(bytes) => std::str::from_utf8(trim_at_null(&bytes)).map(|x| x.to_string()).unwrap_or_else(|_| \"<err>\".to_string()),\n\n MiniStr::Long(string) => string\n\n }\n\n }\n\n}\n", "file_path": "src/strings.rs", "rank": 77, "score": 33174.71865355437 }, { "content": " 't' => ret.push('\\t'),\n\n '\"' => ret.push('\"'),\n\n _ => ret.extend(&[c, c2])\n\n }\n\n }\n\n }\n\n ret\n\n}\n\n\n\npub (crate) fn escape(text: &str) -> String\n\n{\n\n let mut ret = String::with_capacity(text.len());\n\n let mut chars : Vec<_> = text.chars().rev().collect();\n\n while let Some(c) = chars.pop()\n\n {\n\n match c\n\n {\n\n '\\\\' => ret.extend(&['\\\\', '\\\\']),\n\n '\\n' => ret.extend(&['\\\\', 'n']),\n\n '\\r' => ret.extend(&['\\\\', 'r']),\n\n '\\t' => ret.extend(&['\\\\', 't']),\n\n '\\\"' => ret.extend(&['\\\\', '\"']),\n\n _ => ret.push(c)\n\n }\n\n }\n\n ret\n\n}\n\n\n", "file_path": "src/strings.rs", "rank": 78, "score": 33173.23892615882 }, { "content": " slice_any_to_end(&text.chars().collect::<Vec<char>>(), start).map(|chars| chars.iter().collect()).unwrap_or_else(|| \"\".to_string())\n\n}\n\n\n\npub (crate) fn unescape(text: &str) -> String\n\n{\n\n let mut ret = String::with_capacity(text.len());\n\n let mut chars : Vec<_> = text.chars().rev().collect();\n\n while let Some(c) = chars.pop()\n\n {\n\n if c != '\\\\'\n\n {\n\n ret.push(c);\n\n }\n\n else if let Some(c2) = chars.pop()\n\n {\n\n match c2\n\n {\n\n '\\\\' => ret.push(c),\n\n 'n' => ret.push('\\n'),\n\n 'r' => ret.push('\\r'),\n", "file_path": "src/strings.rs", "rank": 79, "score": 33172.441849768446 }, { "content": "pub (crate) fn slice_any<T>(collection : &[T], start : i64, end : i64) -> Option<&[T]>\n\n{\n\n let u_start = if start < 0 {collection.len() - (-start as usize)} else {start as usize};\n\n let u_end = if end < 0 {collection.len() - (-end as usize)} else {end as usize};\n\n \n\n collection.get(u_start..u_end)\n\n}\n\npub (crate) fn slice(text : &str, start : i64, end : i64) -> String\n\n{\n\n slice_any(&text.chars().collect::<Vec<char>>(), start, end).map(|chars| chars.iter().collect()).unwrap_or_else(|| \"\".to_string())\n\n}\n\n\n\npub (crate) fn slice_any_to_end<T>(collection : &[T], start : i64) -> Option<&[T]>\n\n{\n\n let u_start = if start < 0 {collection.len() - (-start as usize)} else {start as usize};\n\n \n\n collection.get(u_start..)\n\n}\n\npub (crate) fn slice_to_end(text : &str, start : i64) -> String\n\n{\n", "file_path": "src/strings.rs", "rank": 80, "score": 33167.42112859397 }, { "content": "use std::collections::{HashMap, HashSet, BTreeMap, BTreeSet};\n\nuse std::rc::Rc;\n\nuse std::cell::RefCell;\n\n\n\nuse super::{strings::*, ast::*, parser::*, bytecode::*, compiler::*};\n\n\n\nmod bindings;\n\nmod internal;\n\nmod simulation;\n\nmod manipulation;\n\nmod jumping;\n\npub (crate) mod types;\n\nmod variableaccess;\n\n\n\npub use self::types::*;\n\nuse variableaccess::ValueLoc;\n\n\n\n/// Returned by the step() method of an interpreter.\n\npub type StepResult = Result<(), String>;\n", "file_path": "src/interpreter.rs", "rank": 81, "score": 29315.40243625709 }, { "content": " pub (crate) trivial_simple_bindings: Box<BTreeMap<usize, TrivialSimpleBinding>>,\n\n \n\n // TODO: same map\n\n pub (crate) arrow_bindings: Box<BTreeMap<usize, Rc<RefCell<ArrowBinding>>>>,\n\n pub (crate) trivial_arrow_bindings: Box<BTreeMap<usize, TrivialArrowBinding>>,\n\n \n\n string_index: usize,\n\n string_table : Box<HashMap<String, usize>>,\n\n string_table_reverse : Box<BTreeMap<usize, String>>,\n\n \n\n parser: Box<Parser>,\n\n}\n\n\n\nimpl GlobalState {\n\n fn new(parser : Parser) -> GlobalState\n\n {\n\n GlobalState {\n\n instance_id : 1,\n\n instances : BTreeMap::new(),\n\n instances_by_type : Box::new(BTreeMap::new()),\n", "file_path": "src/interpreter.rs", "rank": 82, "score": 29308.43429076792 }, { "content": " return string.clone();\n\n }\n\n format!(\"<index {} with no associated string>\", index)\n\n }\n\n pub (crate) fn insert_bare_global(&mut self, index : usize)\n\n {\n\n self.barevariables.insert(index, Value::default());\n\n }\n\n pub (crate) fn insert_global(&mut self, index : usize)\n\n {\n\n self.variables.insert(index, Value::default());\n\n }\n\n pub (crate) fn insert_globalfunc(&mut self, index : usize, func : FuncSpec)\n\n {\n\n self.functions.insert(index, Value::new_funcval(None, func));\n\n }\n\n}\n\n\n", "file_path": "src/interpreter.rs", "rank": 83, "score": 29307.99449127732 }, { "content": " }\n\n }\n\n /// Loads new code into the interpreter.\n\n /// \n\n /// Unloads the old bytecode and all interpreter state, no matter what state the interpreter was in.\n\n /// \n\n /// Does not unload the parser that was loaded into the interpreter upon creation.\n\n /// \n\n /// Does not unload internal function bindings.\n\n /// \n\n /// Does not reset global state (objects/instances).\n\n pub fn restart(&mut self, code: &Code)\n\n {\n\n self.top_frame = Frame::new_root(code);\n\n self.frames = fat_vec();\n\n self.last_error = None;\n\n }\n\n pub fn restart_full_of_nops(&mut self, count : usize)\n\n {\n\n let mut code = Code::new();\n", "file_path": "src/interpreter.rs", "rank": 84, "score": 29307.722747870266 }, { "content": " }\n\n }\n\n pub fn step_cached_until_error_or_exit(&mut self) -> Result<u64, String>\n\n {\n\n if !self.top_frame.code.cached\n\n {\n\n Interpreter::prepare_cache(&mut self.top_frame.code);\n\n for spec in self.global.objects.iter_mut()\n\n {\n\n for func in spec.1.functions.iter_mut()\n\n {\n\n Interpreter::prepare_cache(&mut func.1.code);\n\n }\n\n }\n\n for func in self.global.functions.iter_mut()\n\n {\n\n match func.1\n\n {\n\n Value::Func(func) => Interpreter::prepare_cache(&mut func.userdefdata.code),\n\n _ => panic!()\n", "file_path": "src/interpreter.rs", "rank": 85, "score": 29307.54568878626 }, { "content": " };\n\n ret\n\n }\n\n }\n\n /// Steps the interpreter by a single operation.\n\n ///\n\n /// Handles flow control after stepping, not before.\n\n ///\n\n /// If execution can continue, Ok(false) is returned. Stepping the interpreter past this point will trigger an error.\n\n ///\n\n /// If execution has exited normally, Ok(true) is returned.\n\n ///\n\n /// If an error occurs, Err(String) is returned. This includes graceful exits (end of code).\n\n pub fn step(&mut self) -> StepResult\n\n {\n\n #[cfg(feature = \"track_op_performance\")]\n\n unsafe { LAST_TIME = core::arch::x86_64::_rdtsc() };\n\n \n\n let ret = self.step_internal();\n\n match ret\n", "file_path": "src/interpreter.rs", "rank": 86, "score": 29307.423970724223 }, { "content": " pub fn step_until_error_or_exit(&mut self) -> Result<u64, String>\n\n {\n\n #[cfg(feature = \"track_op_performance\")]\n\n unsafe { LAST_TIME = core::arch::x86_64::_rdtsc() };\n\n \n\n let mut steps = 0;\n\n let mut ret = Ok(());\n\n while ret.is_ok()\n\n {\n\n ret = self.step_internal();\n\n steps += 1;\n\n }\n\n if let Err(err) = ret\n\n {\n\n if err.as_str() == \"GRACEFUL_EXIT\"\n\n {\n\n return Ok(steps);\n\n }\n\n let pc = self.get_pc();\n\n if let Some(info) = self.top_frame.code.get_debug_info(pc)\n", "file_path": "src/interpreter.rs", "rank": 87, "score": 29304.319396822753 }, { "content": "\n\nimpl Interpreter {\n\n /// Creates a new interpreter \n\n pub fn new(parser : Parser) -> Interpreter\n\n {\n\n println!(\"- sizeof Value {}\", std::mem::size_of::<Value>());\n\n println!(\"- sizeof Variable {}\", std::mem::size_of::<Variable>());\n\n println!(\"- sizeof FuncSpec {}\", std::mem::size_of::<FuncSpec>());\n\n println!(\"- sizeof Frame {}\", std::mem::size_of::<Frame>());\n\n println!(\"- sizeof Vec<StackValue> {}\", std::mem::size_of::<Vec<StackValue>>());\n\n println!(\"- sizeof HashMap<HashableValue, Value> {}\", std::mem::size_of::<HashMap<HashableValue, Value>>());\n\n println!(\"- sizeof NonArrayVariable {}\", std::mem::size_of::<NonArrayVariable>());\n\n println!(\"- sizeof Interpreter {}\", std::mem::size_of::<Interpreter>());\n\n println!(\"- sizeof GlobalState {}\", std::mem::size_of::<GlobalState>());\n\n simulation::build_opfunc_table();\n\n Interpreter {\n\n top_frame : Frame::new_root(&Code::new()),\n\n frames : fat_vec(),\n\n global : GlobalState::new(parser),\n\n last_error : None,\n", "file_path": "src/interpreter.rs", "rank": 88, "score": 29303.579656004524 }, { "content": " #[allow(clippy::ptr_arg)]\n\n pub (crate) fn get_string_index(&mut self, string : &String) -> usize\n\n {\n\n if let Some(index) = self.string_table.get(string)\n\n {\n\n *index\n\n }\n\n else\n\n {\n\n let index = self.string_index;\n\n self.string_index += 1;\n\n self.string_table.insert(string.clone(), index);\n\n self.string_table_reverse.insert(index, string.clone());\n\n index\n\n }\n\n }\n\n pub (crate) fn get_string(&self, index : usize) -> String\n\n {\n\n if let Some(string) = self.string_table_reverse.get(&index)\n\n {\n", "file_path": "src/interpreter.rs", "rank": 89, "score": 29303.275261039096 }, { "content": " #[inline]\n\n fn step_internal(&mut self) -> StepResult\n\n {\n\n #[cfg(not(feature = \"track_op_performance\"))]\n\n {\n\n unsafe { simulation::OPTABLE[self.pull_single_from_code() as usize](self) }\n\n }\n\n #[cfg(feature = \"track_op_performance\")]\n\n {\n\n let op = self.pull_single_from_code();\n\n \n\n let ret = unsafe { simulation::OPTABLE[op as usize](self) };\n\n \n\n unsafe\n\n {\n\n let end_time = core::arch::x86_64::_rdtsc();\n\n let real_time = end_time - LAST_TIME;\n\n LAST_TIME = end_time;\n\n OP_MAP_HITS[op as usize] += 1;\n\n OP_MAP[op as usize] += real_time;\n", "file_path": "src/interpreter.rs", "rank": 90, "score": 29300.219104190517 }, { "content": " \n\n let code = compile_bytecode(&ast, &mut self.global)?;\n\n self.restart(&code);\n\n Ok(code)\n\n }\n\n /// Clears global state (objects/instances).\n\n /// \n\n /// This GRACELESSLY deletes all objects and instances, even if they contained code that has not yet finished running or needs special destruction.\n\n /// \n\n /// Does not unload the parser that was loaded into the interpreter upon creation.\n\n /// \n\n /// Does not unload internal function bindings.\n\n /// \n\n /// Does not reset global state (objects/instances).\n\n pub fn clear_global_state(&mut self)\n\n {\n\n let mut parser = Parser::default();\n\n std::mem::swap(&mut parser, &mut self.global.parser);\n\n self.global = GlobalState::new(parser);\n\n }\n", "file_path": "src/interpreter.rs", "rank": 91, "score": 29299.904390108877 }, { "content": " for _ in 0..count\n\n {\n\n code.push_for_nop_thing_only(NOP);\n\n }\n\n code.push_for_nop_thing_only(EXIT);\n\n self.restart(&code);\n\n }\n\n \n\n pub fn restart_in_place(&mut self)\n\n {\n\n self.restart(&self.top_frame.code.clone());\n\n }\n\n \n\n pub fn restart_into_string(&mut self, text: &str) -> Result<Code, String>\n\n {\n\n let program_lines : Vec<String> = text.lines().map(|x| x.to_string()).collect();\n\n \n\n let tokens = self.global.parser.tokenize(&program_lines, false)?;\n\n \n\n let ast = self.global.parser.parse_program(&tokens, &program_lines, false)?.ok_or_else(|| \"failed to parse program\".to_string())?;\n", "file_path": "src/interpreter.rs", "rank": 92, "score": 29299.124563049787 }, { "content": " \n\n objects : Box::new(BTreeMap::new()),\n\n variables : BTreeMap::new(),\n\n barevariables : BTreeMap::new(),\n\n functions : BTreeMap::new(),\n\n \n\n bindings : Box::new(BTreeMap::new()),\n\n trivial_bindings : Box::new(BTreeMap::new()),\n\n simple_bindings : Box::new(BTreeMap::new()),\n\n trivial_simple_bindings : Box::new(BTreeMap::new()),\n\n arrow_bindings : Box::new(BTreeMap::new()),\n\n trivial_arrow_bindings : Box::new(BTreeMap::new()),\n\n \n\n parser : Box::new(parser),\n\n \n\n string_index : 1,\n\n string_table : Box::new(HashMap::new()),\n\n string_table_reverse : Box::new(BTreeMap::new()),\n\n }\n\n }\n", "file_path": "src/interpreter.rs", "rank": 93, "score": 29297.486693619387 }, { "content": " }\n\n code.cached = true;\n\n \n\n let code_data = Rc::get_mut(&mut code.code).unwrap();\n\n \n\n for addr in code.booklet.iter()\n\n {\n\n let op = &mut code_data[*addr];\n\n let opfunc = unsafe { simulation::OPTABLE[*op as usize] };\n\n let ptr = opfunc as *const OpFunc;\n\n *op = ptr as u64;\n\n }\n\n }\n\n \n\n pub fn step_cached(&mut self) -> StepResult\n\n {\n\n let f : OpFunc = unsafe { std::mem::transmute(self.top_frame.code[self.top_frame.pc] as *const OpFunc) };\n\n self.top_frame.pc += 1;\n\n \n\n #[cfg(not(feature = \"track_op_performance\"))]\n", "file_path": "src/interpreter.rs", "rank": 94, "score": 29297.210784089966 }, { "content": " }\n\n }\n\n }\n\n \n\n #[cfg(feature = \"track_op_performance\")]\n\n unsafe { LAST_TIME = core::arch::x86_64::_rdtsc() };\n\n \n\n let mut steps = 0;\n\n let mut ret = Ok(());\n\n while ret.is_ok()\n\n {\n\n ret = self.step_cached();\n\n steps += 1;\n\n }\n\n if let Err(err) = ret\n\n {\n\n if err.as_str() == \"GRACEFUL_EXIT\"\n\n {\n\n return Ok(steps);\n\n }\n", "file_path": "src/interpreter.rs", "rank": 95, "score": 29295.34117848512 }, { "content": " {\n\n f(self)\n\n }\n\n #[cfg(feature = \"track_op_performance\")]\n\n {\n\n let op = f as *const OpFunc as usize;\n\n let op = * unsafe { simulation::REVERSE_OPTABLE.as_ref().unwrap().get(&op).unwrap() };\n\n \n\n let ret = f(self);\n\n \n\n unsafe\n\n {\n\n let end_time = core::arch::x86_64::_rdtsc();\n\n let real_time = end_time - LAST_TIME;\n\n LAST_TIME = end_time;\n\n OP_MAP_HITS[op as usize] += 1;\n\n OP_MAP[op as usize] += real_time;\n\n };\n\n \n\n ret\n", "file_path": "src/interpreter.rs", "rank": 96, "score": 29294.922835269994 }, { "content": " {\n\n self.last_error = Some(format!(\"{}\\nline: {}\\ncolumn: {}\\npc: 0x{:X} (off by one instruction)\", err, info.last_line, info.last_index, pc));\n\n }\n\n else\n\n {\n\n self.last_error = Some(format!(\"{}\\n(unknown or missing context - code probably desynced - location {} - map {:?})\", err, pc, self.top_frame.code.debug));\n\n }\n\n return Err(err);\n\n }\n\n else\n\n {\n\n panic!();\n\n }\n\n }\n\n \n\n pub fn prepare_cache(code : &mut Code)\n\n {\n\n if code.cached\n\n {\n\n return\n", "file_path": "src/interpreter.rs", "rank": 97, "score": 29291.89966243928 }, { "content": " let mut out = Vec::new();\n\n for word in self.top_frame.code.get(..).unwrap()\n\n {\n\n out.extend(&word.to_ne_bytes());\n\n }\n\n out\n\n }\n\n \n\n #[cfg(feature = \"track_op_performance\")]\n\n pub fn print_op_perf_log(&self)\n\n {\n\n let op_map = unsafe { OP_MAP.iter().enumerate().filter(|(k, _v)| OP_MAP_HITS[*k] != 0).map(|(k, v)| (k, if *v > u64::MAX >> 2 { 0 } else { *v } )) };\n\n // messy per hit\n\n //let op_map = op_map.map(|(k, v)| (k, *v as f64 / 1_000_000.0 / (self.op_map_hits[k] as f64).sqrt())).collect::<Vec<_>>();\n\n // per hit\n\n //let op_map = unsafe { op_map.map(|(k, v)| (k, v as f64 / OP_MAP_HITS[k] as f64)) };\n\n // raw\n\n let op_map = op_map.map(|(k, v)| (k, v as f64 / 3.3 / 1_000.0));\n\n //let op_map = op_map.map(|(k, v)| (k, v as f64 / 1_000.0));\n\n // mod (hacked together)\n", "file_path": "src/interpreter.rs", "rank": 98, "score": 29290.729763876152 }, { "content": " // let op_map = op_map.map(|(k, v)| (k, (*v as f64 / *self.op_map_hits.get(k).unwrap() as f64 - 80.0) * *self.op_map_hits.get(k).unwrap() as f64 / 1_000_000.0)).collect::<Vec<_>>();\n\n //let op_map = op_map.map(|(k, v)| (k, (*v as f64 / *self.op_map_hits.get(k).unwrap() as f64 - 80.0))).collect::<Vec<_>>();\n\n let mut op_map = op_map.collect::<Vec<_>>();\n\n op_map.retain(|x| !x.1.is_nan());\n\n op_map.sort_by(|a, b| a.1.partial_cmp(&b.1).unwrap());\n\n let mut total_time = 0.0;\n\n for (op, time) in op_map\n\n {\n\n println!(\"{:05.05}:\\t{} / {}\", time, crate::bytecode::op_to_name(op as u8), unsafe { OP_MAP_HITS[op] });\n\n total_time += time;\n\n }\n\n println!(\"total time: {:05.05}\", total_time/1_000_000.0);\n\n println!(\"(units may vary)\");\n\n }\n\n}\n", "file_path": "src/interpreter.rs", "rank": 99, "score": 29289.748352643168 } ]
Rust
src/terrain/main.rs
fkaa/gfx_examples
dea8a8393e34d011873b9f9c39f945dc6755f3e2
extern crate cgmath; #[macro_use] extern crate gfx; extern crate gfx_window_glutin; extern crate glutin; extern crate time; extern crate rand; extern crate genmesh; extern crate noise; use rand::Rng; use cgmath::FixedArray; use cgmath::{Matrix4, Point3, Vector3}; use cgmath::{Transform, AffineMatrix3}; use gfx::traits::{Stream, ToIndexSlice, ToSlice, FactoryExt}; use genmesh::{Vertices, Triangulate}; use genmesh::generators::{Plane, SharedVertex, IndexedPolygon}; use time::precise_time_s; use noise::{Seed, perlin2}; gfx_vertex!( Vertex { a_Pos@ pos: [f32; 3], a_Color@ color: [f32; 3], }); gfx_parameters!( Params { u_Model@ model: [[f32; 4]; 4], u_View@ view: [[f32; 4]; 4], u_Proj@ proj: [[f32; 4]; 4], }); fn calculate_color(height: f32) -> [f32; 3] { if height > 8.0 { [0.9, 0.9, 0.9] } else if height > 0.0 { [0.7, 0.7, 0.7] } else if height > -5.0 { [0.2, 0.7, 0.2] } else { [0.2, 0.2, 0.7] } } pub fn main() { let (mut stream, mut device, mut factory) = gfx_window_glutin::init( glutin::Window::new().unwrap()); stream.out.window.set_title("Terrain example"); let rand_seed = rand::thread_rng().gen(); let seed = Seed::new(rand_seed); let plane = Plane::subdivide(256, 256); let vertex_data: Vec<Vertex> = plane.shared_vertex_iter() .map(|(x, y)| { let h = perlin2(&seed, &[x, y]) * 32.0; Vertex { pos: [25.0 * x, 25.0 * y, h], color: calculate_color(h), } }) .collect(); let index_data: Vec<u32> = plane.indexed_polygon_iter() .triangulate() .vertices() .map(|i| i as u32) .collect(); let slice = index_data.to_slice(&mut factory, gfx::PrimitiveType::TriangleList); let mesh = factory.create_mesh(&vertex_data); let program = { let vs = gfx::ShaderSource { glsl_120: Some(include_bytes!("terrain_120.glslv")), glsl_150: Some(include_bytes!("terrain_150.glslv")), .. gfx::ShaderSource::empty() }; let fs = gfx::ShaderSource { glsl_120: Some(include_bytes!("terrain_120.glslf")), glsl_150: Some(include_bytes!("terrain_150.glslf")), .. gfx::ShaderSource::empty() }; factory.link_program_source(vs, fs).unwrap() }; let data = Params { model: Matrix4::identity().into_fixed(), view: Matrix4::identity().into_fixed(), proj: cgmath::perspective(cgmath::deg(60.0f32), stream.get_aspect_ratio(), 0.1, 1000.0 ).into_fixed(), _r: std::marker::PhantomData, }; let mut batch = gfx::batch::Full::new(mesh, program, data) .unwrap(); batch.slice = slice; batch.state = gfx::DrawState::new().depth(gfx::state::Comparison::LessEqual, true); 'main: loop { for event in stream.out.window.poll_events() { match event { glutin::Event::KeyboardInput(_, _, Some(glutin::VirtualKeyCode::Escape)) => break 'main, glutin::Event::Closed => break 'main, _ => {}, } } let time = precise_time_s() as f32; let x = time.sin(); let y = time.cos(); let view: AffineMatrix3<f32> = Transform::look_at( &Point3::new(x * 32.0, y * 32.0, 16.0), &Point3::new(0.0, 0.0, 0.0), &Vector3::unit_z(), ); batch.params.view = view.mat.into_fixed(); stream.clear(gfx::ClearData { color: [0.3, 0.3, 0.3, 1.0], depth: 1.0, stencil: 0, }); stream.draw(&batch).unwrap(); stream.present(&mut device); } }
extern crate cgmath; #[macro_use] extern crate gfx; extern crate gfx_window_glutin; extern crate glutin; extern crate time; extern crate rand; extern crate genmesh; extern crate noise; use rand::Rng; use cgmath::FixedArray; use cgmath::{Matrix4, Point3, Vector3}; use cgmath::{Transform, AffineMatrix3}; use gfx::traits::{Stream, ToIndexSlice, ToSlice, FactoryExt}; use genmesh::{Vertices, Triangulate}; use genmesh::generators::{Plane, SharedVertex, IndexedPolygon}; use time::precise_time_s; use noise::{Seed, perlin2}; gfx_vertex!( Vertex { a_Pos@ pos: [f32; 3], a_Color@ color: [f32; 3], }); gfx_parameters!( Params { u_Model@ model: [[f32; 4]; 4], u_View@ view: [[f32; 4]; 4], u_Proj@ proj: [[f32; 4]; 4], }); fn
pub fn main() { let (mut stream, mut device, mut factory) = gfx_window_glutin::init( glutin::Window::new().unwrap()); stream.out.window.set_title("Terrain example"); let rand_seed = rand::thread_rng().gen(); let seed = Seed::new(rand_seed); let plane = Plane::subdivide(256, 256); let vertex_data: Vec<Vertex> = plane.shared_vertex_iter() .map(|(x, y)| { let h = perlin2(&seed, &[x, y]) * 32.0; Vertex { pos: [25.0 * x, 25.0 * y, h], color: calculate_color(h), } }) .collect(); let index_data: Vec<u32> = plane.indexed_polygon_iter() .triangulate() .vertices() .map(|i| i as u32) .collect(); let slice = index_data.to_slice(&mut factory, gfx::PrimitiveType::TriangleList); let mesh = factory.create_mesh(&vertex_data); let program = { let vs = gfx::ShaderSource { glsl_120: Some(include_bytes!("terrain_120.glslv")), glsl_150: Some(include_bytes!("terrain_150.glslv")), .. gfx::ShaderSource::empty() }; let fs = gfx::ShaderSource { glsl_120: Some(include_bytes!("terrain_120.glslf")), glsl_150: Some(include_bytes!("terrain_150.glslf")), .. gfx::ShaderSource::empty() }; factory.link_program_source(vs, fs).unwrap() }; let data = Params { model: Matrix4::identity().into_fixed(), view: Matrix4::identity().into_fixed(), proj: cgmath::perspective(cgmath::deg(60.0f32), stream.get_aspect_ratio(), 0.1, 1000.0 ).into_fixed(), _r: std::marker::PhantomData, }; let mut batch = gfx::batch::Full::new(mesh, program, data) .unwrap(); batch.slice = slice; batch.state = gfx::DrawState::new().depth(gfx::state::Comparison::LessEqual, true); 'main: loop { for event in stream.out.window.poll_events() { match event { glutin::Event::KeyboardInput(_, _, Some(glutin::VirtualKeyCode::Escape)) => break 'main, glutin::Event::Closed => break 'main, _ => {}, } } let time = precise_time_s() as f32; let x = time.sin(); let y = time.cos(); let view: AffineMatrix3<f32> = Transform::look_at( &Point3::new(x * 32.0, y * 32.0, 16.0), &Point3::new(0.0, 0.0, 0.0), &Vector3::unit_z(), ); batch.params.view = view.mat.into_fixed(); stream.clear(gfx::ClearData { color: [0.3, 0.3, 0.3, 1.0], depth: 1.0, stencil: 0, }); stream.draw(&batch).unwrap(); stream.present(&mut device); } }
calculate_color(height: f32) -> [f32; 3] { if height > 8.0 { [0.9, 0.9, 0.9] } else if height > 0.0 { [0.7, 0.7, 0.7] } else if height > -5.0 { [0.2, 0.7, 0.2] } else { [0.2, 0.2, 0.7] } }
function_block-function_prefixed
[ { "content": "fn calculate_color(height: f32) -> [f32; 3] {\n\n if height > 8.0 {\n\n [0.9, 0.9, 0.9] // white\n\n } else if height > 0.0 {\n\n [0.7, 0.7, 0.7] // greay\n\n } else if height > -5.0 {\n\n [0.2, 0.7, 0.2] // green\n\n } else {\n\n [0.2, 0.2, 0.7] // blue\n\n }\n\n}\n\n\n", "file_path": "src/deferred/main.rs", "rank": 0, "score": 115088.58731767075 }, { "content": "fn calculate_normal(seed: &Seed, x: f32, y: f32)-> [f32; 3] {\n\n // determine sample points\n\n let s_x0 = x - 0.001;\n\n let s_x1 = x + 0.001;\n\n let s_y0 = y - 0.001;\n\n let s_y1 = y + 0.001;\n\n\n\n // calculate gradient in point\n\n let dzdx = (perlin2(seed, &[s_x1, y]) - perlin2(seed, &[s_x0, y]))/(s_x1 - s_x0);\n\n let dzdy = (perlin2(seed, &[x, s_y1]) - perlin2(seed, &[x, s_y0]))/(s_y1 - s_y0);\n\n\n\n // cross gradient vectors to get normal\n\n let normal = Vector3::new(1.0, 0.0, dzdx).cross(&Vector3::new(0.0, 1.0, dzdy)).normalize();\n\n\n\n return normal.into_fixed();\n\n}\n\n\n", "file_path": "src/deferred/main.rs", "rank": 2, "score": 88059.69703833856 }, { "content": "fn make_entity<R: gfx::Resources>(dynamic: bool, mesh: &gfx::Mesh<R>, slice: &gfx::Slice<R>,\n\n prog_fw: &gfx::handle::Program<R>, prog_sh: &gfx::handle::Program<R>,\n\n num_lights: usize, light_buf: &gfx::handle::Buffer<R, LightParam>,\n\n shadow: &gfx::shade::TextureParam<R>, transform: cgmath::Matrix4<f32>)\n\n -> Entity<R>\n\n{\n\n use cgmath::FixedArray;\n\n Entity {\n\n dynamic: dynamic,\n\n mx_to_world: transform,\n\n batch_forward: {\n\n let data = ForwardParams {\n\n transform: cgmath::Matrix4::identity().into_fixed(),\n\n model_transform: cgmath::Matrix4::identity().into_fixed(),\n\n color: [1.0, 1.0, 1.0, 1.0],\n\n num_lights: num_lights as i32,\n\n light_buf: light_buf.clone(),\n\n shadow: shadow.clone(),\n\n _r: std::marker::PhantomData,\n\n };\n", "file_path": "src/shadow/main.rs", "rank": 3, "score": 62416.1006434553 }, { "content": "fn create_cube<R: gfx::Resources, F: gfx::Factory<R>>(factory: &mut F)\n\n -> (gfx::Mesh<R>, gfx::Slice<R>)\n\n{\n\n let vertex_data = [\n\n // top (0, 0, 1)\n\n Vertex::new([-1, -1, 1], [0, 0, 1]),\n\n Vertex::new([ 1, -1, 1], [0, 0, 1]),\n\n Vertex::new([ 1, 1, 1], [0, 0, 1]),\n\n Vertex::new([-1, 1, 1], [0, 0, 1]),\n\n // bottom (0, 0, -1)\n\n Vertex::new([-1, 1, -1], [0, 0, -1]),\n\n Vertex::new([ 1, 1, -1], [0, 0, -1]),\n\n Vertex::new([ 1, -1, -1], [0, 0, -1]),\n\n Vertex::new([-1, -1, -1], [0, 0, -1]),\n\n // right (1, 0, 0)\n\n Vertex::new([ 1, -1, -1], [1, 0, 0]),\n\n Vertex::new([ 1, 1, -1], [1, 0, 0]),\n\n Vertex::new([ 1, 1, 1], [1, 0, 0]),\n\n Vertex::new([ 1, -1, 1], [1, 0, 0]),\n\n // left (-1, 0, 0)\n", "file_path": "src/shadow/main.rs", "rank": 4, "score": 60122.47693313986 }, { "content": "fn create_g_buffer<R: gfx::Resources, F: Factory<R>>(\n\n width: gfx::tex::Size, height: gfx::tex::Size, factory: &mut F)\n\n -> (gfx::Frame<R>, gfx::handle::Texture<R>, gfx::handle::Texture<R>,\n\n gfx::handle::Texture<R>, gfx::handle::Texture<R>) {\n\n let texture_info_float = gfx::tex::TextureInfo {\n\n width: width,\n\n height: height,\n\n depth: 1,\n\n levels: 1,\n\n kind: gfx::tex::Kind::D2,\n\n format: gfx::tex::Format::Float(gfx::tex::Components::RGBA, gfx::attrib::FloatSize::F32),\n\n };\n\n let texture_info_depth = gfx::tex::TextureInfo {\n\n width: width,\n\n height: height,\n\n depth: 1,\n\n levels: 1,\n\n kind: gfx::tex::Kind::D2,\n\n format: gfx::tex::Format::DEPTH24_STENCIL8,\n\n };\n", "file_path": "src/deferred/main.rs", "rank": 5, "score": 56785.18094203794 }, { "content": "fn create_plane<R: gfx::Resources, F: gfx::Factory<R>>(factory: &mut F, size: i8)\n\n -> (gfx::Mesh<R>, gfx::Slice<R>)\n\n{\n\n let vertex_data = [\n\n Vertex::new([ size, -size, 0], [0, 0, 1]),\n\n Vertex::new([ size, size, 0], [0, 0, 1]),\n\n Vertex::new([-size, -size, 0], [0, 0, 1]),\n\n Vertex::new([-size, size, 0], [0, 0, 1]),\n\n ];\n\n\n\n let mesh = factory.create_mesh(&vertex_data);\n\n let slice = mesh.to_slice(gfx::PrimitiveType::TriangleStrip);\n\n\n\n (mesh, slice)\n\n}\n\n\n\n//----------------------------------------\n\n// Section-3: scene definitions\n\n\n", "file_path": "src/shadow/main.rs", "rank": 6, "score": 56037.32247167439 }, { "content": "fn create_res_buffer<R: gfx::Resources, F: Factory<R>>(\n\n width: gfx::tex::Size, height: gfx::tex::Size,\n\n factory: &mut F, texture_depth: &gfx::handle::Texture<R>)\n\n -> (gfx::Frame<R>, gfx::handle::Texture<R>, gfx::handle::Texture<R>) {\n\n let texture_info_float = gfx::tex::TextureInfo {\n\n width: width,\n\n height: height,\n\n depth: 1,\n\n levels: 1,\n\n kind: gfx::tex::Kind::D2,\n\n format: gfx::tex::Format::Float(gfx::tex::Components::RGBA, gfx::attrib::FloatSize::F32),\n\n };\n\n\n\n let texture_frame = factory.create_texture(texture_info_float).unwrap();\n\n\n\n let frame = gfx::Frame {\n\n colors: vec![gfx::Plane::Texture(texture_frame.clone(), 0, None)],\n\n depth: Some(gfx::Plane::Texture(texture_depth.clone(), 0, None)),\n\n .. gfx::Frame::empty(width, height)\n\n };\n\n\n\n (frame, texture_frame, texture_depth.clone())\n\n}\n\n\n", "file_path": "src/deferred/main.rs", "rank": 7, "score": 55479.96523866338 }, { "content": "fn gfx_main(mut glfw: glfw::Glfw,\n\n window: glfw::Window,\n\n events: Receiver<(f64, glfw::WindowEvent)>,\n\n dimension: i16) {\n\n let (mut stream, mut device, mut factory) = gfx_window_glfw::init(window);\n\n let state = gfx::DrawState::new().depth(gfx::state::Comparison::LessEqual, true);\n\n\n\n let vertex_data = [\n\n Vertex { pos: Floater::cast3([-1, 1, -1]) },\n\n Vertex { pos: Floater::cast3([ 1, 1, -1]) },\n\n Vertex { pos: Floater::cast3([ 1, 1, 1]) },\n\n ];\n\n\n\n let mesh = factory.create_mesh(&vertex_data);\n\n let slice = mesh.to_slice(gfx::PrimitiveType::TriangleList);\n\n\n\n let program = factory.link_program(VERTEX_SRC, FRAGMENT_SRC).unwrap();\n\n let view: AffineMatrix3<f32> = Transform::look_at(\n\n &Point3::new(0f32, -5.0, 0.0),\n\n &Point3::new(0f32, 0.0, 0.0),\n", "file_path": "src/performance/main.rs", "rank": 8, "score": 50766.432085858614 }, { "content": "fn load_texture<R, F>(factory: &mut F, data: &[u8]) -> Result<gfx::handle::Texture<R>, String>\n\n where R: gfx::Resources, F: gfx::device::Factory<R> {\n\n let img = image::load(Cursor::new(data), image::PNG).unwrap();\n\n\n\n let img = match img {\n\n image::DynamicImage::ImageRgba8(img) => img,\n\n img => img.to_rgba()\n\n };\n\n let (width, height) = img.dimensions();\n\n let tex_info = gfx::tex::TextureInfo {\n\n width: width as u16,\n\n height: height as u16,\n\n depth: 1,\n\n levels: 1,\n\n kind: gfx::tex::Kind::D2,\n\n format: gfx::tex::RGBA8\n\n };\n\n\n\n Ok(factory.create_texture_static(tex_info, &img).unwrap())\n\n}\n\n\n", "file_path": "src/flowmap/main.rs", "rank": 9, "score": 41849.94353643523 }, { "content": "pub fn main() {\n\n let (mut stream, mut device, mut factory) = gfx_window_glutin::init(\n\n glutin::WindowBuilder::new()\n\n .with_title(\"Flowmap example\".to_string())\n\n .with_dimensions(800, 600).build().unwrap()\n\n );\n\n\n\n let vertex_data = [\n\n Vertex::new([-1.0, -1.0], [0.0, 0.0]),\n\n Vertex::new([ 1.0, -1.0], [1.0, 0.0]),\n\n Vertex::new([ 1.0, 1.0], [1.0, 1.0]),\n\n\n\n Vertex::new([-1.0, -1.0], [0.0, 0.0]),\n\n Vertex::new([ 1.0, 1.0], [1.0, 1.0]),\n\n Vertex::new([-1.0, 1.0], [0.0, 1.0]),\n\n ];\n\n\n\n let mesh = factory.create_mesh(&vertex_data);\n\n\n\n let water_texture = load_texture(&mut factory, &include_bytes!(\"image/water.png\")[..]).unwrap();\n", "file_path": "src/flowmap/main.rs", "rank": 10, "score": 39635.24930888158 }, { "content": "pub fn main() {\n\n let ref mut args = env::args();\n\n let args_count = env::args().count();\n\n if args_count == 1 {\n\n println!(\"gfx-perf [gl|gfx] <size>\");\n\n return;\n\n }\n\n\n\n let mode = args.nth(1).unwrap();\n\n let count: i32 = if args_count == 3 {\n\n FromStr::from_str(&args.next().unwrap()).ok()\n\n } else {\n\n None\n\n }.unwrap_or(10000);\n\n\n\n let count = ((count as f64).sqrt() / 2.) as i16;\n\n\n\n let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS)\n\n .ok().expect(\"Failed to initialize glfw-rs\");\n\n\n", "file_path": "src/performance/main.rs", "rank": 11, "score": 39635.24930888158 }, { "content": "pub fn main() {\n\n let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();\n\n glfw.set_error_callback(glfw::FAIL_ON_ERRORS);\n\n let (mut window, events) = glfw\n\n .create_window(640, 480, \"Cube example\", glfw::WindowMode::Windowed)\n\n .unwrap();\n\n window.set_key_polling(true);\n\n\n\n let (mut stream, mut device, mut factory) = gfx_window_glfw::init(window);\n\n\n\n let vertex_data = [\n\n // top (0, 0, 1)\n\n Vertex::new([-1, -1, 1], [0, 0]),\n\n Vertex::new([ 1, -1, 1], [1, 0]),\n\n Vertex::new([ 1, 1, 1], [1, 1]),\n\n Vertex::new([-1, 1, 1], [0, 1]),\n\n // bottom (0, 0, -1)\n\n Vertex::new([-1, 1, -1], [1, 0]),\n\n Vertex::new([ 1, 1, -1], [0, 0]),\n\n Vertex::new([ 1, -1, -1], [0, 1]),\n", "file_path": "src/cube/main.rs", "rank": 12, "score": 39635.24930888158 }, { "content": "pub fn main() {\n\n env_logger::init().unwrap();\n\n let (gfx::OwnedStream{ ren: mut renderer, out: output }, mut device, mut factory) =\n\n gfx_window_glutin::init(glutin::WindowBuilder::new()\n\n .with_title(\"Deferred rendering example with gfx-rs\".to_string())\n\n .with_dimensions(800, 600)\n\n .with_gl(glutin::GL_CORE)\n\n .build().unwrap()\n\n );\n\n\n\n let (w, h) = output.get_size();\n\n let (g_buffer, texture_pos, texture_normal, texture_diffuse, texture_depth) = create_g_buffer(w, h, &mut factory);\n\n let (res_buffer, texture_frame, _) = create_res_buffer(w, h, &mut factory, &texture_depth);\n\n\n\n let seed = {\n\n let rand_seed = rand::thread_rng().gen();\n\n Seed::new(rand_seed)\n\n };\n\n\n\n let sampler = factory.create_sampler(\n", "file_path": "src/deferred/main.rs", "rank": 13, "score": 39635.24930888158 }, { "content": "pub fn main() {\n\n let (mut stream, mut device, mut factory) = gfx_window_glutin::init(\n\n glutin::Window::new().unwrap());\n\n stream.out.window.set_title(\"Triangle example\");\n\n\n\n let vertex_data = [\n\n Vertex { pos: [ -0.5, -0.5 ], color: [1.0, 0.0, 0.0] },\n\n Vertex { pos: [ 0.5, -0.5 ], color: [0.0, 1.0, 0.0] },\n\n Vertex { pos: [ 0.0, 0.5 ], color: [0.0, 0.0, 1.0] },\n\n ];\n\n let mesh = factory.create_mesh(&vertex_data);\n\n let slice = mesh.to_slice(gfx::PrimitiveType::TriangleList);\n\n\n\n let program = {\n\n let vs = gfx::ShaderSource {\n\n glsl_120: Some(include_bytes!(\"triangle_120.glslv\")),\n\n glsl_150: Some(include_bytes!(\"triangle_150.glslv\")),\n\n .. gfx::ShaderSource::empty()\n\n };\n\n let fs = gfx::ShaderSource {\n", "file_path": "src/triangle/main.rs", "rank": 14, "score": 39635.24930888158 }, { "content": "pub fn main() {\n\n use std::env;\n\n use time::precise_time_s;\n\n use cgmath::{EuclideanVector, FixedArray, Matrix, Rotation3, Vector};\n\n\n\n // initialize\n\n let mut is_parallel = true;\n\n for arg in env::args().skip(1) {\n\n if arg == \"single\" {\n\n is_parallel = false;\n\n }\n\n }\n\n println!(\"Running in {}-threaded mode\",\n\n if is_parallel {\"multi\"} else {\"single\"},\n\n );\n\n\n\n let (mut stream, mut device, mut factory) = gfx_window_glutin::init(\n\n glutin::WindowBuilder::new()\n\n .with_title(\"Multi-threaded shadow rendering example with gfx-rs\".to_string())\n\n .with_dimensions(800, 600)\n", "file_path": "src/shadow/main.rs", "rank": 15, "score": 39635.24930888158 }, { "content": "struct Entity<R: gfx::Resources> {\n\n dynamic: bool,\n\n mx_to_world: cgmath::Matrix4<f32>,\n\n batch_shadow: gfx::batch::Full<ShadowParams<R>>,\n\n batch_forward: gfx::batch::Full<ForwardParams<R>>,\n\n}\n\n\n", "file_path": "src/shadow/main.rs", "rank": 17, "score": 35536.934668104455 }, { "content": "struct Scene<R: gfx::Resources, S> {\n\n camera: Camera,\n\n lights: Vec<Light<S>>,\n\n entities: Arc<RwLock<Vec<Entity<R>>>>, // needs to be shared\n\n _light_buf: gfx::handle::Buffer<R, LightParam>,\n\n}\n\n\n\n//----------------------------------------\n\n// Section-4: scene construction routines\n\n\n", "file_path": "src/shadow/main.rs", "rank": 18, "score": 33287.72804722763 }, { "content": "fn gl_main(mut glfw: glfw::Glfw,\n\n mut window: glfw::Window,\n\n _: Receiver<(f64, glfw::WindowEvent),>,\n\n dimension: i16) {\n\n let gl = Gl::load_with(|s| window.get_proc_address(s));\n\n\n\n // Create GLSL shaders\n\n let vs = compile_shader(&gl, VS_SRC, gl::VERTEX_SHADER);\n\n let fs = compile_shader(&gl, FS_SRC, gl::FRAGMENT_SHADER);\n\n let program = link_program(&gl, vs, fs);\n\n\n\n let mut vao = 0;\n\n let mut vbo = 0;\n\n\n\n let trans_uniform = unsafe {\n\n // Create Vertex Array Object\n\n gl.GenVertexArrays(1, &mut vao);\n\n gl.BindVertexArray(vao);\n\n\n\n // Create a Vertex Buffer Object and copy the vertex data to it\n", "file_path": "src/performance/main.rs", "rank": 19, "score": 31187.154374340702 }, { "content": "/// Create a full scene\n\nfn create_scene<D, F>(_: &D, factory: &mut F)\n\n -> Scene<D::Resources, gfx::OwnedStream<D, gfx::Plane<D::Resources>>> where\n\n D: gfx::Device,\n\n F: gfx::Factory<D::Resources> + gfx::traits::StreamFactory<D>,\n\n{\n\n // load programs\n\n let program_forward = factory.link_program(\n\n include_bytes!(\"shader/forward_150.glslv\"),\n\n include_bytes!(\"shader/forward_150.glslf\"),\n\n ).unwrap();\n\n let program_shadow = factory.link_program(\n\n include_bytes!(\"shader/shadow_150.glslv\"),\n\n include_bytes!(\"shader/shadow_150.glslf\"),\n\n ).unwrap();\n\n\n\n // create shadows\n\n let shadow_array = factory.create_texture(gfx::tex::TextureInfo {\n\n width: 512,\n\n height: 512,\n\n depth: MAX_LIGHTS as gfx::tex::Size,\n", "file_path": "src/shadow/main.rs", "rank": 20, "score": 27116.37014609336 }, { "content": "fn compile_shader(gl: &Gl, src: &str, ty: GLenum) -> GLuint { unsafe {\n\n let shader = gl.CreateShader(ty);\n\n // Attempt to compile the shader\n\n let src = CString::new(src).unwrap();\n\n gl.ShaderSource(shader, 1, &(src.as_bytes_with_nul().as_ptr() as *const i8), ptr::null());\n\n gl.CompileShader(shader);\n\n\n\n // Get the compile status\n\n let mut status = gl::FALSE as GLint;\n\n gl.GetShaderiv(shader, gl::COMPILE_STATUS, &mut status);\n\n\n\n // Fail on error\n\n if status != (gl::TRUE as GLint) {\n\n let mut len: GLint = 0;\n\n gl.GetShaderiv(shader, gl::INFO_LOG_LENGTH, &mut len);\n\n let mut buf: Vec<u8> = repeat(0u8).take((len as isize).saturating_sub(1) as usize).collect(); // subtract 1 to skip the trailing null character\n\n gl.GetShaderInfoLog(shader, len, ptr::null_mut(), buf.as_mut_ptr() as *mut GLchar);\n\n panic!(\"{}\", str::from_utf8(&buf).ok().expect(\"ShaderInfoLog not valid utf8\"));\n\n }\n\n shader\n\n}}\n\n\n", "file_path": "src/performance/main.rs", "rank": 21, "score": 24142.897064924568 }, { "content": "fn link_program(gl: &Gl, vs: GLuint, fs: GLuint) -> GLuint { unsafe {\n\n let program = gl.CreateProgram();\n\n gl.AttachShader(program, vs);\n\n gl.AttachShader(program, fs);\n\n gl.LinkProgram(program);\n\n // Get the link status\n\n let mut status = gl::FALSE as GLint;\n\n gl.GetProgramiv(program, gl::LINK_STATUS, &mut status);\n\n\n\n // Fail on error\n\n if status != (gl::TRUE as GLint) {\n\n let mut len: GLint = 0;\n\n gl.GetProgramiv(program, gl::INFO_LOG_LENGTH, &mut len);\n\n let mut buf: Vec<u8> = repeat(0u8).take((len as isize).saturating_sub(1) as usize).collect(); // subtract 1 to skip the trailing null character\n\n gl.GetProgramInfoLog(program, len, ptr::null_mut(), buf.as_mut_ptr() as *mut GLchar);\n\n panic!(\"{}\", str::from_utf8(&buf).ok().expect(\"ProgramInfoLog not valid utf8\"));\n\n }\n\n program\n\n}}\n\n\n", "file_path": "src/performance/main.rs", "rank": 22, "score": 23650.23061717429 }, { "content": "# gfx_examples\n\n[![Build Status](https://travis-ci.org/gfx-rs/gfx_examples.png?branch=master)](https://travis-ci.org/gfx-rs/gfx_examples)\n\n\n\nExamples of using gfx-rs\n", "file_path": "README.md", "rank": 23, "score": 16845.667744268037 }, { "content": "<!--\n\n Copyright 2014 The Gfx-rs Developers.\n\n\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n\n you may not use this file except in compliance with the License.\n\n You may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n Unless required by applicable law or agreed to in writing, software\n\n distributed under the License is distributed on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n See the License for the specific language governing permissions and\n\n limitations under the License.\n\n-->\n\n\n\n# Cube Example\n\n\n\nA simple example showing how to render a textured cube using vertex and index\n\nbuffers, GLSL shaders, and uniform parameters. It is also using cgmath-rs to\n\ncompute the view-projection matrix.\n\n\n\nThe example provides two versions of each shader: for GLSL 1.20 and 1.50-core.\n\nThis is needed for proper OSX compatibility and ensures it can run on any\n\nsystem.\n\n\n\n## Screenshot\n\n\n\n![Cube Example](screenshot.png)\n", "file_path": "src/cube/README.md", "rank": 24, "score": 15746.76720527458 }, { "content": "<!--\n\n Copyright 2014 The Gfx-rs Developers.\n\n\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n\n you may not use this file except in compliance with the License.\n\n You may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n Unless required by applicable law or agreed to in writing, software\n\n distributed under the License is distributed on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n See the License for the specific language governing permissions and\n\n limitations under the License.\n\n-->\n\n\n\n# Terrain Example\n\n\n\nA simple procedural terrain example. This should generate a landscape using\n\nPerlin Noise, and scatter some entities around - for example houses and trees.\n\nThis will demonstrate the initialisation of buffers and shaders, and how the\n\nlibrary can be composed with other external dependencies.\n\n\n\n## Screenshot\n\n\n\n![Terrain Example](screenshot.png)\n\n\n\n## Useful libraries\n\n\n\n- [glfw-rs](https://github.com/bjz/glfw-rs)\n\n- [cgmath-rs](https://github.com/bjz/cgmath-rs)\n\n- [noise-rs](https://github.com/bjz/noise-rs)\n\n- [color-rs](https://github.com/bjz/color-rs) (could use some work)\n\n\n\n## Images of possible output\n\n\n\n_Sourced from [voyager3.tumblr.com](http://voyager3.tumblr.com/)_\n\n\n\n![Terrain Entities](http://25.media.tumblr.com/tumblr_m165bl19YG1qgurm4o1_1280.png)\n\n\n\n![Night Terrain](http://25.media.tumblr.com/tumblr_lyannkY9pg1qgurm4o1_r1_1280.png)\n\n\n\n![Trees](http://24.media.tumblr.com/tumblr_lxkeau5hUf1qgurm4o1_1280.png)\n", "file_path": "src/terrain/README.md", "rank": 25, "score": 15746.23626947262 }, { "content": "<!--\n\n Copyright 2014 The Gfx-rs Developers.\n\n\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n\n you may not use this file except in compliance with the License.\n\n You may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n Unless required by applicable law or agreed to in writing, software\n\n distributed under the License is distributed on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n See the License for the specific language governing permissions and\n\n limitations under the License.\n\n-->\n\n\n\n# Deferred Shading Example\n\n\n\nThis is an example of deferred shading with gfx-rs. It demonstrates the use of render targets and uniform buffers. It requires GL-3.2 to run.\n\n\n\nTwo render targets are created: a geometry buffer and a result buffer.\n\n\n\nRendering happens in two passes:\n\nFirst, the terrain is rendered, writing position, normal and color to the geometry buffer.\n\nSecond, the lights are rendered as cubes. each fragment reads from the geometry buffer,\n\n light is applied, and the result is written to the result buffer.\n\n\n\nThe result buffer is then displayed.\n\n\n\nPress 1-4 to show the immediate buffers. Press 0 to show the final result.\n\n\n\n## Screenshot\n\n\n\n![Deferred Shading Example](screenshot.png)\n\n\n\n## Useful libraries\n\n\n\n- [glfw-rs](https://github.com/bjz/glfw-rs)\n\n- [cgmath-rs](https://github.com/bjz/cgmath-rs)\n\n- [noise-rs](https://github.com/bjz/noise-rs)\n\n\n", "file_path": "src/deferred/README.md", "rank": 26, "score": 15745.19596101709 }, { "content": "<!--\n\n Copyright 2014 The Gfx-rs Developers.\n\n\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n\n you may not use this file except in compliance with the License.\n\n You may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n Unless required by applicable law or agreed to in writing, software\n\n distributed under the License is distributed on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n See the License for the specific language governing permissions and\n\n limitations under the License.\n\n-->\n\n\n\n# Shadow Example\n\n\n\nThis example shows multi-threaded rendering for computing the shadows of\n\nmultiple lights. A command buffer per each shadow map is composed by a\n\nseparate thread and then sent to the device for execution.\n\n\n\nMoving the mouse cursor rotates the cubes for your entertainment.\n\n\n\nYou can switch to single-threaded mode by appending \"single\" to the command\n\nline. Currently, the overhead of creating the threads seems to be higher\n\nthan the benefit from multi-threading. There needs to be a large number of\n\ngenerated objects in order to get the fork-join model to show a difference.\n\n\n\n## Screenshot\n\n\n\n![Shadow Example](screenshot.png)\n", "file_path": "src/shadow/README.md", "rank": 27, "score": 15742.54962331924 }, { "content": "<!--\n\n Copyright 2014 The Gfx-rs Developers.\n\n\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n\n you may not use this file except in compliance with the License.\n\n You may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n Unless required by applicable law or agreed to in writing, software\n\n distributed under the License is distributed on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n See the License for the specific language governing permissions and\n\n limitations under the License.\n\n-->\n\n\n\n# Flowmap Example\n\n\n\nA flowmap example. Flow mapping is often used to simulate water motion, by\n\nencoding a flow direction using the red and green channel of an image. This\n\nexample uses three images to compose this effect:\n\n* a flow map, which contains direction vector in the red and green channel\n\n* a water diffuse texture, which will be distorted\n\n* a noise texture, which fixes a common problem with flow maps mentioned in\n\nthe article *Animating Water Using Flow Maps* below\n\n\n\n## Screenshot\n\n\n\n![Flowmap Example](screenshot.png)\n\n\n\n## Further reading\n\n\n\n* [Water shader from source engine](https://developer.valvesoftware.com/wiki/Water_(shader))\n\n* [Water Flow in Portal 2 – Valve paper](http://www.valvesoftware.com/publications/2010/siggraph2010_vlachos_waterflow.pdf)\n\n* [Animating Water Using Flow Maps – Graphics Runner](http://graphicsrunner.blogspot.se/2010/08/water-using-flow-maps.html)\n\n* [A Walkthrough of the Special FX of Uncharted 3: Drake's Deception – Page 57](http://twvideo01.ubm-us.net/o1/vault/gdc2012/slides/Missing%20Presentations/Added%20March%2026/Keith_Guerrette_VisualArts_TheTricksUp.pdf#page.57)\n\n* [Water flow shader – icefall games](https://mtnphil.wordpress.com/2012/08/25/water-flow-shader/)\n", "file_path": "src/flowmap/README.md", "rank": 28, "score": 15742.464810946381 }, { "content": "<!--\n\n Copyright 2014 The Gfx-rs Developers.\n\n\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n\n you may not use this file except in compliance with the License.\n\n You may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n Unless required by applicable law or agreed to in writing, software\n\n distributed under the License is distributed on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n See the License for the specific language governing permissions and\n\n limitations under the License.\n\n-->\n\n\n\n# Triangle Example\n\n\n\nNo rendering library would be complete without a triangle example!\n\n\n\n## Screenshot\n\n\n\n![Triangle Example](screenshot.png)\n", "file_path": "src/triangle/README.md", "rank": 29, "score": 15742.151664743065 }, { "content": "use rand::Rng;\n\nuse cgmath::FixedArray;\n\nuse cgmath::{Matrix, Matrix4, Point3, Vector3, EuclideanVector};\n\nuse cgmath::{Transform, AffineMatrix3};\n\nuse gfx::attrib::Floater;\n\nuse gfx::traits::{Device, Factory, Stream, ToIndexSlice, ToSlice, Output, FactoryExt};\n\nuse genmesh::{Vertices, Triangulate};\n\nuse genmesh::generators::{SharedVertex, IndexedPolygon};\n\nuse time::precise_time_s;\n\n\n\nuse noise::{Seed, perlin2};\n\n\n\n// Remember to also change the constants in the shaders\n\nconst NUM_LIGHTS: usize = 250;\n\n\n\ngfx_vertex!( TerrainVertex {\n\n a_Pos@ pos: [f32; 3],\n\n a_Normal@ normal: [f32; 3],\n\n a_Color@ color: [f32; 3],\n\n});\n", "file_path": "src/deferred/main.rs", "rank": 31, "score": 29.11053785463109 }, { "content": "// First, the terrain is rendered, writing position, normal and color to the geometry buffer.\n\n// Second, the lights are rendered as cubes. each fragment reads from the geometry buffer,\n\n// light is applied, and the result is written to the result buffer.\n\n//\n\n// The result buffer is then displayed.\n\n//\n\n// Press 1-4 to show the immediate buffers. Press 0 to show the final result.\n\n\n\nextern crate cgmath;\n\nextern crate env_logger;\n\n#[macro_use]\n\nextern crate gfx;\n\nextern crate gfx_window_glutin;\n\nextern crate glutin;\n\nextern crate time;\n\nextern crate rand;\n\nextern crate genmesh;\n\nextern crate noise;\n\n\n\nuse std::marker::PhantomData;\n", "file_path": "src/deferred/main.rs", "rank": 32, "score": 22.602987692270567 }, { "content": " gfx::tex::SamplerInfo::new(gfx::tex::FilterMethod::Scale,\n\n gfx::tex::WrapMode::Clamp)\n\n );\n\n\n\n let aspect = w as f32 / h as f32;\n\n let proj = cgmath::perspective(cgmath::deg(60.0f32), aspect, 5.0, 100.0);\n\n\n\n let terrain_scale = Vector3::new(25.0, 25.0, 25.0);\n\n let mut terrain = {\n\n let plane = genmesh::generators::Plane::subdivide(256, 256);\n\n let vertex_data: Vec<TerrainVertex> = plane.shared_vertex_iter()\n\n .map(|(x, y)| {\n\n let h = terrain_scale.z * perlin2(&seed, &[x, y]);\n\n TerrainVertex {\n\n pos: [terrain_scale.x * x, terrain_scale.y * y, h],\n\n normal: calculate_normal(&seed, x, y),\n\n color: calculate_color(h),\n\n }\n\n })\n\n .collect();\n", "file_path": "src/deferred/main.rs", "rank": 34, "score": 20.076278292719877 }, { "content": "// Copyright 2014 The Gfx-rs Developers.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n#[macro_use]\n\nextern crate gfx;\n\nextern crate gfx_window_glutin;\n\nextern crate glutin;\n\n\n\nuse gfx::traits::{Stream, ToIndexSlice, ToSlice, FactoryExt};\n\n\n\ngfx_vertex!( Vertex {\n\n a_Pos@ pos: [f32; 2],\n\n a_Color@ color: [f32; 3],\n\n});\n\n\n", "file_path": "src/triangle/main.rs", "rank": 35, "score": 19.700183351884977 }, { "content": "\n\ngfx_vertex!( BlitVertex {\n\n a_Pos@ pos: [Floater<i8>; 3],\n\n a_TexCoord@ tex_coord: [Floater<u8>; 2],\n\n});\n\n\n\ngfx_vertex!( CubeVertex {\n\n a_Pos@ pos: [Floater<i8>; 3],\n\n});\n\n\n\ngfx_parameters!( TerrainParams {\n\n u_Model@ model: [[f32; 4]; 4],\n\n u_View@ view: [[f32; 4]; 4],\n\n u_Proj@ proj: [[f32; 4]; 4],\n\n u_CameraPos@ cam_pos: [f32; 3],\n\n});\n\n\n\ngfx_parameters!( LightParams {\n\n u_Transform@ transform: [[f32; 4]; 4],\n\n u_LightPosBlock@ light_pos_buf: gfx::handle::RawBuffer<R>,\n", "file_path": "src/deferred/main.rs", "rank": 36, "score": 19.641504816284666 }, { "content": "\n\nconst MAX_LIGHTS: usize = 10;\n\n\n\n#[derive(Clone, Copy, Debug)]\n\npub struct LightParam {\n\n pos: [f32; 4],\n\n color: [f32; 4],\n\n proj: [[f32; 4]; 4],\n\n}\n\n\n\ngfx_parameters!( ForwardParams {\n\n u_Transform@ transform: [[f32; 4]; 4],\n\n u_ModelTransform@ model_transform: [[f32; 4]; 4],\n\n u_Color@ color: [f32; 4],\n\n u_NumLights@ num_lights: i32,\n\n b_Lights@ light_buf: gfx::handle::Buffer<R, LightParam>,\n\n t_Shadow@ shadow: gfx::shade::TextureParam<R>,\n\n});\n\n\n\ngfx_parameters!( ShadowParams {\n\n u_Transform@ transform: [[f32; 4]; 4],\n\n});\n\n\n\n//----------------------------------------\n\n// Section-2: simple primitives generation\n\n//TODO: replace by genmesh\n\n\n", "file_path": "src/shadow/main.rs", "rank": 37, "score": 18.111303860030866 }, { "content": " &Point3::new(1.5f32, -5.0, 3.0),\n\n &Point3::new(0f32, 0.0, 0.0),\n\n &Vector3::unit_z(),\n\n );\n\n let proj = cgmath::perspective(cgmath::deg(45.0f32),\n\n stream.get_aspect_ratio(), 1.0, 10.0);\n\n\n\n let data = Params {\n\n transform: proj.mul_m(&view.mat).into_fixed(),\n\n color: (texture, Some(sampler)),\n\n _r: std::marker::PhantomData,\n\n };\n\n\n\n let mut batch = gfx::batch::Full::new(mesh, program, data).unwrap();\n\n batch.slice = index_data.to_slice(&mut factory, gfx::PrimitiveType::TriangleList);\n\n batch.state = batch.state.depth(gfx::state::Comparison::LessEqual, true);\n\n\n\n while !stream.out.window.should_close() {\n\n glfw.poll_events();\n\n for (_, event) in glfw::flush_messages(&events) {\n", "file_path": "src/cube/main.rs", "rank": 38, "score": 18.09571048191622 }, { "content": "\n\nuse std::io::Cursor;\n\nuse gfx::traits::{Factory, Stream, FactoryExt};\n\n\n\ngfx_vertex!( Vertex {\n\n a_Pos@ pos: [f32; 2],\n\n a_Uv@ uv: [f32; 2],\n\n});\n\n\n\nimpl Vertex {\n\n fn new(p: [f32; 2], u: [f32; 2]) -> Vertex {\n\n Vertex {\n\n pos: p,\n\n uv: u,\n\n }\n\n }\n\n}\n\n\n\ngfx_parameters!( Params {\n\n t_Color@ color: gfx::shade::TextureParam<R>,\n\n t_Flow@ flow: gfx::shade::TextureParam<R>,\n\n t_Noise@ noise: gfx::shade::TextureParam<R>,\n\n f_Offset0@ offset0: f32,\n\n f_Offset1@ offset1: f32,\n\n});\n\n\n", "file_path": "src/flowmap/main.rs", "rank": 39, "score": 17.817112130105826 }, { "content": "extern crate gfx_gl as gl;\n\n\n\nuse time::precise_time_s;\n\nuse cgmath::FixedArray;\n\nuse cgmath::{Matrix, Point3, Vector3, Matrix3, Matrix4};\n\nuse cgmath::{Transform, AffineMatrix3, Vector4, Array1};\n\nuse gfx::attrib::Floater;\n\nuse gfx::traits::{Device, Stream, ToIndexSlice, ToSlice, FactoryExt};\n\nuse glfw::Context;\n\nuse gl::Gl;\n\nuse gl::types::*;\n\nuse std::mem;\n\nuse std::ptr;\n\nuse std::str;\n\nuse std::env;\n\nuse std::str::FromStr;\n\nuse std::sync::mpsc::Receiver;\n\nuse std::iter::repeat;\n\nuse std::ffi::CString;\n\n\n", "file_path": "src/performance/main.rs", "rank": 40, "score": 17.46204792910736 }, { "content": "// Copyright 2015 The GFX developers.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nextern crate time;\n\nextern crate cgmath;\n\n#[macro_use]\n\nextern crate gfx;\n\nextern crate gfx_window_glutin;\n\nextern crate glutin;\n", "file_path": "src/shadow/main.rs", "rank": 41, "score": 17.332117079028837 }, { "content": " }\n\n\n\n let time = precise_time_s() as f32;\n\n\n\n // Update camera position\n\n {\n\n let cam_pos = {\n\n // Slowly circle the center\n\n let x = (0.05*time).sin();\n\n let y = (0.05*time).cos();\n\n Point3::new(x * 32.0, y * 32.0, 16.0)\n\n };\n\n let view: AffineMatrix3<f32> = Transform::look_at(\n\n &cam_pos,\n\n &Point3::new(0.0, 0.0, 0.0),\n\n &Vector3::unit_z(),\n\n );\n\n terrain.params.view = view.mat.into_fixed();\n\n terrain.params.cam_pos = cam_pos.into_fixed();\n\n\n", "file_path": "src/deferred/main.rs", "rank": 43, "score": 16.99208722140588 }, { "content": " light.params.transform = proj.mul_m(&view.mat).into_fixed();\n\n light.params.cam_pos = cam_pos.into_fixed();\n\n\n\n emitter.params.transform = proj.mul_m(&view.mat).into_fixed();\n\n }\n\n\n\n // Update light positions\n\n for (i, p) in light_pos_vec.iter_mut().enumerate() {\n\n let (x, y) = {\n\n let fi = i as f32;\n\n // Distribute lights nicely\n\n let r = 1.0 - (fi*fi) / ((NUM_LIGHTS*NUM_LIGHTS) as f32);\n\n (r * (0.2*time + i as f32).cos(), r * (0.2*time + i as f32).sin())\n\n };\n\n let h = perlin2(&seed, &[x, y]);\n\n\n\n p[0] = terrain_scale.x * x;\n\n p[1] = terrain_scale.y * y;\n\n p[2] = terrain_scale.z * h + 0.5;\n\n };\n", "file_path": "src/deferred/main.rs", "rank": 44, "score": 15.8467420197512 }, { "content": "use cgmath::FixedArray;\n\nuse cgmath::{Matrix, Point3, Vector3};\n\nuse cgmath::{Transform, AffineMatrix3};\n\nuse gfx::attrib::Floater;\n\nuse gfx::traits::{Factory, Stream, ToIndexSlice, ToSlice, FactoryExt};\n\n\n\n// Declare the vertex format suitable for drawing.\n\n// Notice the use of FixedPoint.\n\ngfx_vertex!( Vertex {\n\n a_Pos@ pos: [Floater<i8>; 3],\n\n a_TexCoord@ tex_coord: [Floater<u8>; 2],\n\n});\n\n\n\nimpl Vertex {\n\n fn new(p: [i8; 3], t: [u8; 2]) -> Vertex {\n\n Vertex {\n\n pos: Floater::cast3(p),\n\n tex_coord: Floater::cast2(t),\n\n }\n\n }\n", "file_path": "src/cube/main.rs", "rank": 45, "score": 15.618829296642255 }, { "content": " levels: 1,\n\n kind: gfx::tex::Kind::D2Array,\n\n format: gfx::tex::Format::DEPTH24,\n\n }).unwrap();\n\n\n\n let (near, far) = (1f32, 20f32);\n\n\n\n let light_buf = factory.create_buffer_dynamic::<LightParam>(\n\n MAX_LIGHTS, gfx::BufferRole::Uniform);\n\n\n\n // create lights\n\n struct LightDesc {\n\n pos: cgmath::Point3<f32>,\n\n color: gfx::ColorValue,\n\n fov: f32,\n\n }\n\n\n\n let light_descs = vec![\n\n LightDesc {\n\n pos: cgmath::Point3::new(7.0, -5.0, 10.0),\n", "file_path": "src/shadow/main.rs", "rank": 46, "score": 15.526416450886053 }, { "content": " let proj = cgmath::perspective(cgmath::deg(45.0f32), aspect, 1.0, 10.0);\n\n\n\n while !window.should_close() {\n\n // Poll events\n\n glfw.poll_events();\n\n\n\n let start = precise_time_s() * 1000.;\n\n\n\n // Clear the screen to black\n\n unsafe {\n\n gl.ClearColor(0.3, 0.3, 0.3, 1.0);\n\n gl.Clear(gl::COLOR_BUFFER_BIT);\n\n }\n\n\n\n for x in (-dimension) ..dimension {\n\n for y in (-dimension) ..dimension {\n\n let mut model = Matrix4::from(Matrix3::identity().mul_s(0.01f32));\n\n model.w = Vector4::new(x as f32 * 0.05,\n\n 0f32,\n\n y as f32 * 0.05,\n", "file_path": "src/performance/main.rs", "rank": 47, "score": 15.421040371218961 }, { "content": "\n\n let index_data: Vec<u32> = plane.indexed_polygon_iter()\n\n .triangulate()\n\n .vertices()\n\n .map(|i| i as u32)\n\n .collect();\n\n\n\n let mesh = factory.create_mesh(&vertex_data);\n\n let slice = index_data.to_slice(&mut factory, gfx::PrimitiveType::TriangleList);\n\n\n\n let program = factory.link_program(TERRAIN_VERTEX_SRC, TERRAIN_FRAGMENT_SRC)\n\n .unwrap();\n\n let state = gfx::DrawState::new().depth(gfx::state::Comparison::LessEqual, true);\n\n\n\n let data = TerrainParams {\n\n model: Matrix4::identity().into_fixed(),\n\n view: Matrix4::identity().into_fixed(),\n\n proj: proj.into_fixed(),\n\n cam_pos: Vector3::new(0.0, 0.0, 0.0).into_fixed(),\n\n _r: PhantomData,\n", "file_path": "src/deferred/main.rs", "rank": 48, "score": 15.120365723080898 }, { "content": " mx_proj.mul_m(&light.mx_view).into_fixed()\n\n },\n\n }).collect();\n\n factory.update_buffer(&light_buf, &light_params, 0).unwrap();\n\n\n\n let shadow_param = {\n\n let mut sinfo = gfx::tex::SamplerInfo::new(\n\n gfx::tex::FilterMethod::Bilinear,\n\n gfx::tex::WrapMode::Clamp\n\n );\n\n sinfo.comparison = Some(gfx::state::Comparison::LessEqual);\n\n let sampler = factory.create_sampler(sinfo);\n\n (shadow_array.clone(), Some(sampler))\n\n };\n\n\n\n // create entities\n\n struct CubeDesc {\n\n offset: cgmath::Vector3<f32>,\n\n angle: f32,\n\n scale: f32,\n", "file_path": "src/shadow/main.rs", "rank": 50, "score": 14.893562078988973 }, { "content": "// Copyright 2014 The Gfx-rs Developers.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nextern crate cgmath;\n\n#[macro_use]\n\nextern crate gfx;\n\nextern crate gfx_window_glfw;\n\nextern crate glfw;\n\nextern crate time;\n", "file_path": "src/performance/main.rs", "rank": 51, "score": 14.794190944677432 }, { "content": " }).collect();\n\n entities.push({\n\n let (mesh, slice) = create_plane(factory, 7);\n\n make_entity(false, &mesh, &slice,\n\n &program_forward, &program_shadow,\n\n lights.len(), &light_buf, &shadow_param,\n\n cgmath::Matrix4::identity())\n\n });\n\n\n\n // create camera\n\n let camera = Camera {\n\n mx_view: cgmath::Matrix4::look_at(\n\n &cgmath::Point3::new(3.0f32, -10.0, 6.0),\n\n &cgmath::Point3::new(0f32, 0.0, 0.0),\n\n &cgmath::Vector3::unit_z(),\n\n ),\n\n projection: cgmath::PerspectiveFov {\n\n fovy: cgmath::deg(45.0f32),\n\n aspect: 1.0,\n\n near: near,\n", "file_path": "src/shadow/main.rs", "rank": 52, "score": 14.574711201566485 }, { "content": "}\n\n\n\n// The shader_param attribute makes sure the following struct can be used to\n\n// pass parameters to a shader.\n\ngfx_parameters!( Params {\n\n u_Transform@ transform: [[f32; 4]; 4],\n\n t_Color@ color: gfx::shade::TextureParam<R>,\n\n});\n\n\n\n\n\n//----------------------------------------\n\n\n", "file_path": "src/cube/main.rs", "rank": 53, "score": 14.076459362824975 }, { "content": " near: near,\n\n far: far,\n\n }.to_perspective(),\n\n color: desc.color.clone(),\n\n stream: factory.create_stream(\n\n gfx::Plane::Texture(\n\n shadow_array.clone(),\n\n 0,\n\n Some(i as gfx::Layer)\n\n ),\n\n ),\n\n }).collect();\n\n\n\n // init light parameters\n\n let light_params: Vec<_> = lights.iter().map(|light| LightParam {\n\n pos: [light.position.x, light.position.y, light.position.z, 1.0],\n\n color: light.color,\n\n proj: {\n\n use cgmath::{FixedArray, Matrix, Matrix4};\n\n let mx_proj: Matrix4<_> = light.projection.into();\n", "file_path": "src/shadow/main.rs", "rank": 54, "score": 13.995070437918574 }, { "content": "// Copyright 2014 The Gfx-rs Developers.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n#[macro_use]\n\nextern crate gfx;\n\nextern crate gfx_window_glutin;\n\nextern crate glutin;\n\n\n\nextern crate image;\n", "file_path": "src/flowmap/main.rs", "rank": 55, "score": 13.773531552794623 }, { "content": " let mx_proj: cgmath::Matrix4<_> = proj.into();\n\n mx_proj.mul_m(&scene.camera.mx_view)\n\n };\n\n\n\n for ent in scene.entities.write().unwrap().iter_mut() {\n\n let batch = &mut ent.batch_forward;\n\n batch.params.transform = mx_vp.mul_m(&ent.mx_to_world).into_fixed();\n\n batch.params.model_transform = ent.mx_to_world.into_fixed();\n\n stream.draw(batch).unwrap();\n\n }\n\n\n\n // done\n\n stream.present(&mut device);\n\n num_frames += 1.0;\n\n }\n\n\n\n let time_end = precise_time_s();\n\n println!(\"Avg frame time: {} ms\",\n\n (time_end - time_start) * 1000.0 / num_frames\n\n );\n\n}\n", "file_path": "src/shadow/main.rs", "rank": 56, "score": 13.546148857860306 }, { "content": " &Vector3::unit_z(),\n\n );\n\n let aspect = stream.get_aspect_ratio();\n\n let proj = cgmath::perspective(cgmath::deg(45.0f32), aspect, 1.0, 10.0);\n\n\n\n let batch = gfx::batch::Core::new(mesh, program).unwrap();;\n\n\n\n while !stream.out.window.should_close() {\n\n glfw.poll_events();\n\n for (_, event) in glfw::flush_messages(&events) {\n\n match event {\n\n glfw::WindowEvent::Key(glfw::Key::Escape, _, glfw::Action::Press, _) =>\n\n stream.out.window.set_should_close(true),\n\n _ => {},\n\n }\n\n }\n\n\n\n let start = precise_time_s() * 1000.;\n\n stream.clear(gfx::ClearData {\n\n color: [0.3, 0.3, 0.3, 1.0],\n", "file_path": "src/performance/main.rs", "rank": 57, "score": 13.509010870287975 }, { "content": " depth: 1.0,\n\n stencil: 0,\n\n });\n\n\n\n for x in (-dimension) ..dimension {\n\n for y in (-dimension) ..dimension {\n\n let mut model = Matrix4::from(Matrix3::identity().mul_s(0.01f32));\n\n model.w = Vector4::new(x as f32 * 0.05,\n\n 0f32,\n\n y as f32 * 0.05,\n\n 1f32);\n\n\n\n let data = Params {\n\n transform: proj.mul_m(&view.mat)\n\n .mul_m(&model).into_fixed(),\n\n _r: std::marker::PhantomData,\n\n };\n\n stream.draw(&batch.with(&slice, &data, &state))\n\n .unwrap();\n\n }\n", "file_path": "src/performance/main.rs", "rank": 58, "score": 12.98503827243682 }, { "content": " color: [0.5, 1.0, 0.5, 1.0],\n\n fov: 60.0,\n\n },\n\n LightDesc {\n\n pos: cgmath::Point3::new(-5.0, 7.0, 10.0),\n\n color: [1.0, 0.5, 0.5, 1.0],\n\n fov: 45.0,\n\n },\n\n ];\n\n\n\n let lights: Vec<_> = light_descs.iter().enumerate().map(|(i, desc)| Light {\n\n position: desc.pos.clone(),\n\n mx_view: cgmath::Matrix4::look_at(\n\n &desc.pos,\n\n &cgmath::Point3::new(0.0, 0.0, 0.0),\n\n &cgmath::Vector3::unit_z(),\n\n ),\n\n projection: cgmath::PerspectiveFov {\n\n fovy: cgmath::deg(desc.fov),\n\n aspect: 1.0,\n", "file_path": "src/shadow/main.rs", "rank": 59, "score": 12.978236312752337 }, { "content": "// Copyright 2014 The Gfx-rs Developers.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nextern crate cgmath;\n\n#[macro_use]\n\nextern crate gfx;\n\nextern crate gfx_window_glfw;\n\nextern crate glfw;\n\n\n", "file_path": "src/cube/main.rs", "rank": 60, "score": 12.907583732790705 }, { "content": " light.stream.clear(gfx::ClearData {\n\n color: [0.0; 4],\n\n depth: 1.0,\n\n stencil: 0,\n\n });\n\n // fill\n\n for ent in entities.read().unwrap().iter() {\n\n let mut batch = ent.batch_shadow.clone();\n\n batch.params.transform = {\n\n let mx_proj: cgmath::Matrix4<_> = light.projection.into();\n\n let mx_view = mx_proj.mul_m(&light.mx_view);\n\n let mvp = mx_view.mul_m(&ent.mx_to_world);\n\n mvp.into_fixed()\n\n };\n\n light.stream.draw(&batch).unwrap();\n\n }\n\n sender.send(light).unwrap();\n\n })\n\n }).collect();\n\n // wait for the results and execute them\n", "file_path": "src/shadow/main.rs", "rank": 61, "score": 12.14354901616386 }, { "content": " #version 150 core\n\n\n\n uniform mat4 u_Model;\n\n uniform mat4 u_View;\n\n uniform mat4 u_Proj;\n\n in vec3 a_Pos;\n\n in vec3 a_Normal;\n\n in vec3 a_Color;\n\n out vec3 v_FragPos;\n\n out vec3 v_Normal;\n\n out vec3 v_Color;\n\n\n\n void main() {\n\n v_FragPos = (u_Model * vec4(a_Pos, 1.0)).xyz;\n\n v_Normal = a_Normal;\n\n v_Color = a_Color;\n\n gl_Position = u_Proj * u_View * u_Model * vec4(a_Pos, 1.0);\n\n }\n\n\";\n\n\n", "file_path": "src/deferred/main.rs", "rank": 62, "score": 12.089526276097931 }, { "content": " 1f32);\n\n\n\n let mat = proj.mul_m(&view.mat).mul_m(&model);\n\n\n\n unsafe {\n\n gl.UniformMatrix4fv(trans_uniform,\n\n 1,\n\n gl::FALSE,\n\n mat.x.ptr());\n\n gl.DrawArrays(gl::TRIANGLES, 0, 3);\n\n }\n\n\n\n }\n\n }\n\n\n\n let submit = precise_time_s() * 1000.;\n\n\n\n // Swap buffers\n\n window.swap_buffers();\n\n let swap = precise_time_s() * 1000.;\n", "file_path": "src/performance/main.rs", "rank": 63, "score": 11.909933711363637 }, { "content": " // put the lights back into the scene\n\n for _ in 0..num {\n\n let mut light = receiver.recv().unwrap();\n\n light.stream.flush(&mut device);\n\n scene.lights.push(light);\n\n }\n\n } else {\n\n for light in scene.lights.iter_mut() {\n\n // clear\n\n light.stream.clear(gfx::ClearData {\n\n color: [0.0; 4],\n\n depth: 1.0,\n\n stencil: 0,\n\n });\n\n // fill\n\n for ent in scene.entities.read().unwrap().iter() {\n\n let mut batch = ent.batch_shadow.clone();\n\n batch.params.transform = {\n\n let mx_proj: cgmath::Matrix4<_> = light.projection.into();\n\n let mx_view = mx_proj.mul_m(&light.mx_view);\n", "file_path": "src/shadow/main.rs", "rank": 64, "score": 11.411772020274503 }, { "content": " u_Radius@ radius: f32,\n\n u_CameraPos@ cam_pos: [f32; 3],\n\n u_FrameRes@ frame_res: [f32; 2],\n\n u_TexPos@ tex_pos: gfx::shade::TextureParam<R>,\n\n u_TexNormal@ tex_normal: gfx::shade::TextureParam<R>,\n\n u_TexDiffuse@ tex_diffuse: gfx::shade::TextureParam<R>,\n\n});\n\n\n\ngfx_parameters!( EmitterParams {\n\n u_Transform@ transform: [[f32; 4]; 4],\n\n u_LightPosBlock@ light_pos_buf: gfx::handle::RawBuffer<R>,\n\n u_Radius@ radius: f32,\n\n});\n\n\n\ngfx_parameters!( BlitParams {\n\n u_Tex@ tex: gfx::shade::TextureParam<R>,\n\n});\n\n\n\n\n\nstatic TERRAIN_VERTEX_SRC: &'static [u8] = b\"\n", "file_path": "src/deferred/main.rs", "rank": 65, "score": 10.926230415019171 }, { "content": " // since we sample our diffuse texture twice we need to lerp between\n\n // them to get a smooth transition (shouldn't even be noticable).\n\n\n\n // they start half a cycle apart (0.5) and is later used to calculate\n\n // the interpolation amount via `2.0 * abs(cycle0 - .5f)`\n\n cycle0 += 0.0025f32;\n\n if cycle0 > 1f32 {\n\n cycle0 -= 1f32;\n\n }\n\n\n\n cycle1 += 0.0025f32;\n\n if cycle1 > 1f32 {\n\n cycle1 -= 1f32;\n\n }\n\n\n\n batch.params.offset0 = cycle0;\n\n batch.params.offset1 = cycle1;\n\n\n\n stream.clear(gfx::ClearData {\n\n color: [0.3, 0.3, 0.3, 1.0],\n\n depth: 1.0,\n\n stencil: 0,\n\n });\n\n\n\n stream.draw(&batch).unwrap();\n\n stream.present(&mut device);\n\n }\n\n}\n", "file_path": "src/flowmap/main.rs", "rank": 66, "score": 10.751538919993845 }, { "content": " .with_gl(glutin::GL_CORE)\n\n .with_depth_buffer(24)\n\n .build().unwrap()\n\n );\n\n let _ = stream.out.set_gamma(gfx::Gamma::Convert); // enable srgb\n\n\n\n let mut scene = create_scene(&device, &mut factory);\n\n let mut last_mouse: (i32, i32) = (0, 0);\n\n let time_start = precise_time_s();\n\n let mut num_frames = 0f64;\n\n\n\n 'main: loop {\n\n // process events\n\n for event in stream.out.window.poll_events() {\n\n use glutin::{Event, VirtualKeyCode};\n\n match event {\n\n Event::KeyboardInput(_, _, Some(VirtualKeyCode::Escape)) => break 'main,\n\n Event::MouseMoved(cur) => if cur != last_mouse {\n\n let axis = cgmath::vec3(\n\n (cur.0 - last_mouse.0) as f32,\n", "file_path": "src/shadow/main.rs", "rank": 67, "score": 10.66808116517632 }, { "content": " let a_pos = CString::new(\"a_Pos\").unwrap();\n\n gl.BindFragDataLocation(program, 0, a_pos.as_bytes_with_nul().as_ptr() as *const i8);\n\n\n\n let pos_attr = gl.GetAttribLocation(program, a_pos.as_ptr());\n\n gl.EnableVertexAttribArray(pos_attr as GLuint);\n\n gl.VertexAttribPointer(pos_attr as GLuint, 3, gl::BYTE,\n\n gl::FALSE as GLboolean, 0, ptr::null());\n\n\n\n\n\n let u_transform = CString::new(\"u_Transform\").unwrap();\n\n gl.GetUniformLocation(program, u_transform.as_bytes_with_nul().as_ptr() as *const i8)\n\n };\n\n\n\n let (w, h) = window.get_framebuffer_size();\n\n let view: AffineMatrix3<f32> = Transform::look_at(\n\n &Point3::new(0f32, -5.0, 0.0),\n\n &Point3::new(0f32, 0.0, 0.0),\n\n &Vector3::unit_z(),\n\n );\n\n let aspect = w as f32 / h as f32;\n", "file_path": "src/performance/main.rs", "rank": 68, "score": 10.31790713629625 }, { "content": "\n\ngfx_vertex!( Vertex {\n\n a_Pos@ pos: [Floater<i8>; 3],\n\n});\n\n\n\ngfx_parameters!( Params {\n\n u_Transform@ transform: [[f32; 4]; 4],\n\n});\n\n\n\nstatic VERTEX_SRC: &'static [u8] = b\"\n\n #version 150 core\n\n in vec3 a_Pos;\n\n uniform mat4 u_Transform;\n\n\n\n void main() {\n\n gl_Position = u_Transform * vec4(a_Pos, 1.0);\n\n }\n\n\";\n\n\n\nstatic FRAGMENT_SRC: &'static [u8] = b\"\n", "file_path": "src/performance/main.rs", "rank": 69, "score": 9.92684667020814 }, { "content": "\n\nuse std::sync::{Arc, RwLock};\n\nuse gfx::attrib::Floater;\n\nuse gfx::traits::*;\n\n\n\n// Section-1: vertex formats and shader parameters\n\n\n\ngfx_vertex!( Vertex {\n\n a_Pos@ pos: [Floater<i8>; 3],\n\n a_Normal@ normal: [Floater<i8>; 3],\n\n});\n\n\n\nimpl Vertex {\n\n fn new(p: [i8; 3], n: [i8; 3]) -> Vertex {\n\n Vertex {\n\n pos: Floater::cast3(p),\n\n normal: Floater::cast3(n),\n\n }\n\n }\n\n}\n", "file_path": "src/shadow/main.rs", "rank": 70, "score": 9.487059974077528 }, { "content": "\n\n let mesh = factory.create_mesh(&vertex_data);\n\n let slice = index_data.to_slice(&mut factory, gfx::PrimitiveType::TriangleList);\n\n\n\n let state = gfx::DrawState::new()\n\n .depth(gfx::state::Comparison::LessEqual, false)\n\n .blend(gfx::BlendPreset::Add);\n\n\n\n let light_data = LightParams {\n\n transform: Matrix4::identity().into_fixed(),\n\n light_pos_buf: light_pos_buffer.raw().clone(),\n\n radius: 3.0,\n\n cam_pos: Vector3::new(0.0, 0.0, 0.0).into_fixed(),\n\n frame_res: [w as f32, h as f32],\n\n tex_pos: (texture_pos.clone(), Some(sampler.clone())),\n\n tex_normal: (texture_normal.clone(), Some(sampler.clone())),\n\n tex_diffuse: (texture_diffuse.clone(), Some(sampler.clone())),\n\n _r: PhantomData,\n\n };\n\n\n", "file_path": "src/deferred/main.rs", "rank": 71, "score": 9.186243509429419 }, { "content": " let mvp = mx_view.mul_m(&ent.mx_to_world);\n\n mvp.into_fixed()\n\n };\n\n light.stream.draw(&batch).unwrap();\n\n }\n\n // submit\n\n light.stream.flush(&mut device);\n\n }\n\n }\n\n\n\n // draw entities with forward pass\n\n stream.clear(gfx::ClearData {\n\n color: [0.1, 0.2, 0.3, 1.0],\n\n depth: 1.0,\n\n stencil: 0,\n\n });\n\n\n\n let mx_vp = {\n\n let mut proj = scene.camera.projection;\n\n proj.aspect = stream.get_aspect_ratio();\n", "file_path": "src/shadow/main.rs", "rank": 72, "score": 8.431895776678212 }, { "content": " gl.GenBuffers(1, &mut vbo);\n\n gl.BindBuffer(gl::ARRAY_BUFFER, vbo);\n\n\n\n let vertex_data = vec![\n\n Vertex { pos: Floater::cast3([-1, 1, -1]) },\n\n Vertex { pos: Floater::cast3([ 1, 1, -1]) },\n\n Vertex { pos: Floater::cast3([ 1, 1, 1]) },\n\n ];\n\n\n\n gl.BufferData(gl::ARRAY_BUFFER,\n\n (vertex_data.len() * mem::size_of::<Vertex>()) as GLsizeiptr,\n\n mem::transmute(&vertex_data[0]),\n\n gl::STATIC_DRAW);\n\n\n\n // Use shader program\n\n gl.UseProgram(program);\n\n let o_color = CString::new(\"o_Color\").unwrap();\n\n gl.BindFragDataLocation(program, 0, o_color.as_bytes_with_nul().as_ptr() as *const i8);\n\n\n\n // Specify the layout of the vertex data\n", "file_path": "src/performance/main.rs", "rank": 73, "score": 8.363222900874796 }, { "content": " let program = factory.link_program(BLIT_VERTEX_SRC, BLIT_FRAGMENT_SRC)\n\n .unwrap();\n\n let state = gfx::DrawState::new();\n\n\n\n let data = BlitParams {\n\n tex: (texture_pos.clone(), Some(sampler.clone())),\n\n _r: PhantomData,\n\n };\n\n\n\n let mut batch = gfx::batch::Full::new(mesh, program, data).unwrap();\n\n batch.slice = slice;\n\n batch.state = state;\n\n batch\n\n };\n\n\n\n let light_pos_buffer = factory.create_buffer_dynamic::<[f32; 4]>(NUM_LIGHTS,\n\n gfx::BufferRole::Uniform);\n\n\n\n let (mut light, mut emitter) = {\n\n let vertex_data = [\n", "file_path": "src/deferred/main.rs", "rank": 74, "score": 8.262401261828973 }, { "content": " noise: (noise_texture, None),\n\n offset0: 0f32,\n\n offset1: 0.5f32,\n\n _r: std::marker::PhantomData,\n\n };\n\n let mut batch = gfx::batch::Full::new(mesh, program, uniforms).unwrap();\n\n\n\n let mut cycle0 = 0.0f32;\n\n let mut cycle1 = 0.5f32;\n\n\n\n 'main: loop {\n\n // quit when Esc is pressed.\n\n for event in stream.out.window.poll_events() {\n\n match event {\n\n glutin::Event::KeyboardInput(_, _, Some(glutin::VirtualKeyCode::Escape)) => break 'main,\n\n glutin::Event::Closed => break 'main,\n\n _ => {},\n\n }\n\n }\n\n\n", "file_path": "src/flowmap/main.rs", "rank": 75, "score": 7.815936118957771 }, { "content": " let mut batch = gfx::batch::Full::new(\n\n mesh.clone(), prog_fw.clone(), data).unwrap();\n\n batch.slice = slice.clone();\n\n // forward pass is using depth test + write\n\n batch.state = batch.state.depth(gfx::state::Comparison::LessEqual, true);\n\n batch\n\n },\n\n batch_shadow: {\n\n let data = ShadowParams {\n\n transform: cgmath::Matrix4::identity().into_fixed(),\n\n _r: std::marker::PhantomData,\n\n };\n\n let mut batch = gfx::batch::Full::new(\n\n mesh.clone(), prog_sh.clone(), data).unwrap();\n\n batch.slice = slice.clone();\n\n // shadow pass is also depth testing and writing\n\n batch.state = batch.state.depth(gfx::state::Comparison::LessEqual, true);\n\n // need to offset the shadow depth to prevent self-shadowing\n\n // offset = 2, because we are using bilinear filtering\n\n batch.state.primitive.offset = Some(gfx::state::Offset(2.0, 2));\n\n batch\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/shadow/main.rs", "rank": 76, "score": 7.213768024641647 }, { "content": " glsl_120: Some(include_bytes!(\"triangle_120.glslf\")),\n\n glsl_150: Some(include_bytes!(\"triangle_150.glslf\")),\n\n .. gfx::ShaderSource::empty()\n\n };\n\n factory.link_program_source(vs, fs).unwrap()\n\n };\n\n let state = gfx::DrawState::new();\n\n\n\n 'main: loop {\n\n // quit when Esc is pressed.\n\n for event in stream.out.window.poll_events() {\n\n match event {\n\n glutin::Event::KeyboardInput(_, _, Some(glutin::VirtualKeyCode::Escape)) => break 'main,\n\n glutin::Event::Closed => break 'main,\n\n _ => {},\n\n }\n\n }\n\n\n\n stream.clear(gfx::ClearData {\n\n color: [0.3, 0.3, 0.3, 1.0],\n\n depth: 1.0,\n\n stencil: 0,\n\n });\n\n stream.draw(&gfx::batch::bind(&state, &mesh, slice.clone(), &program, &None))\n\n .unwrap();\n\n stream.present(&mut device);\n\n }\n\n}\n", "file_path": "src/triangle/main.rs", "rank": 77, "score": 6.887657359248239 }, { "content": " batch.slice = slice;\n\n batch.state = state;\n\n batch\n\n };\n\n\n\n (light, emitter)\n\n };\n\n\n\n let clear_data = gfx::ClearData {\n\n color: [0.0, 0.0, 0.0, 1.0],\n\n depth: 1.0,\n\n stencil: 0,\n\n };\n\n\n\n let mut debug_buf: Option<gfx::handle::Texture<_>> = None;\n\n\n\n let mut light_pos_vec: Vec<[f32; 4]> = (0 ..NUM_LIGHTS).map(|_| {\n\n [0.0, 0.0, 0.0, 0.0]\n\n }).collect();\n\n\n", "file_path": "src/deferred/main.rs", "rank": 78, "score": 6.785202464697532 }, { "content": " angle: 210.0,\n\n scale: 0.9,\n\n },\n\n ];\n\n\n\n let mut entities: Vec<_> = cube_descs.iter().map(|desc| {\n\n use cgmath::{EuclideanVector, Rotation3};\n\n let (mesh, slice) = create_cube(factory);\n\n make_entity(true, &mesh, &slice,\n\n &program_forward, &program_shadow,\n\n lights.len(), &light_buf, &shadow_param,\n\n cgmath::Decomposed {\n\n disp: desc.offset.clone(),\n\n rot: cgmath::Quaternion::from_axis_angle(\n\n &desc.offset.normalize(),\n\n cgmath::deg(desc.angle).into(),\n\n ),\n\n scale: desc.scale,\n\n }.into(),\n\n )\n", "file_path": "src/shadow/main.rs", "rank": 79, "score": 6.763854327464043 }, { "content": " let sampler = factory.create_sampler(\n\n gfx::tex::SamplerInfo::new(gfx::tex::FilterMethod::Bilinear,\n\n gfx::tex::WrapMode::Clamp)\n\n );\n\n\n\n let program = {\n\n let vs = gfx::ShaderSource {\n\n glsl_120: Some(include_bytes!(\"cube_120.glslv\")),\n\n glsl_150: Some(include_bytes!(\"cube_150.glslv\")),\n\n .. gfx::ShaderSource::empty()\n\n };\n\n let fs = gfx::ShaderSource {\n\n glsl_120: Some(include_bytes!(\"cube_120.glslf\")),\n\n glsl_150: Some(include_bytes!(\"cube_150.glslf\")),\n\n .. gfx::ShaderSource::empty()\n\n };\n\n factory.link_program_source(vs, fs).unwrap()\n\n };\n\n\n\n let view: AffineMatrix3<f32> = Transform::look_at(\n", "file_path": "src/cube/main.rs", "rank": 80, "score": 6.33582938072981 }, { "content": " let light_program = factory.link_program(LIGHT_VERTEX_SRC, LIGHT_FRAGMENT_SRC)\n\n .unwrap();\n\n let light = {\n\n let mut batch = gfx::batch::Full::new(mesh.clone(), light_program, light_data).unwrap();\n\n batch.slice = slice.clone();\n\n batch.state = state.clone();\n\n batch\n\n };\n\n\n\n let emitter_data = EmitterParams {\n\n transform: Matrix4::identity().into_fixed(),\n\n light_pos_buf: light_pos_buffer.raw().clone(),\n\n radius: 0.2,\n\n _r: PhantomData,\n\n };\n\n\n\n let emitter_program = factory.link_program(EMITTER_VERTEX_SRC, EMITTER_FRAGMENT_SRC)\n\n .unwrap();\n\n let emitter = {\n\n let mut batch = gfx::batch::Full::new(mesh, emitter_program, emitter_data).unwrap();\n", "file_path": "src/deferred/main.rs", "rank": 81, "score": 5.712057194319502 }, { "content": " let flow_texture = load_texture(&mut factory, &include_bytes!(\"image/flow.png\")[..]).unwrap();\n\n let noise_texture = load_texture(&mut factory, &include_bytes!(\"image/noise.png\")[..]).unwrap();\n\n\n\n let program = {\n\n let vs = gfx::ShaderSource {\n\n glsl_120: Some(include_bytes!(\"shader/flowmap_120.glslv\")),\n\n glsl_150: Some(include_bytes!(\"shader/flowmap_150.glslv\")),\n\n .. gfx::ShaderSource::empty()\n\n };\n\n let fs = gfx::ShaderSource {\n\n glsl_120: Some(include_bytes!(\"shader/flowmap_120.glslf\")),\n\n glsl_150: Some(include_bytes!(\"shader/flowmap_150.glslf\")),\n\n .. gfx::ShaderSource::empty()\n\n };\n\n factory.link_program_source(vs, fs).unwrap()\n\n };\n\n\n\n let uniforms = Params {\n\n color: (water_texture, None),\n\n flow: (flow_texture, None),\n", "file_path": "src/flowmap/main.rs", "rank": 82, "score": 5.48162372858285 }, { "content": " };\n\n\n\n let mut batch = gfx::batch::Full::new(mesh, program, data).unwrap();\n\n batch.slice = slice;\n\n batch.state = state;\n\n batch\n\n };\n\n\n\n let mut blit = {\n\n let vertex_data = [\n\n BlitVertex { pos: Floater::cast3([-1, -1, 0]), tex_coord: Floater::cast2([0, 0]) },\n\n BlitVertex { pos: Floater::cast3([ 1, -1, 0]), tex_coord: Floater::cast2([1, 0]) },\n\n BlitVertex { pos: Floater::cast3([ 1, 1, 0]), tex_coord: Floater::cast2([1, 1]) },\n\n BlitVertex { pos: Floater::cast3([-1, -1, 0]), tex_coord: Floater::cast2([0, 0]) },\n\n BlitVertex { pos: Floater::cast3([ 1, 1, 0]), tex_coord: Floater::cast2([1, 1]) },\n\n BlitVertex { pos: Floater::cast3([-1, 1, 0]), tex_coord: Floater::cast2([0, 1]) },\n\n ];\n\n let mesh = factory.create_mesh(&vertex_data);\n\n let slice = mesh.to_slice(gfx::PrimitiveType::TriangleList);\n\n\n", "file_path": "src/deferred/main.rs", "rank": 83, "score": 5.301193533193089 }, { "content": "static TERRAIN_FRAGMENT_SRC: &'static [u8] = b\"\n\n #version 150 core\n\n\n\n in vec3 v_FragPos;\n\n in vec3 v_Normal;\n\n in vec3 v_Color;\n\n out vec4 o_Position;\n\n out vec4 o_Normal;\n\n out vec4 o_Color;\n\n\n\n void main() {\n\n vec3 n = normalize(v_Normal);\n\n\n\n o_Position = vec4(v_FragPos, 0.0);\n\n o_Normal = vec4(n, 0.0);\n\n o_Color = vec4(v_Color, 1.0);\n\n }\n\n\";\n\n\n\nstatic BLIT_VERTEX_SRC: &'static [u8] = b\"\n", "file_path": "src/deferred/main.rs", "rank": 84, "score": 5.300596289947264 }, { "content": " 0.0,\n\n (cur.1 - last_mouse.1) as f32,\n\n );\n\n let len = axis.length();\n\n for ent in scene.entities.write().unwrap().iter_mut() {\n\n if !ent.dynamic {\n\n continue\n\n }\n\n // rotate all cubes around the axis\n\n let rot = cgmath::Decomposed {\n\n disp: cgmath::vec3(0.0, 0.0, 0.0),\n\n rot: cgmath::Quaternion::from_axis_angle(\n\n &axis.mul_s(1.0 / len),\n\n cgmath::deg(len * 0.3).into(),\n\n ),\n\n scale: 1.0,\n\n }.into();\n\n ent.mx_to_world = ent.mx_to_world.mul_m(&rot);\n\n }\n\n last_mouse = cur;\n", "file_path": "src/shadow/main.rs", "rank": 85, "score": 5.094221285844096 }, { "content": " let texture_pos = factory.create_texture(texture_info_float).unwrap();\n\n let texture_normal = factory.create_texture(texture_info_float).unwrap();\n\n let texture_diffuse = factory.create_texture(texture_info_float).unwrap();\n\n let texture_depth = factory.create_texture(texture_info_depth).unwrap();\n\n\n\n let frame = gfx::Frame {\n\n colors: vec![\n\n gfx::Plane::Texture(texture_pos .clone(), 0, None),\n\n gfx::Plane::Texture(texture_normal .clone(), 0, None),\n\n gfx::Plane::Texture(texture_diffuse.clone(), 0, None),\n\n ],\n\n depth: Some(gfx::Plane::Texture(texture_depth .clone(), 0, None)),\n\n .. gfx::Frame::empty(width, height)\n\n };\n\n\n\n (frame, texture_pos, texture_normal, texture_diffuse, texture_depth)\n\n}\n\n\n", "file_path": "src/deferred/main.rs", "rank": 87, "score": 4.287241381426024 }, { "content": "static EMITTER_VERTEX_SRC: &'static [u8] = b\"\n\n #version 150 core\n\n\n\n uniform mat4 u_Transform;\n\n uniform float u_Radius;\n\n in vec3 a_Pos;\n\n\n\n const int NUM_LIGHTS = 250;\n\n layout(std140)\n\n uniform u_LightPosBlock {\n\n vec4 offs[NUM_LIGHTS];\n\n };\n\n\n\n void main() {\n\n gl_Position = u_Transform * vec4(u_Radius * a_Pos + offs[gl_InstanceID].xyz, 1.0);\n\n }\n\n\";\n\n\n\nstatic EMITTER_FRAGMENT_SRC: &'static [u8] = b\"\n\n #version 150 core\n\n\n\n out vec4 o_Color;\n\n\n\n void main() {\n\n o_Color = vec4(1.0, 1.0, 1.0, 1.0);\n\n }\n\n\";\n\n\n", "file_path": "src/deferred/main.rs", "rank": 88, "score": 4.239292746519429 }, { "content": "\n\n println!(\"total time:\\t\\t{0:4.2}ms\", swap - start);\n\n println!(\"\\tsubmit:\\t\\t{0:4.2}ms\", submit - start);\n\n println!(\"\\tgpu wait:\\t{0:4.2}ms\", swap - submit)\n\n\n\n }\n\n\n\n // Cleanup\n\n unsafe {\n\n gl.DeleteProgram(program);\n\n gl.DeleteShader(fs);\n\n gl.DeleteShader(vs);\n\n gl.DeleteBuffers(1, &vbo);\n\n gl.DeleteVertexArrays(1, &vao);\n\n }\n\n}\n\n\n", "file_path": "src/performance/main.rs", "rank": 89, "score": 4.1865786238856035 }, { "content": " match event {\n\n glfw::WindowEvent::Key(glfw::Key::Escape, _, glfw::Action::Press, _) =>\n\n stream.out.window.set_should_close(true),\n\n _ => {},\n\n }\n\n }\n\n\n\n stream.clear(gfx::ClearData {\n\n color: [0.3, 0.3, 0.3, 1.0],\n\n depth: 1.0,\n\n stencil: 0,\n\n });\n\n stream.draw(&batch).unwrap();\n\n stream.present(&mut device);\n\n }\n\n}\n", "file_path": "src/cube/main.rs", "rank": 90, "score": 4.093049436580693 }, { "content": " Vertex::new([-1, -1, -1], [1, 1]),\n\n // right (1, 0, 0)\n\n Vertex::new([ 1, -1, -1], [0, 0]),\n\n Vertex::new([ 1, 1, -1], [1, 0]),\n\n Vertex::new([ 1, 1, 1], [1, 1]),\n\n Vertex::new([ 1, -1, 1], [0, 1]),\n\n // left (-1, 0, 0)\n\n Vertex::new([-1, -1, 1], [1, 0]),\n\n Vertex::new([-1, 1, 1], [0, 0]),\n\n Vertex::new([-1, 1, -1], [0, 1]),\n\n Vertex::new([-1, -1, -1], [1, 1]),\n\n // front (0, 1, 0)\n\n Vertex::new([ 1, 1, -1], [1, 0]),\n\n Vertex::new([-1, 1, -1], [0, 0]),\n\n Vertex::new([-1, 1, 1], [0, 1]),\n\n Vertex::new([ 1, 1, 1], [1, 1]),\n\n // back (0, -1, 0)\n\n Vertex::new([ 1, -1, 1], [0, 0]),\n\n Vertex::new([-1, -1, 1], [1, 0]),\n\n Vertex::new([-1, -1, -1], [1, 1]),\n", "file_path": "src/cube/main.rs", "rank": 91, "score": 4.037127547071414 }, { "content": " Vertex::new([-1, -1, 1], [-1, 0, 0]),\n\n Vertex::new([-1, 1, 1], [-1, 0, 0]),\n\n Vertex::new([-1, 1, -1], [-1, 0, 0]),\n\n Vertex::new([-1, -1, -1], [-1, 0, 0]),\n\n // front (0, 1, 0)\n\n Vertex::new([ 1, 1, -1], [0, 1, 0]),\n\n Vertex::new([-1, 1, -1], [0, 1, 0]),\n\n Vertex::new([-1, 1, 1], [0, 1, 0]),\n\n Vertex::new([ 1, 1, 1], [0, 1, 0]),\n\n // back (0, -1, 0)\n\n Vertex::new([ 1, -1, 1], [0, -1, 0]),\n\n Vertex::new([-1, -1, 1], [0, -1, 0]),\n\n Vertex::new([-1, -1, -1], [0, -1, 0]),\n\n Vertex::new([ 1, -1, -1], [0, -1, 0]),\n\n ];\n\n\n\n let mesh = factory.create_mesh(&vertex_data);\n\n\n\n let index_data: &[u8] = &[\n\n 0, 1, 2, 2, 3, 0, // top\n", "file_path": "src/shadow/main.rs", "rank": 92, "score": 3.982809895984945 }, { "content": " vec4 tex = texture(u_Tex, v_TexCoord);\n\n o_Color = tex;\n\n }\n\n\";\n\n\n\nstatic LIGHT_VERTEX_SRC: &'static [u8] = b\"\n\n #version 150 core\n\n\n\n uniform mat4 u_Transform;\n\n uniform float u_Radius;\n\n in vec3 a_Pos;\n\n out vec3 v_LightPos;\n\n\n\n const int NUM_LIGHTS = 250;\n\n layout(std140)\n\n uniform u_LightPosBlock {\n\n vec4 offs[NUM_LIGHTS];\n\n };\n\n\n\n void main() {\n", "file_path": "src/deferred/main.rs", "rank": 93, "score": 3.981141597202032 }, { "content": " // top (0, 0, 1)\n\n CubeVertex { pos: Floater::cast3([-1, -1, 1]) },\n\n CubeVertex { pos: Floater::cast3([ 1, -1, 1]) },\n\n CubeVertex { pos: Floater::cast3([ 1, 1, 1]) },\n\n CubeVertex { pos: Floater::cast3([-1, 1, 1]) },\n\n // bottom (0, 0, -1)\n\n CubeVertex { pos: Floater::cast3([-1, 1, -1]) },\n\n CubeVertex { pos: Floater::cast3([ 1, 1, -1]) },\n\n CubeVertex { pos: Floater::cast3([ 1, -1, -1]) },\n\n CubeVertex { pos: Floater::cast3([-1, -1, -1]) },\n\n // right (1, 0, 0)\n\n CubeVertex { pos: Floater::cast3([ 1, -1, -1]) },\n\n CubeVertex { pos: Floater::cast3([ 1, 1, -1]) },\n\n CubeVertex { pos: Floater::cast3([ 1, 1, 1]) },\n\n CubeVertex { pos: Floater::cast3([ 1, -1, 1]) },\n\n // left (-1, 0, 0)\n\n CubeVertex { pos: Floater::cast3([-1, -1, 1]) },\n\n CubeVertex { pos: Floater::cast3([-1, 1, 1]) },\n\n CubeVertex { pos: Floater::cast3([-1, 1, -1]) },\n\n CubeVertex { pos: Floater::cast3([-1, -1, -1]) },\n", "file_path": "src/deferred/main.rs", "rank": 94, "score": 3.8256642758255484 }, { "content": "// Copyright 2014 The Gfx-rs Developers.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n\n\n// This is an example of deferred shading with gfx-rs.\n\n//\n\n// Two render targets are created: a geometry buffer and a result buffer.\n\n//\n\n// Rendering happens in two passes:\n", "file_path": "src/deferred/main.rs", "rank": 95, "score": 3.716420804011366 }, { "content": " // front (0, 1, 0)\n\n CubeVertex { pos: Floater::cast3([ 1, 1, -1]) },\n\n CubeVertex { pos: Floater::cast3([-1, 1, -1]) },\n\n CubeVertex { pos: Floater::cast3([-1, 1, 1]) },\n\n CubeVertex { pos: Floater::cast3([ 1, 1, 1]) },\n\n // back (0, -1, 0)\n\n CubeVertex { pos: Floater::cast3([ 1, -1, 1]) },\n\n CubeVertex { pos: Floater::cast3([-1, -1, 1]) },\n\n CubeVertex { pos: Floater::cast3([-1, -1, -1]) },\n\n CubeVertex { pos: Floater::cast3([ 1, -1, -1]) },\n\n ];\n\n\n\n let index_data: &[u8] = &[\n\n 0, 1, 2, 2, 3, 0, // top\n\n 4, 5, 6, 6, 7, 4, // bottom\n\n 8, 9, 10, 10, 11, 8, // right\n\n 12, 13, 14, 14, 15, 12, // left\n\n 16, 17, 18, 18, 19, 16, // front\n\n 20, 21, 22, 22, 23, 20, // back\n\n ];\n", "file_path": "src/deferred/main.rs", "rank": 96, "score": 3.6914426912620613 }, { "content": " }\n\n\n\n let cube_descs = vec![\n\n CubeDesc {\n\n offset: cgmath::vec3(-2.0, -2.0, 2.0),\n\n angle: 10.0,\n\n scale: 0.7,\n\n },\n\n CubeDesc {\n\n offset: cgmath::vec3(2.0, -2.0, 2.0),\n\n angle: 50.0,\n\n scale: 1.3,\n\n },\n\n CubeDesc {\n\n offset: cgmath::vec3(-2.0, 2.0, 2.0),\n\n angle: 140.0,\n\n scale: 1.1,\n\n },\n\n CubeDesc {\n\n offset: cgmath::vec3(2.0, 2.0, 2.0),\n", "file_path": "src/shadow/main.rs", "rank": 97, "score": 3.5028852498002987 }, { "content": " #version 150 core\n\n out vec4 o_Color;\n\n\n\n void main() {\n\n o_Color = vec4(1., 0., 0., 1.);\n\n }\n\n\";\n\n\n\n//----------------------------------------\n\n\n", "file_path": "src/performance/main.rs", "rank": 98, "score": 3.3859751392984236 }, { "content": " }\n\n\n\n let pre_submit = precise_time_s() * 1000.;\n\n stream.flush(&mut device);\n\n let post_submit = precise_time_s() * 1000.;\n\n stream.out.window.swap_buffers();\n\n device.cleanup();\n\n let swap = precise_time_s() * 1000.;\n\n\n\n println!(\"total time:\\t\\t{0:4.2}ms\", swap - start);\n\n println!(\"\\tcreate list:\\t{0:4.2}ms\", pre_submit - start);\n\n println!(\"\\tsubmit:\\t\\t{0:4.2}ms\", post_submit - pre_submit);\n\n println!(\"\\tgpu wait:\\t{0:4.2}ms\", swap - post_submit)\n\n }\n\n}\n\n\n\nstatic VS_SRC: &'static str = \"\n\n #version 150 core\n\n in vec3 a_Pos;\n\n uniform mat4 u_Transform;\n", "file_path": "src/performance/main.rs", "rank": 99, "score": 3.2374134845071003 } ]
Rust
src/search/aggregations/bucket/diversified_sampler_aggregation.rs
ypenglyn/elasticsearch-dsl-rs
bff3508055fb20eb54bde78edffa69da3ccbf4eb
use crate::search::*; use crate::util::*; use std::convert::TryInto; #[derive(Debug, Clone, Serialize, PartialEq)] pub struct DiversifiedSamplerAggregation { diversified_sampler: DiversifiedSamplerAggregationInner, #[serde(skip_serializing_if = "ShouldSkip::should_skip")] aggs: Aggregations, } #[derive(Debug, Clone, Serialize, PartialEq, Copy)] #[serde(rename_all = "snake_case")] pub enum ExecutionHint { Map, BytesHash, GlobalOrdinals, } #[derive(Debug, Clone, Serialize, PartialEq)] struct DiversifiedSamplerAggregationInner { field: String, #[serde(skip_serializing_if = "ShouldSkip::should_skip")] shard_size: Option<u64>, #[serde(skip_serializing_if = "ShouldSkip::should_skip")] max_docs_per_value: Option<u64>, #[serde(skip_serializing_if = "ShouldSkip::should_skip")] execution_hint: Option<ExecutionHint>, } impl Aggregation { pub fn diversified_sampler(field: impl Into<String>) -> DiversifiedSamplerAggregation { DiversifiedSamplerAggregation { diversified_sampler: DiversifiedSamplerAggregationInner { field: field.into(), shard_size: None, max_docs_per_value: None, execution_hint: None, }, aggs: Aggregations::new(), } } } impl DiversifiedSamplerAggregation { pub fn shard_size(mut self, shard_size: impl TryInto<u64>) -> Self { if let Ok(shard_size) = shard_size.try_into() { self.diversified_sampler.shard_size = Some(shard_size); } self } pub fn max_docs_per_value(mut self, max_docs_per_value: impl TryInto<u64>) -> Self { if let Ok(max_docs_per_value) = max_docs_per_value.try_into() { self.diversified_sampler.max_docs_per_value = Some(max_docs_per_value); } self } pub fn execution_hint(mut self, execution_hint: ExecutionHint) -> Self { self.diversified_sampler.execution_hint = Some(execution_hint); self } add_aggregate!(); } #[cfg(test)] mod tests { use super::*; #[test] fn serialization() { assert_serialize( Aggregation::diversified_sampler("catalog_id").shard_size(50), json!({ "diversified_sampler": { "field": "catalog_id", "shard_size": 50 } }), ); assert_serialize( Aggregation::diversified_sampler("catalog_id") .shard_size(50) .max_docs_per_value(2) .execution_hint(ExecutionHint::GlobalOrdinals) .aggregate("catalog", Aggregation::terms("catalog_id")) .aggregate("brand", Aggregation::terms("brand_id")), json!({ "diversified_sampler": { "field": "catalog_id", "shard_size": 50, "max_docs_per_value": 2, "execution_hint": "global_ordinals" }, "aggs": { "catalog": { "terms": { "field": "catalog_id" } }, "brand": { "terms": { "field": "brand_id" } } } }), ); } }
use crate::search::*; use crate::util::*; use std::convert::TryInto; #[derive(Debug, Clone, Serialize, PartialEq)] pub struct DiversifiedSamplerAggregation { diversified_sampler: DiversifiedSamplerAggregationInner, #[serde(skip_serializing_if = "ShouldSkip::should_skip")] aggs: Aggregations, } #[derive(Debug, Clone, Serialize, PartialEq, Copy)] #[serde(rename_all = "snake_case")] pub enum ExecutionHint { Map, BytesHash, GlobalOrdinals, } #[derive(Debug, Clone, Serialize, PartialEq)] struct DiversifiedSamplerAggregationInner { field: String, #[serde(skip_serializing_if = "ShouldSkip::should_skip")] shard_size: Option<u64>, #[serde(skip_serializing_if = "ShouldSkip::should_skip")] max_docs_per_value: Option<u64>, #[serde(skip_serializing_if = "ShouldSkip::should_skip")] execution_hint: Option<ExecutionHint>, } impl Aggregation { pub fn diversified_sampler(field: impl Into<String>) -> DiversifiedSamplerAggregation { DiversifiedSamplerAggregation { diversified_sampler: DiversifiedSamplerAggregationInner { field: field.into(), shard_size: None, max_docs_per_value: None, execution_hint: None, }, aggs: Aggregations::new(), } } } impl DiversifiedSamplerAggregation { pub fn shard_size(mut sel
pub fn max_docs_per_value(mut self, max_docs_per_value: impl TryInto<u64>) -> Self { if let Ok(max_docs_per_value) = max_docs_per_value.try_into() { self.diversified_sampler.max_docs_per_value = Some(max_docs_per_value); } self } pub fn execution_hint(mut self, execution_hint: ExecutionHint) -> Self { self.diversified_sampler.execution_hint = Some(execution_hint); self } add_aggregate!(); } #[cfg(test)] mod tests { use super::*; #[test] fn serialization() { assert_serialize( Aggregation::diversified_sampler("catalog_id").shard_size(50), json!({ "diversified_sampler": { "field": "catalog_id", "shard_size": 50 } }), ); assert_serialize( Aggregation::diversified_sampler("catalog_id") .shard_size(50) .max_docs_per_value(2) .execution_hint(ExecutionHint::GlobalOrdinals) .aggregate("catalog", Aggregation::terms("catalog_id")) .aggregate("brand", Aggregation::terms("brand_id")), json!({ "diversified_sampler": { "field": "catalog_id", "shard_size": 50, "max_docs_per_value": 2, "execution_hint": "global_ordinals" }, "aggs": { "catalog": { "terms": { "field": "catalog_id" } }, "brand": { "terms": { "field": "brand_id" } } } }), ); } }
f, shard_size: impl TryInto<u64>) -> Self { if let Ok(shard_size) = shard_size.try_into() { self.diversified_sampler.shard_size = Some(shard_size); } self }
function_block-function_prefixed
[ { "content": "#[doc(hidden)]\n\npub trait Origin: Debug + PartialEq + Serialize + Clone {\n\n type Scale: Debug + PartialEq + Serialize + Clone;\n\n type Offset: Debug + PartialEq + Serialize + Clone;\n\n}\n\n\n\nimpl Origin for DateTime<Utc> {\n\n type Scale = Time;\n\n type Offset = Time;\n\n}\n\n\n\nimpl Origin for GeoPoint {\n\n type Scale = Distance;\n\n type Offset = Distance;\n\n}\n\n\n\nmacro_rules! impl_origin_for_numbers {\n\n ($($name:ident ),+) => {\n\n $(\n\n impl Origin for $name {\n\n type Scale = Self;\n", "file_path": "src/search/queries/params/function_score_query.rs", "rank": 0, "score": 174394.90719398452 }, { "content": "#[doc(hidden)]\n\npub trait Origin: Debug + PartialEq + Serialize + Clone {\n\n type Pivot: Debug + PartialEq + Serialize + Clone;\n\n}\n\n\n\nimpl Origin for DateTime<Utc> {\n\n type Pivot = Time;\n\n}\n\n\n\nimpl Origin for GeoPoint {\n\n type Pivot = Distance;\n\n}\n\n\n\n/// Boosts the [relevance score](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-filter-context.html#relevance-scores)\n\n/// of documents closer to a provided `origin` date or point.\n\n/// For example, you can use this query to give more weight to documents\n\n/// closer to a certain date or location.\n\n///\n\n/// You can use the `distance_feature` query to find the nearest neighbors to a location.\n\n/// You can also use the query in a [bool](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-bool-query.html)\n\n/// search’s `should` filter to add boosted relevance scores to the `bool` query’s scores.\n", "file_path": "src/search/queries/specialized/distance_feature_query.rs", "rank": 1, "score": 174394.90719398452 }, { "content": "fn main() {\n\n let search = Search::new()\n\n .size(0)\n\n .query(Query::bool().must_not(Query::exists(\"country_id\")))\n\n .aggregate(\n\n \"country_ids\",\n\n Aggregation::terms(\"country_id\")\n\n .aggregate(\"catalog_ids\", Aggregation::terms(\"catalog_id\"))\n\n .aggregate(\"company_ids\", Aggregation::terms(\"company_id\"))\n\n .aggregate(\n\n \"top1\",\n\n Aggregation::top_hits()\n\n .size(1)\n\n .sort(Sort::new(SortField::Id).order(SortOrder::Desc)),\n\n ),\n\n );\n\n\n\n println!(\"{}\", serde_json::to_string_pretty(&search).unwrap());\n\n}\n", "file_path": "examples/aggregations/src/main.rs", "rank": 2, "score": 118970.19425156037 }, { "content": "#[derive(Debug, Clone, Serialize, PartialEq)]\n\nstruct MaxAggregationInner {\n\n field: String,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n missing: Option<Number>,\n\n}\n\n\n\nimpl Aggregation {\n\n /// Creates an instance of [`MaxAggregation`]\n\n ///\n\n /// - `field` - field to aggregate\n\n pub fn max(field: impl Into<String>) -> MaxAggregation {\n\n MaxAggregation {\n\n max: MaxAggregationInner {\n\n field: field.into(),\n\n missing: None,\n\n },\n\n }\n\n }\n\n}\n", "file_path": "src/search/aggregations/metrics/max_aggregation.rs", "rank": 3, "score": 113169.53016760357 }, { "content": "#[derive(Debug, Clone, Serialize, PartialEq)]\n\nstruct RateAggregationInner {\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n field: Option<String>,\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n unit: Option<CalendarInterval>,\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n mode: Option<RateMode>,\n\n}\n\n\n\nimpl Aggregation {\n\n /// Creates an instance of [`RateAggregation`]\n\n pub fn rate() -> RateAggregation {\n\n RateAggregation {\n\n rate: RateAggregationInner {\n\n field: None,\n\n unit: None,\n\n mode: None,\n\n },\n\n }\n\n }\n", "file_path": "src/search/aggregations/metrics/rate_aggregation.rs", "rank": 4, "score": 113169.53016760357 }, { "content": "#[derive(Debug, Clone, Serialize, PartialEq)]\n\nstruct SumAggregationInner {\n\n field: String,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n missing: Option<Number>,\n\n}\n\n\n\nimpl Aggregation {\n\n /// Creates an instance of [`SumAggregation`]\n\n ///\n\n /// - `field` - field to aggregate\n\n pub fn sum(field: impl Into<String>) -> SumAggregation {\n\n SumAggregation {\n\n sum: SumAggregationInner {\n\n field: field.into(),\n\n missing: None,\n\n },\n\n }\n\n }\n\n}\n", "file_path": "src/search/aggregations/metrics/sum_aggregation.rs", "rank": 5, "score": 113169.53016760357 }, { "content": "#[derive(Debug, Clone, Serialize, PartialEq)]\n\nstruct BoxplotAggregationInner {\n\n field: String,\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n compression: Option<Number>,\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n missing: Option<Number>,\n\n}\n\n\n\nimpl Aggregation {\n\n /// Creates an instance of [`BoxplotAggregation`]\n\n ///\n\n /// - `field` - field to aggregate\n\n pub fn boxplot(field: impl Into<String>) -> BoxplotAggregation {\n\n BoxplotAggregation {\n\n boxplot: BoxplotAggregationInner {\n\n field: field.into(),\n\n compression: None,\n\n missing: None,\n\n },\n\n }\n", "file_path": "src/search/aggregations/metrics/boxplot_aggregation.rs", "rank": 6, "score": 113169.53016760357 }, { "content": "#[derive(Debug, Clone, Serialize, PartialEq)]\n\nstruct AvgAggregationInner {\n\n field: String,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n missing: Option<Number>,\n\n}\n\n\n\nimpl Aggregation {\n\n /// Creates an instance of [`AvgAggregation`]\n\n ///\n\n /// - `field` - field to aggregate\n\n pub fn avg(field: impl Into<String>) -> AvgAggregation {\n\n AvgAggregation {\n\n avg: AvgAggregationInner {\n\n field: field.into(),\n\n missing: None,\n\n },\n\n }\n\n }\n\n}\n", "file_path": "src/search/aggregations/metrics/avg_aggregation.rs", "rank": 7, "score": 113169.53016760357 }, { "content": "#[derive(Debug, Clone, Serialize, PartialEq)]\n\nstruct TermsAggregationInner {\n\n field: String,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n size: Option<u64>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n show_term_doc_count_error: Option<bool>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n order: Vec<TermsAggregationOrder>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n min_doc_count: Option<u16>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n missing: Option<Term>,\n\n}\n\n\n\n/// Terms Aggregation sorting struct\n", "file_path": "src/search/aggregations/bucket/terms_aggregation.rs", "rank": 8, "score": 113169.53016760357 }, { "content": "#[derive(Debug, Clone, Serialize, PartialEq)]\n\nstruct MinAggregationInner {\n\n field: String,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n missing: Option<Number>,\n\n}\n\n\n\nimpl Aggregation {\n\n /// Creates an instance of [`MinAggregation`]\n\n ///\n\n /// - `field` - field to aggregate\n\n pub fn min(field: impl Into<String>) -> MinAggregation {\n\n MinAggregation {\n\n min: MinAggregationInner {\n\n field: field.into(),\n\n missing: None,\n\n },\n\n }\n\n }\n\n}\n", "file_path": "src/search/aggregations/metrics/min_aggregation.rs", "rank": 9, "score": 113169.53016760357 }, { "content": "#[derive(Debug, Clone, Serialize, PartialEq)]\n\nstruct CardinalityAggregationInner {\n\n field: String,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n precision_threshold: Option<u16>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n missing: Option<String>,\n\n}\n\n\n\nimpl Aggregation {\n\n /// Creates an instance of [`CardinalityAggregation`]\n\n ///\n\n /// - `field` - field to aggregate\n\n pub fn cardinality(field: impl Into<String>) -> CardinalityAggregation {\n\n CardinalityAggregation {\n\n cardinality: CardinalityAggregationInner {\n\n field: field.into(),\n\n precision_threshold: None,\n\n missing: None,\n", "file_path": "src/search/aggregations/metrics/cardinality_aggregation.rs", "rank": 10, "score": 113169.53016760357 }, { "content": "#[derive(Debug, Clone, Serialize, PartialEq)]\n\nstruct SamplerAggregationInner {\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n shard_size: Option<u64>,\n\n}\n\n\n\nimpl Aggregation {\n\n /// Creates an instance of [`SamplerAggregation`]\n\n pub fn sampler() -> SamplerAggregation {\n\n SamplerAggregation {\n\n sampler: SamplerAggregationInner { shard_size: None },\n\n aggs: Aggregations::new(),\n\n }\n\n }\n\n}\n\n\n\nimpl SamplerAggregation {\n\n /// The shard_size parameter limits how many top-scoring documents are\n\n /// collected in the sample processed on each shard. The default value is 100.\n\n pub fn shard_size(mut self, shard_size: impl TryInto<u64>) -> Self {\n\n if let Ok(shard_size) = shard_size.try_into() {\n", "file_path": "src/search/aggregations/bucket/sampler_aggregation.rs", "rank": 11, "score": 113169.53016760357 }, { "content": "#[derive(Debug, Clone, Serialize, PartialEq)]\n\nstruct TopHitsAggregationInner {\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _source: Option<SourceFilter>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n from: Option<u64>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n size: Option<u64>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n sort: Vec<Sort>,\n\n}\n\n\n\nimpl Aggregation {\n\n /// Creates an instance of [`TopHitsAggregation`]\n\n pub fn top_hits() -> TopHitsAggregation {\n\n TopHitsAggregation {\n\n top_hits: TopHitsAggregationInner {\n\n _source: None,\n", "file_path": "src/search/aggregations/metrics/top_hits_aggregation.rs", "rank": 13, "score": 110915.6599220623 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize, Default)]\n\nstruct Inner {\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`MatchNoneQuery`]\n\n pub fn match_none() -> MatchNoneQuery {\n\n MatchNoneQuery::default()\n\n }\n\n}\n\n\n\nimpl MatchNoneQuery {\n\n add_boost_and_name!();\n\n}\n\n\n\nimpl ShouldSkip for MatchNoneQuery {}\n", "file_path": "src/search/queries/match_none_query.rs", "rank": 14, "score": 102511.03750736034 }, { "content": "#[derive(Debug, Clone, PartialEq, PartialOrd, Serialize)]\n\nstruct RuntimeScript {\n\n source: String,\n\n}\n\n\n\nimpl RuntimeMapping {\n\n /// Creates a new instance of [RuntimeMapping]\n\n pub fn new<T>(r#type: RuntimeDataType, source: T) -> Self\n\n where\n\n T: ToString,\n\n {\n\n RuntimeMapping {\n\n r#type,\n\n script: RuntimeScript {\n\n source: source.to_string(),\n\n },\n\n }\n\n }\n\n\n\n /// Creates a new instance of [RuntimeDataType::Boolean] [RuntimeMapping]\n\n pub fn boolean<T>(source: T) -> Self\n", "file_path": "src/search/runtime_mappings/mod.rs", "rank": 15, "score": 102433.34515664619 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize, Default)]\n\nstruct Inner {\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n fields: Vec<String>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n query: Text,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n auto_generate_synonyms_phrase_query: Option<bool>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n operator: Option<Operator>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n minimum_should_match: Option<MinimumShouldMatch>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n zero_terms_query: Option<ZeroTermsQuery>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n", "file_path": "src/search/queries/full_text/combined_fields_query.rs", "rank": 16, "score": 99431.22161954312 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize, Default)]\n\nstruct Inner {\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n query: Text,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n default_field: Option<String>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n allow_leading_wildcard: Option<bool>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n analyze_wildcard: Option<bool>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n analyzer: Option<String>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n auto_generate_synonyms_phrase_query: Option<bool>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n", "file_path": "src/search/queries/full_text/query_string_query.rs", "rank": 17, "score": 99224.64159055777 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize, Default)]\n\nstruct Inner {\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n query: Text,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n fields: Vec<String>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n default_operator: Option<Operator>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n analyze_wildcard: Option<bool>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n analyzer: Option<String>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n auto_generate_synonyms_phrase_query: Option<bool>,\n\n\n\n #[serde(\n", "file_path": "src/search/queries/full_text/simple_query_string_query.rs", "rank": 18, "score": 97793.44202324492 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct FieldValueFactorInner {\n\n field: String,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n factor: Option<f32>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n modifier: Option<FieldValueFactorModifier>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n missing: Option<f32>,\n\n}\n\n\n\nimpl FieldValueFactor {\n\n /// Creates an instance of [FieldValueFactor](FieldValueFactor)\n\n ///\n\n /// - `field` - Field to be extracted from the document.\n\n pub fn new(field: impl Into<String>) -> Self {\n\n Self {\n\n field_value_factor: FieldValueFactorInner {\n", "file_path": "src/search/queries/params/function_score_query.rs", "rank": 19, "score": 96609.74654589735 }, { "content": "#[derive(Debug, Clone, PartialEq)]\n\nstruct DecayFieldInner<T: Origin> {\n\n field: String,\n\n inner: DecayInner<T>,\n\n}\n\n\n", "file_path": "src/search/queries/params/function_score_query.rs", "rank": 20, "score": 92846.24987496296 }, { "content": "#[derive(Debug, Clone, Copy, Serialize, Deserialize)]\n\n#[serde(untagged)]\n\nenum N {\n\n /// Non-negative integers\n\n Pos(u64),\n\n\n\n /// Negative integers\n\n Neg(i64),\n\n\n\n /// 32-bit floats\n\n F32(f32),\n\n\n\n /// 64-bit floats\n\n F64(f64),\n\n}\n\n\n\nimpl std::fmt::Display for Number {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self.0 {\n\n N::Pos(value) => value.fmt(f),\n\n N::Neg(value) => value.fmt(f),\n\n N::F32(value) => value.fmt(f),\n", "file_path": "src/search/params/number.rs", "rank": 21, "score": 73799.76345961947 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize)]\n\n#[serde(untagged)]\n\nenum Inner {\n\n /// Boolean value\n\n Bool(bool),\n\n\n\n /// String value\n\n String(String),\n\n\n\n /// Number value\n\n Number(Number),\n\n\n\n /// Date\n\n Date(Date),\n\n}\n\n\n\nimpl From<bool> for Term {\n\n fn from(value: bool) -> Self {\n\n Self(Some(Inner::Bool(value)))\n\n }\n\n}\n\n\n", "file_path": "src/search/params/term.rs", "rank": 22, "score": 73794.57591346189 }, { "content": "#[derive(Debug, Clone, Copy, Serialize)]\n\n#[serde(untagged)]\n\nenum Inner {\n\n U64(u64),\n\n F32(f32),\n\n F64(f64),\n\n}\n\n\n\nimpl PartialEq for Inner {\n\n fn eq(&self, other: &Self) -> bool {\n\n match (self, other) {\n\n (Self::U64(value), Self::U64(other)) => value.eq(other),\n\n (Self::U64(value), Self::F32(other)) => (*value as f32).eq(other),\n\n (Self::U64(value), Self::F64(other)) => (*value as f64).eq(other),\n\n (Self::F32(value), Self::U64(other)) => value.eq(&(*other as f32)),\n\n (Self::F32(value), Self::F32(other)) => value.eq(other),\n\n (Self::F32(value), Self::F64(other)) => value.eq(&(*other as f32)),\n\n (Self::F64(value), Self::U64(other)) => value.eq(&(*other as f64)),\n\n (Self::F64(value), Self::F32(other)) => (*value as f32).eq(other),\n\n (Self::F64(value), Self::F64(other)) => value.eq(other),\n\n }\n\n }\n", "file_path": "src/search/queries/params/boost.rs", "rank": 23, "score": 72967.0954717364 }, { "content": "fn main() {\n\n let search = Search::new()\n\n .source(false)\n\n .from(0)\n\n .size(10)\n\n .stats(\"boolean-query\")\n\n .query(\n\n Query::bool()\n\n .must(Query::term(\"user.id\", \"kimchy\"))\n\n .filter(Query::term(\"tags\", \"production\"))\n\n .must_not(Query::range(\"age\").gte(10).lte(10))\n\n .should([Query::term(\"tags\", \"env1\"), Query::term(\"tags\", \"deployed\")])\n\n .minimum_should_match(\"1\")\n\n .boost(1),\n\n );\n\n\n\n println!(\"{}\", serde_json::to_string_pretty(&search).unwrap());\n\n}\n", "file_path": "examples/boolean-query/src/main.rs", "rank": 24, "score": 72875.7366178295 }, { "content": "fn main() {\n\n let search = Search::new()\n\n .size(10)\n\n .query(Query::term(\"user.id\", \"kimchy\").boost(1));\n\n\n\n println!(\"{}\", serde_json::to_string_pretty(&search).unwrap());\n\n}\n", "file_path": "examples/simple-query/src/main.rs", "rank": 25, "score": 72875.7366178295 }, { "content": "fn main() {\n\n let request = Request {\n\n category_id: 1,\n\n company_id: 2,\n\n brand_ids: vec![1, 2],\n\n user: User { id: 1 },\n\n country: Country { id: 1 },\n\n };\n\n\n\n let query = RequestQuery::new(&request)\n\n .category_id(|x| x.category_id)\n\n .company_id(|x| x.company_id)\n\n .brand_ids(|x| &x.brand_ids)\n\n .my_country_documents_only()\n\n .exclude_user_items()\n\n .finish();\n\n\n\n let search = Search::new().size(10).query(query);\n\n\n\n println!(\"{}\", serde_json::to_string_pretty(&search).unwrap());\n\n}\n\n\n", "file_path": "examples/domain-specific-dsl/src/main.rs", "rank": 26, "score": 72072.88386899463 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n rescore_query: Option<Query>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n rescore_query_weight: Option<f64>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n query_weight: Option<f64>,\n\n}\n\n\n\nimpl Rescore {\n\n /// Creates a new instance of [`Rescore`]\n\n ///\n\n /// - `query` - Second query which will be execute on top-k results returned by original query.\n\n pub fn new(query: impl Into<Option<Query>>) -> Self {\n\n Self {\n\n query: Inner {\n\n rescore_query: query.into(),\n\n rescore_query_weight: None,\n\n query_weight: None,\n", "file_path": "src/search/rescoring/mod.rs", "rank": 27, "score": 60012.350543315144 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize, Default)]\n\nstruct Inner {\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`MatchAllQuery`]\n\n pub fn match_all() -> MatchAllQuery {\n\n MatchAllQuery::default()\n\n }\n\n}\n\n\n\nimpl MatchAllQuery {\n\n add_boost_and_name!();\n\n}\n\n\n\nimpl ShouldSkip for MatchAllQuery {}\n", "file_path": "src/search/queries/match_all_query.rs", "rank": 28, "score": 59190.66611460561 }, { "content": "#[derive(Debug, Default, Clone, PartialEq, Eq, Serialize)]\n\nstruct SortInner {\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n order: Option<SortOrder>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n mode: Option<SortMode>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n unmapped_type: Option<String>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n missing: Option<SortMissing>,\n\n}\n\n\n\nimpl Sort {\n\n /// Creates an instance of [`Sort`]\n\n pub fn new(field: impl Into<SortField>) -> Self {\n\n Self(KeyValuePair::new(field.into(), Default::default()))\n\n }\n\n\n", "file_path": "src/search/sort/mod.rs", "rank": 29, "score": 59190.607114638595 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n #[serde(flatten)]\n\n pair: KeyValuePair<String, InlineShape>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n ignore_unmapped: Option<bool>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n", "file_path": "src/search/queries/shape/shape_query.rs", "rank": 30, "score": 58398.47856602773 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n #[serde(flatten)]\n\n values: PinnedQueryValues,\n\n\n\n /// Any choice of query used to rank documents which will be ranked below\n\n /// the \"pinned\" documents.\n\n organic: Box<Query>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`PinnedQuery`]\n\n pub fn pinned<Q>(values: PinnedQueryValues, organic: Q) -> PinnedQuery\n\n where\n\n Q: Into<Query>,\n", "file_path": "src/search/queries/specialized/pinned_query.rs", "rank": 31, "score": 58398.47856602773 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n parent_type: String,\n\n\n\n query: Box<Query>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n score: Option<bool>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n ignore_unmapped: Option<bool>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`HasParentQuery`]\n", "file_path": "src/search/queries/joining/has_parent_query.rs", "rank": 32, "score": 58398.47856602773 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n positive: Box<Query>,\n\n negative: Box<Query>,\n\n negative_boost: NegativeBoost,\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`BoostingQuery`]\n\n ///\n\n /// - `positive` - Query you wish to run. Any returned documents must match this query.\n\n /// - `negative` - Query used to decrease the\n\n /// [relevance score](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-filter-context.html#relevance-scores)\n\n /// of matching documents.<br>\n\n /// If a returned document matches the `positive` query and this query, the `boosting` query\n\n /// calculates the final\n\n /// [relevance score](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-filter-context.html#relevance-scores)\n", "file_path": "src/search/queries/compound/boosting_query.rs", "rank": 33, "score": 58398.47856602773 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n path: String,\n\n\n\n query: Box<Query>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n score_mode: Option<NestedQueryScoreMode>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n ignore_unmapped: Option<bool>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n inner_hits: Option<Box<InnerHits>>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n", "file_path": "src/search/queries/joining/nested_query.rs", "rank": 34, "score": 58398.47856602773 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n fields: Option<Vec<String>>,\n\n\n\n like: Vec<Like>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n unlike: Option<Vec<Like>>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n min_term_freq: Option<i64>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n max_query_terms: Option<i64>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n min_doc_freq: Option<i64>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n max_doc_freq: Option<i64>,\n", "file_path": "src/search/queries/specialized/more_like_this_query.rs", "rank": 35, "score": 58398.47856602773 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n field: String,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n name: Option<String>,\n\n\n\n #[serde(flatten)]\n\n source: PercolateSource,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`PercolateQuery`]\n\n ///\n\n /// - `field` - The field of type `percolator` that holds the indexed queries\n\n /// - `source` - [Source](PercolateSource) to percolate\n\n pub fn percolate<S, T>(field: S, source: T) -> PercolateQuery\n\n where\n\n S: ToString,\n\n T: Serialize,\n\n {\n", "file_path": "src/search/queries/specialized/percolate_query.rs", "rank": 36, "score": 58398.47856602773 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n r#type: String,\n\n\n\n query: Box<Query>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n ignore_unmapped: Option<bool>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n max_children: Option<u32>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n min_children: Option<u32>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n score_mode: Option<HasChildScoreMode>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n", "file_path": "src/search/queries/joining/has_child_query.rs", "rank": 37, "score": 58398.47856602773 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n script: Script,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`ScriptQuery`]\n\n ///\n\n /// - `script` - Contains a script to run as a query. This script must\n\n /// return a boolean value, `true` or `false`\n\n pub fn script(script: Script) -> ScriptQuery {\n\n ScriptQuery {\n\n inner: Inner {\n\n script,\n\n boost: None,\n", "file_path": "src/search/queries/specialized/script_query.rs", "rank": 38, "score": 58398.47856602773 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize, Default)]\n\nstruct Inner {\n\n query: String,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`WrapperQuery`]\n\n pub fn wrapper<S>(query: S) -> WrapperQuery\n\n where\n\n S: ToString,\n\n {\n\n WrapperQuery {\n\n inner: Inner {\n\n query: query.to_string(),\n\n },\n\n }\n\n }\n\n}\n\n\n\nimpl ShouldSkip for WrapperQuery {}\n\n\n", "file_path": "src/search/queries/specialized/wrapper_query.rs", "rank": 39, "score": 58398.418805387075 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize, Default)]\n\nstruct Inner {\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n must: Queries,\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n filter: Queries,\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n should: Queries,\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n must_not: Queries,\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n minimum_should_match: Option<MinimumShouldMatch>,\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`BoolQuery`]\n\n pub fn bool() -> BoolQuery {\n", "file_path": "src/search/queries/compound/bool_query.rs", "rank": 40, "score": 58398.418805387075 }, { "content": "struct Request {\n\n category_id: i32,\n\n company_id: i32,\n\n brand_ids: Vec<i32>,\n\n user: User,\n\n country: Country,\n\n}\n", "file_path": "examples/domain-specific-dsl/src/main.rs", "rank": 41, "score": 58389.148401797276 }, { "content": "struct Country {\n\n id: i32,\n\n}\n\n\n", "file_path": "examples/domain-specific-dsl/src/main.rs", "rank": 42, "score": 58389.148401797276 }, { "content": "struct User {\n\n id: i32,\n\n}\n\n\n", "file_path": "examples/domain-specific-dsl/src/main.rs", "rank": 43, "score": 58389.148401797276 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n gt: Term,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n gte: Term,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n lt: Term,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n lte: Term,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n format: Option<String>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n relation: Option<RangeRelation>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n", "file_path": "src/search/queries/term_level/range_query.rs", "rank": 44, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Logarithm {\n\n scaling_factor: f64,\n\n}\n\n\n", "file_path": "src/search/queries/specialized/rank_feature_query.rs", "rank": 45, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n filter: Box<Query>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`ConstantScoreQuery`]\n\n ///\n\n /// - `filter` - [Filter query](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-bool-query.html)\n\n /// you wish to run. Any returned documents must match this query.<br>\n\n /// Filter queries do not calculate\n\n /// [relevance scores](https://www.elastic.co/guide/en/elasticsearch/reference/current/query-filter-context.html#relevance-scores).\n\n /// To speed up performance, Elasticsearch automatically caches frequently used filter queries.\n\n pub fn constant_score(filter: impl Into<Query>) -> ConstantScoreQuery {\n\n ConstantScoreQuery {\n", "file_path": "src/search/queries/compound/constant_score_query.rs", "rank": 46, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n value: String,\n\n\n\n #[serde(\n\n skip_serializing_if = \"ShouldSkip::should_skip\",\n\n serialize_with = \"join_with_pipe\"\n\n )]\n\n flags: Vec<RegexpFlag>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n case_insensitive: Option<bool>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n max_determinized_states: Option<u64>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n rewrite: Option<Rewrite>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n", "file_path": "src/search/queries/term_level/regexp_query.rs", "rank": 47, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Linear {}\n\n\n", "file_path": "src/search/queries/specialized/rank_feature_query.rs", "rank": 48, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n field: String,\n\n\n\n index: String,\n\n\n\n id: String,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n routing: Option<String>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n preference: Option<String>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n version: Option<u64>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n name: Option<String>,\n\n}\n\n\n", "file_path": "src/search/queries/specialized/percolate_lookup_query.rs", "rank": 49, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n value: Term,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n rewrite: Option<Rewrite>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n case_insensitive: Option<bool>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`WildcardQuery`]\n\n ///\n\n /// - `field` - Field you wish to search.\n", "file_path": "src/search/queries/term_level/wildcard_query.rs", "rank": 50, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n #[serde(flatten)]\n\n pair: KeyValuePair<String, InlineShape>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n ignore_unmapped: Option<bool>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n", "file_path": "src/search/queries/geo/geo_shape_query.rs", "rank": 51, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n value: Term,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n rewrite: Option<Rewrite>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n case_insensitive: Option<bool>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`PrefixQuery`]\n\n ///\n\n /// - `field` - Field you wish to search.\n", "file_path": "src/search/queries/term_level/prefix_query.rs", "rank": 52, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n value: Term,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`TermQuery`]\n\n ///\n\n /// - `field` - Field you wish to search.\n\n /// - `value` - Term you wish to find in the provided field.\n\n /// To return a document, the term must exactly match the field value, including whitespace and capitalization.\n\n pub fn term(field: impl Into<String>, value: impl Into<Term>) -> TermQuery {\n\n TermQuery {\n\n field: field.into(),\n\n inner: Inner {\n", "file_path": "src/search/queries/term_level/term_query.rs", "rank": 53, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n #[serde(flatten)]\n\n pair: KeyValuePair<String, Terms>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`TermsQuery`]\n\n ///\n\n /// - `field` - Field you wish to search.\n\n /// - `values` - An array of terms you wish to find in the provided field. To return a\n\n /// document, one or more terms must exactly match a field value,\n\n /// including whitespace and capitalization.<br>\n\n /// By default, Elasticsearch limits the `terms` query to a maximum of\n\n /// 65,536 terms. You can change this limit using the\n", "file_path": "src/search/queries/term_level/terms_query.rs", "rank": 54, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n values: BTreeSet<String>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`IdsQuery`]\n\n ///\n\n /// - `values` - An array of\n\n /// [document IDs](https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-id-field.html).\n\n pub fn ids<I>(values: I) -> IdsQuery\n\n where\n\n I: IntoIterator,\n\n I::Item: ToString,\n", "file_path": "src/search/queries/term_level/ids_query.rs", "rank": 55, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct InlineShape {\n\n shape: Shape,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n relation: Option<SpatialRelation>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`ShapeQuery`]\n\n ///\n\n /// - `field` - Field you wish to search\n\n /// - `shape` - Shape you with to search\n\n pub fn shape<S, T>(field: S, shape: T) -> ShapeQuery\n\n where\n\n S: ToString,\n\n T: Into<Shape>,\n\n {\n\n ShapeQuery {\n\n inner: Inner {\n\n pair: KeyValuePair::new(\n", "file_path": "src/search/queries/shape/shape_query.rs", "rank": 56, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n #[serde(flatten)]\n\n pair: KeyValuePair<String, GeoPoint>,\n\n\n\n distance: Distance,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n distance_type: Option<DistanceType>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n validation_method: Option<ValidationMethod>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n\nimpl Query {\n", "file_path": "src/search/queries/geo/geo_distance_query.rs", "rank": 57, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n #[serde(flatten)]\n\n pair: KeyValuePair<String, Shape>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n ignore_unmapped: Option<bool>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n", "file_path": "src/search/queries/shape/shape_lookup_query.rs", "rank": 58, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Shape {\n\n indexed_shape: IndexedShape,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n relation: Option<SpatialRelation>,\n\n}\n\n\n", "file_path": "src/search/queries/shape/shape_lookup_query.rs", "rank": 59, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Saturation {\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n pivot: Option<f64>,\n\n}\n\n\n", "file_path": "src/search/queries/specialized/rank_feature_query.rs", "rank": 60, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n value: Term,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n fuzziness: Option<Fuzziness>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n max_expansions: Option<u8>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n prefix_length: Option<u8>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n transpositions: Option<bool>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n rewrite: Option<Rewrite>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n", "file_path": "src/search/queries/term_level/fuzzy_query.rs", "rank": 61, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n query: Box<Query>,\n\n\n\n script: Script,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n min_score: Option<f32>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`ScriptScoreQuery`]\n\n ///\n\n /// - `query` - Query used to return documents\n\n /// - `script` - Script used to compute the score of documents returned by\n", "file_path": "src/search/queries/specialized/script_score_query.rs", "rank": 62, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n query: Box<Query>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n functions: Vec<Function>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n max_boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n min_score: Option<f32>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n score_mode: Option<FunctionScoreMode>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost_mode: Option<FunctionScoreBoostMode>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n", "file_path": "src/search/queries/compound/function_score_query.rs", "rank": 63, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n r#type: String,\n\n\n\n id: String,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n ignore_unmapped: Option<bool>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`ParentIdQuery`]\n\n ///\n\n /// - `type` - Name of the child relationship mapped for the join field\n\n /// - `id` - ID of the parent document. The query will return child documents of this\n", "file_path": "src/search/queries/joining/parent_id_query.rs", "rank": 64, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Sigmoid {\n\n pivot: f64,\n\n exponent: f64,\n\n}\n\n\n", "file_path": "src/search/queries/specialized/rank_feature_query.rs", "rank": 65, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n query: Text,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n analyzer: Option<String>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n auto_generate_synonyms_phrase_query: Option<bool>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n fuzziness: Option<Fuzziness>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n max_expansions: Option<u8>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n prefix_length: Option<u8>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n", "file_path": "src/search/queries/full_text/match_query.rs", "rank": 66, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n field: String,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`ExistsQuery`]\n\n ///\n\n /// - `field` - Name of the field you wish to search.\n\n /// While a field is deemed non-existent if the JSON value is `null` or `[]`,\n\n /// these values will indicate the field does exist:\n\n /// - Empty strings, such as `\"\"` or `\"-\"`\n\n /// - Arrays containing `null` and another value, such as `[null, \"foo\"]`\n\n /// - A custom [`null-value`](https://www.elastic.co/guide/en/elasticsearch/reference/current/null-value.html), defined in field mapping\n\n pub fn exists(field: impl Into<String>) -> ExistsQuery {\n", "file_path": "src/search/queries/term_level/exists_query.rs", "rank": 67, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n field: String,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n", "file_path": "src/search/queries/specialized/rank_feature_query.rs", "rank": 68, "score": 57634.06069845435 }, { "content": "#[derive(Debug, Clone, Default, PartialEq, Serialize)]\n\nstruct Inner {\n\n queries: Queries,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n tie_breaker: Option<TieBreaker>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`DisMaxQuery`]\n\n pub fn dis_max() -> DisMaxQuery {\n\n DisMaxQuery::default()\n\n }\n\n}\n\n\n", "file_path": "src/search/queries/compound/dis_max_query.rs", "rank": 69, "score": 57634.0009378137 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n #[serde(flatten)]\n\n pair: KeyValuePair<String, TermsLookup>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n", "file_path": "src/search/queries/term_level/terms_lookup_query.rs", "rank": 70, "score": 56896.031226321844 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n query: Text,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n analyzer: Option<String>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n slop: Option<u8>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`MatchPhraseQuery`]\n\n ///\n", "file_path": "src/search/queries/full_text/match_phrase_query.rs", "rank": 71, "score": 56896.031226321844 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct ScriptInner {\n\n source: String,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n params: Option<serde_json::Value>,\n\n}\n\n\n\nimpl Script {\n\n /// Creates an instance of [Script](Script)\n\n ///\n\n /// - `source` - script source\n\n pub fn new(source: impl Into<String>) -> Self {\n\n Self {\n\n script_score: ScriptInnerWrapper {\n\n script: ScriptInner {\n\n source: source.into(),\n\n params: None,\n\n },\n\n },\n\n }\n", "file_path": "src/search/queries/params/function_score_query.rs", "rank": 72, "score": 56896.031226321844 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct InnerSigmoid {\n\n field: String,\n\n\n\n sigmoid: Sigmoid,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n", "file_path": "src/search/queries/specialized/rank_feature_query.rs", "rank": 73, "score": 56896.031226321844 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct InnerSaturation {\n\n field: String,\n\n\n\n saturation: Saturation,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n", "file_path": "src/search/queries/specialized/rank_feature_query.rs", "rank": 74, "score": 56896.031226321844 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct InnerLinear {\n\n field: String,\n\n\n\n linear: Linear,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`RankFeatureQuery`]\n\n ///\n\n /// - `field` - `rank_feature` or `rank_features` field used to boost relevance scores\n\n pub fn rank_feature(field: impl Into<String>) -> RankFeatureQuery {\n\n RankFeatureQuery {\n\n inner: Inner {\n\n field: field.into(),\n", "file_path": "src/search/queries/specialized/rank_feature_query.rs", "rank": 75, "score": 56896.031226321844 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n #[serde(flatten)]\n\n pair: KeyValuePair<String, GeoBoundingBox>,\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n validation_method: Option<ValidationMethod>,\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`GeoBoundingBoxQuery`]\n\n ///\n\n /// - `field` - Field you wish to search.\n\n /// - `value` - A series of vertex coordinates of a geo bounding box\n\n pub fn geo_bounding_box(\n\n field: impl Into<String>,\n\n value: impl Into<GeoBoundingBox>,\n\n ) -> GeoBoundingBoxQuery {\n", "file_path": "src/search/queries/geo/geo_bounding_box_query.rs", "rank": 76, "score": 56896.031226321844 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct InlineShape {\n\n shape: GeoShape,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n relation: Option<SpatialRelation>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`GeoShapeQuery`]\n\n ///\n\n /// - `field` - Field you wish to search\n\n /// - `shape` - Shape you with to search\n\n pub fn geo_shape<S, T>(field: S, shape: T) -> GeoShapeQuery\n\n where\n\n S: ToString,\n\n T: Into<GeoShape>,\n\n {\n\n GeoShapeQuery {\n\n inner: Inner {\n\n pair: KeyValuePair::new(\n", "file_path": "src/search/queries/geo/geo_shape_query.rs", "rank": 77, "score": 56896.031226321844 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct IndexedShape {\n\n id: String,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n index: Option<String>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n path: Option<String>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n routing: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`ShapeLookupQuery`]\n\n ///\n\n /// - `field` - Field you wish to search\n\n /// - `id` - The ID of the document that containing the pre-indexed shape\n\n pub fn shape_lookup<S, T>(field: S, id: T) -> ShapeLookupQuery\n\n where\n", "file_path": "src/search/queries/shape/shape_lookup_query.rs", "rank": 78, "score": 56896.031226321844 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n terms: Terms,\n\n\n\n #[serde(flatten)]\n\n minimum_should_match: TermsSetMinimumShouldMatch,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`TermsSetQuery`]\n\n ///\n\n /// - `field` - Field you wish to search.\n\n /// - `value` - TermsSet you wish to find in the provided field.\n\n /// To return a document, the terms_set must exactly match the field value, including whitespace and capitalization.\n\n pub fn terms_set<S, T, U>(field: S, terms: T, minimum_should_match: U) -> TermsSetQuery\n", "file_path": "src/search/queries/term_level/terms_set_query.rs", "rank": 79, "score": 56896.031226321844 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n #[serde(flatten)]\n\n pair: KeyValuePair<String, Shape>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n ignore_unmapped: Option<bool>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n", "file_path": "src/search/queries/geo/geo_shape_lookup_query.rs", "rank": 80, "score": 56896.031226321844 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Shape {\n\n indexed_shape: IndexedShape,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n relation: Option<SpatialRelation>,\n\n}\n\n\n", "file_path": "src/search/queries/geo/geo_shape_lookup_query.rs", "rank": 81, "score": 56896.031226321844 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct InnerLogarithm {\n\n field: String,\n\n\n\n log: Logarithm,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n", "file_path": "src/search/queries/specialized/rank_feature_query.rs", "rank": 82, "score": 56896.031226321844 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize, Default)]\n\nstruct Inner {\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n fields: Vec<String>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n r#type: Option<MultiMatchQueryType>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n tie_breaker: Option<TieBreaker>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n query: Text,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n analyzer: Option<String>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n auto_generate_synonyms_phrase_query: Option<bool>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n", "file_path": "src/search/queries/full_text/multi_match_query.rs", "rank": 83, "score": 56895.97146568119 }, { "content": "struct RequestQuery<'a> {\n\n request: &'a Request,\n\n query: BoolQuery,\n\n}\n\n\n\nimpl<'a> RequestQuery<'a> {\n\n fn new(request: &'a Request) -> Self {\n\n Self {\n\n request,\n\n query: Query::bool(),\n\n }\n\n }\n\n\n\n fn exclude_user_items(mut self) -> Self {\n\n self.query = self\n\n .query\n\n .must_not(Query::term(\"user_id\", self.request.user.id));\n\n\n\n self\n\n }\n", "file_path": "examples/domain-specific-dsl/src/main.rs", "rank": 84, "score": 56308.879184433354 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct TermsLookup {\n\n index: String,\n\n id: String,\n\n path: String,\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n routing: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`TermsLookupQuery`]\n\n ///\n\n /// - `field` - Field you wish to search.\n\n /// - `index` - Name of the index from which to fetch field values.\n\n /// - `id` - [ID](https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-id-field.html)\n\n /// of the document from which to fetch field values.\n\n /// - `path` - Name of the field from which to fetch field values. Elasticsearch uses\n\n /// these values as search terms for the query. If the field values\n\n /// include an array of nested inner objects, you can access those objects\n\n /// using dot notation syntax.\n\n pub fn terms_lookup<S, T, U, V>(field: S, index: T, id: U, path: V) -> TermsLookupQuery\n", "file_path": "src/search/queries/term_level/terms_lookup_query.rs", "rank": 85, "score": 56183.046907713666 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct IndexedShape {\n\n id: String,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n index: Option<String>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n path: Option<String>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n routing: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [`GeoShapeLookupQuery`]\n\n ///\n\n /// - `field` - Field you wish to search\n\n /// - `id` - The ID of the document that containing the pre-indexed shape\n\n pub fn geo_shape_lookup<S, T>(field: S, id: T) -> GeoShapeLookupQuery\n\n where\n", "file_path": "src/search/queries/geo/geo_shape_lookup_query.rs", "rank": 86, "score": 56183.046907713666 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct ScriptInnerWrapper {\n\n script: ScriptInner,\n\n}\n\n\n", "file_path": "src/search/queries/params/function_score_query.rs", "rank": 87, "score": 56183.046907713666 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n query: Text,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n analyzer: Option<String>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n minimum_should_match: Option<MinimumShouldMatch>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n operator: Option<Operator>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n", "file_path": "src/search/queries/full_text/match_bool_prefix_query.rs", "rank": 88, "score": 56183.046907713666 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner {\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n query: Text,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n analyzer: Option<String>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n max_expansions: Option<u8>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n slop: Option<u8>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n zero_terms_query: Option<ZeroTermsQuery>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n", "file_path": "src/search/queries/full_text/match_phrase_prefix_query.rs", "rank": 89, "score": 56183.046907713666 }, { "content": "#[derive(Debug, Default, Clone, PartialEq, Serialize)]\n\nstruct RandomScoreInner {\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n seed: Term,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n field: Option<String>,\n\n}\n\n\n\nimpl RandomScore {\n\n /// Creates an instance of [RandomScore](RandomScore)\n\n pub fn new() -> Self {\n\n Default::default()\n\n }\n\n\n\n /// Sets seed value\n\n pub fn seed(mut self, seed: impl Into<Term>) -> Self {\n\n self.random_score.seed = seed.into();\n\n self\n\n }\n\n\n", "file_path": "src/search/queries/params/function_score_query.rs", "rank": 90, "score": 56182.987147073014 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\nstruct Inner<O: Origin> {\n\n field: String,\n\n\n\n origin: O,\n\n\n\n pivot: <O as Origin>::Pivot,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n boost: Option<Boost>,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n _name: Option<String>,\n\n}\n\n\n\nimpl Query {\n\n /// Creates an instance of [DistanceFeatureQuery](DistanceFeatureQuery)\n\n ///\n\n /// - `field` - Name of the field used to calculate distances. This field must meet the following criteria:<br>\n\n /// - Be a [`date`](https://www.elastic.co/guide/en/elasticsearch/reference/current/date.html),\n\n /// [`date_nanos`](https://www.elastic.co/guide/en/elasticsearch/reference/current/date_nanos.html) or\n", "file_path": "src/search/queries/specialized/distance_feature_query.rs", "rank": 91, "score": 53765.3407944307 }, { "content": "}\n\n\n\nimpl Aggregation {\n\n /// Creates an instance of [`TermsAggregation`]\n\n ///\n\n /// - `field` - field to group by\n\n pub fn terms(field: impl Into<String>) -> TermsAggregation {\n\n TermsAggregation {\n\n terms: TermsAggregationInner {\n\n field: field.into(),\n\n size: None,\n\n show_term_doc_count_error: None,\n\n order: vec![],\n\n min_doc_count: None,\n\n missing: None,\n\n },\n\n aggs: Aggregations::new(),\n\n }\n\n }\n\n}\n", "file_path": "src/search/aggregations/bucket/terms_aggregation.rs", "rank": 92, "score": 53658.04397615606 }, { "content": "use crate::search::*;\n\nuse crate::util::*;\n\nuse std::convert::TryInto;\n\n\n\n/// A filtering aggregation used to limit any sub aggregations' processing to a sample of the top-scoring documents.\n\n///\n\n/// <https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-sampler-aggregation.html>\n\n#[derive(Debug, Clone, Serialize, PartialEq)]\n\npub struct SamplerAggregation {\n\n sampler: SamplerAggregationInner,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n aggs: Aggregations,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, PartialEq)]\n", "file_path": "src/search/aggregations/bucket/sampler_aggregation.rs", "rank": 93, "score": 53657.46378370624 }, { "content": "/// Aggregation name\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]\n\npub struct AggregationName(String);\n\n\n\nimpl<T> From<T> for AggregationName\n\nwhere\n\n T: ToString,\n\n{\n\n fn from(value: T) -> Self {\n\n Self(value.to_string())\n\n }\n\n}\n", "file_path": "src/search/aggregations/params/aggregation_name.rs", "rank": 94, "score": 53657.12644870673 }, { "content": "use crate::search::*;\n\nuse crate::util::*;\n\nuse std::convert::TryInto;\n\n\n\n#[derive(Debug, Clone, Serialize, PartialEq)]\n\n/// A multi-bucket value source based aggregation where buckets are dynamically built - one per unique value.\n\n///\n\n/// <https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-terms-aggregation.html>\n\npub struct TermsAggregation {\n\n terms: TermsAggregationInner,\n\n\n\n #[serde(skip_serializing_if = \"ShouldSkip::should_skip\")]\n\n aggs: Aggregations,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, PartialEq)]\n", "file_path": "src/search/aggregations/bucket/terms_aggregation.rs", "rank": 95, "score": 53656.918035092356 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize)]\n\npub struct TermsAggregationOrder(KeyValuePair<SortField, SortOrder>);\n\n\n\nimpl TermsAggregationOrder {\n\n /// Creates an instance of [TermsAggregationOrder](TermsAggregationOrder)\n\n ///\n\n /// - `field` - Field to sort by\n\n /// - `order` - Ordering direction\n\n pub fn new(field: impl Into<SortField>, order: SortOrder) -> Self {\n\n Self(KeyValuePair::new(field.into(), order))\n\n }\n\n}\n\n\n\nimpl<K> From<(K, SortOrder)> for TermsAggregationOrder\n\nwhere\n\n K: Into<SortField>,\n\n{\n\n fn from((key, value): (K, SortOrder)) -> Self {\n\n Self::new(key, value)\n\n }\n", "file_path": "src/search/aggregations/bucket/terms_aggregation.rs", "rank": 96, "score": 53656.5200980321 }, { "content": "\n\nimpl AvgAggregation {\n\n /// The missing parameter defines how documents that are missing a value should be treated. By\n\n /// default they will be ignored but it is also possible to treat them as if they had a value.\n\n pub fn missing(mut self, missing: impl Into<Number>) -> Self {\n\n self.avg.missing = Some(missing.into());\n\n self\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn serialization() {\n\n assert_serialize(\n\n Aggregation::avg(\"test_field\"),\n\n json!({ \"avg\": { \"field\": \"test_field\" } }),\n\n );\n", "file_path": "src/search/aggregations/metrics/avg_aggregation.rs", "rank": 97, "score": 53656.36620061292 }, { "content": "\n\nimpl MaxAggregation {\n\n /// The `missing` parameter defines how documents that are missing a value should be treated. By\n\n /// default they will be ignored but it is also possible to treat them as if they had a value.\n\n pub fn missing(mut self, missing: impl Into<Number>) -> Self {\n\n self.max.missing = Some(missing.into());\n\n self\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn serialization() {\n\n assert_serialize(\n\n Aggregation::max(\"test_field\"),\n\n json!({ \"max\": { \"field\": \"test_field\" } }),\n\n );\n", "file_path": "src/search/aggregations/metrics/max_aggregation.rs", "rank": 98, "score": 53656.36620061292 }, { "content": "\n\nimpl MinAggregation {\n\n /// The `missing` parameter defines how documents that are missing a value should be treated. By\n\n /// default they will be ignored but it is also possible to treat them as if they had a value.\n\n pub fn missing(mut self, missing: impl Into<Number>) -> Self {\n\n self.min.missing = Some(missing.into());\n\n self\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn serialization() {\n\n assert_serialize(\n\n Aggregation::min(\"test_field\"),\n\n json!({ \"min\": { \"field\": \"test_field\" } }),\n\n );\n", "file_path": "src/search/aggregations/metrics/min_aggregation.rs", "rank": 99, "score": 53656.36620061292 } ]
Rust
utoipa-gen/src/schema/component/attr.rs
juhaku/utoipa
070b00c13b41040e9605c62a0d7c3b5fcf04899c
use std::mem; use proc_macro2::{Ident, TokenStream}; use proc_macro_error::{abort, ResultExt}; use quote::{quote, ToTokens}; use syn::{ parenthesized, parse::{Parse, ParseBuffer}, Attribute, Error, ExprPath, Token, }; use crate::{ parse_utils, schema::{ComponentPart, GenericType}, AnyValue, }; use super::xml::{Xml, XmlAttr}; #[cfg_attr(feature = "debug", derive(Debug))] pub struct ComponentAttr<T> where T: Sized, { inner: T, } impl<T> AsRef<T> for ComponentAttr<T> where T: Sized, { fn as_ref(&self) -> &T { &self.inner } } #[derive(Default)] #[cfg_attr(feature = "debug", derive(Debug))] pub struct Enum { default: Option<AnyValue>, example: Option<AnyValue>, } #[derive(Default)] #[cfg_attr(feature = "debug", derive(Debug))] pub struct Struct { example: Option<AnyValue>, xml_attr: Option<XmlAttr>, } #[derive(Default)] #[cfg_attr(feature = "debug", derive(Debug))] pub struct UnnamedFieldStruct { pub(super) ty: Option<Ident>, format: Option<ExprPath>, default: Option<AnyValue>, example: Option<AnyValue>, } #[derive(Default)] #[cfg_attr(feature = "debug", derive(Debug))] pub struct NamedField { example: Option<AnyValue>, pub(super) ty: Option<Ident>, format: Option<ExprPath>, default: Option<AnyValue>, write_only: Option<bool>, read_only: Option<bool>, xml_attr: Option<XmlAttr>, pub(super) xml: Option<Xml>, } impl Parse for ComponentAttr<Enum> { fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> { const EXPECTED_ATTRIBUTE_MESSAGE: &str = "unexpected attribute, expected any of: default, example"; let mut enum_attr = Enum::default(); while !input.is_empty() { let ident = input.parse::<Ident>().map_err(|error| { Error::new( error.span(), format!("{}, {}", EXPECTED_ATTRIBUTE_MESSAGE, error), ) })?; let name = &*ident.to_string(); match name { "default" => { enum_attr.default = Some(parse_utils::parse_next(input, || { AnyValue::parse_any(input) })?) } "example" => { enum_attr.example = Some(parse_utils::parse_next(input, || { AnyValue::parse_any(input) })?) } _ => return Err(Error::new(ident.span(), EXPECTED_ATTRIBUTE_MESSAGE)), } if !input.is_empty() { input.parse::<Token![,]>()?; } } Ok(Self { inner: enum_attr }) } } impl ComponentAttr<Struct> { pub(super) fn from_attributes_validated(attributes: &[Attribute]) -> Option<Self> { parse_component_attr::<ComponentAttr<Struct>>(attributes).map(|attrs| { if let Some(ref wrapped_ident) = attrs .as_ref() .xml_attr .as_ref() .and_then(|xml| xml.is_wrapped.as_ref()) { abort! {wrapped_ident, "cannot use `wrapped` attribute in non slice type"; help = "Try removing `wrapped` attribute" } } attrs }) } } impl Parse for ComponentAttr<Struct> { fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> { const EXPECTED_ATTRIBUTE_MESSAGE: &str = "unexpected attribute, expected any of: example, xml"; let mut struct_ = Struct::default(); while !input.is_empty() { let ident = input.parse::<Ident>().map_err(|error| { Error::new( error.span(), &format!("{}, {}", EXPECTED_ATTRIBUTE_MESSAGE, error), ) })?; let name = &*ident.to_string(); match name { "example" => { struct_.example = Some(parse_utils::parse_next(input, || { AnyValue::parse_lit_str_or_json(input) })?); } "xml" => { let xml; parenthesized!(xml in input); struct_.xml_attr = Some(xml.parse()?) } _ => return Err(Error::new(ident.span(), EXPECTED_ATTRIBUTE_MESSAGE)), } if !input.is_empty() { input.parse::<Token![,]>()?; } } Ok(Self { inner: struct_ }) } } impl Parse for ComponentAttr<UnnamedFieldStruct> { fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> { const EXPECTED_ATTRIBUTE_MESSAGE: &str = "unexpected attribute, expected any of: default, example, format, value_type"; let mut unnamed_struct = UnnamedFieldStruct::default(); while !input.is_empty() { let attribute = input.parse::<Ident>().map_err(|error| { Error::new( error.span(), format!("{}, {}", EXPECTED_ATTRIBUTE_MESSAGE, error), ) })?; let name = &*attribute.to_string(); match name { "default" => { unnamed_struct.default = Some(parse_utils::parse_next(input, || { AnyValue::parse_any(input) })?) } "example" => { unnamed_struct.example = Some(parse_utils::parse_next(input, || { AnyValue::parse_any(input) })?) } "format" => unnamed_struct.format = Some(parse_format(input)?), "value_type" => { unnamed_struct.ty = Some(parse_utils::parse_next(input, || input.parse::<Ident>())?) } _ => return Err(Error::new(attribute.span(), EXPECTED_ATTRIBUTE_MESSAGE)), } if !input.is_empty() { input.parse::<Token![,]>()?; } } Ok(Self { inner: unnamed_struct, }) } } impl ComponentAttr<NamedField> { pub(super) fn from_attributes_validated( attributes: &[Attribute], component_part: &ComponentPart, ) -> Option<Self> { parse_component_attr::<ComponentAttr<NamedField>>(attributes) .map(|attrs| { is_valid_xml_attr(&attrs, component_part); attrs }) .map(|mut attrs| { if matches!(component_part.generic_type, Some(GenericType::Vec)) { if let Some(ref mut xml) = attrs.inner.xml_attr { let mut value_xml = mem::take(xml); let vec_xml = XmlAttr::with_wrapped( mem::take(&mut value_xml.is_wrapped), mem::take(&mut value_xml.wrap_name), ); attrs.inner.xml = Some(Xml::Slice { vec: vec_xml, value: value_xml, }); } } else if let Some(ref mut xml) = attrs.inner.xml_attr { attrs.inner.xml = Some(Xml::NonSlice(mem::take(xml))); } attrs }) } } #[inline] fn is_valid_xml_attr(attrs: &ComponentAttr<NamedField>, component_part: &ComponentPart) { if !matches!( component_part.generic_type, Some(crate::schema::GenericType::Vec) ) { if let Some(wrapped_ident) = attrs .as_ref() .xml_attr .as_ref() .and_then(|xml| xml.is_wrapped.as_ref()) { abort! {wrapped_ident, "cannot use `wrapped` attribute in non slice field type"; help = "Try removing `wrapped` attribute or make your field `Vec`" } } } } impl Parse for ComponentAttr<NamedField> { fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> { const EXPECTED_ATTRIBUTE_MESSAGE: &str = "unexpected attribute, expected any of: example, format, default, write_only, read_only, xml, value_type"; let mut field = NamedField::default(); while !input.is_empty() { let ident = input.parse::<Ident>().map_err(|error| { Error::new( error.span(), format!("{}, {}", EXPECTED_ATTRIBUTE_MESSAGE, error), ) })?; let name = &*ident.to_string(); match name { "example" => { field.example = Some(parse_utils::parse_next(input, || { AnyValue::parse_any(input) })?); } "format" => field.format = Some(parse_format(input)?), "default" => { field.default = Some(parse_utils::parse_next(input, || { AnyValue::parse_any(input) })?) } "write_only" => field.write_only = Some(parse_utils::parse_bool_or_true(input)?), "read_only" => field.read_only = Some(parse_utils::parse_bool_or_true(input)?), "xml" => { let xml; parenthesized!(xml in input); field.xml_attr = Some(xml.parse()?) } "value_type" => { field.ty = Some(parse_utils::parse_next(input, || input.parse::<Ident>())?) } _ => return Err(Error::new(ident.span(), EXPECTED_ATTRIBUTE_MESSAGE)), } if !input.is_empty() { input.parse::<Token![,]>()?; } } Ok(Self { inner: field }) } } #[inline] fn parse_format(input: &ParseBuffer) -> Result<ExprPath, Error> { let format = parse_utils::parse_next(input, || input.parse::<ExprPath>()).map_err(|error| { Error::new( error.span(), format!( "unparseable format expected expression path e.g. ComponentFormat::String, {}", error ), ) })?; if format.path.segments.first().unwrap().ident != "utoipa" { let appended_path: ExprPath = syn::parse_quote!(utoipa::openapi::#format); Ok(appended_path) } else { Ok(format) } } pub fn parse_component_attr<T: Sized + Parse>(attributes: &[Attribute]) -> Option<T> { attributes .iter() .find(|attribute| attribute.path.get_ident().unwrap() == "component") .map(|attribute| attribute.parse_args::<T>().unwrap_or_abort()) } impl<T> ToTokens for ComponentAttr<T> where T: quote::ToTokens, { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.extend(self.inner.to_token_stream()) } } impl ToTokens for Enum { fn to_tokens(&self, tokens: &mut TokenStream) { if let Some(ref default) = self.default { tokens.extend(quote! { .default(Some(#default)) }) } if let Some(ref example) = self.example { tokens.extend(quote! { .example(Some(#example)) }) } } } impl ToTokens for Struct { fn to_tokens(&self, tokens: &mut TokenStream) { if let Some(ref example) = self.example { tokens.extend(quote! { .example(Some(#example)) }) } if let Some(ref xml) = self.xml_attr { tokens.extend(quote!( .xml(Some(#xml)) )) } } } impl ToTokens for UnnamedFieldStruct { fn to_tokens(&self, tokens: &mut TokenStream) { if let Some(ref default) = self.default { tokens.extend(quote! { .default(Some(#default)) }) } if let Some(ref example) = self.example { tokens.extend(quote! { .example(Some(#example)) }) } if let Some(ref format) = self.format { tokens.extend(quote! { .format(Some(#format)) }) } } } impl ToTokens for NamedField { fn to_tokens(&self, tokens: &mut TokenStream) { if let Some(ref default) = self.default { tokens.extend(quote! { .default(Some(#default)) }) } if let Some(ref format) = self.format { tokens.extend(quote! { .format(Some(#format)) }) } if let Some(ref example) = self.example { tokens.extend(quote! { .example(Some(#example)) }) } if let Some(ref write_only) = self.write_only { tokens.extend(quote! { .write_only(Some(#write_only)) }) } if let Some(ref read_only) = self.read_only { tokens.extend(quote! { .read_only(Some(#read_only)) }) } } }
use std::mem; use proc_macro2::{Ident, TokenStream}; use proc_macro_error::{abort, ResultExt}; use quote::{quote, ToTokens}; use syn::{ parenthesized, parse::{Parse, ParseBuffer}, Attribute, Error, ExprPath, Token, }; use crate::{ parse_utils, schema::{ComponentPart, GenericType}, AnyValue, }; use super::xml::{Xml, XmlAttr}; #[cfg_attr(feature = "debug", derive(Debug))] pub struct ComponentAttr<T> where T: Sized, { inner: T, } impl<T> AsRef<T> for ComponentAttr<T> where T: Sized, { fn as_ref(&self) -> &T { &self.inner } } #[derive(Default)] #[cfg_attr(feature = "debug", derive(Debug))] pub struct Enum { default: Option<AnyValue>, example: Option<AnyValue>, } #[derive(Default)] #[cfg_attr(feature = "debug", derive(Debug))] pub struct Struct { example: Option<AnyValue>, xml_attr: Option<XmlAttr>, } #[derive(Default)] #[cfg_attr(feature = "debug", derive(Debug))] pub struct UnnamedFieldStruct { pub(super) ty: Option<Ident>, format: Option<ExprPath>, default: Option<AnyValue>, example: Option<AnyValue>, } #[derive(Default)] #[cfg_attr(feature = "debug", derive(Debug))] pub struct NamedField { example: Option<AnyValue>, pub(super) ty: Option<Ident>, format: Option<ExprPath>, default: Option<AnyValue>, write_only: Option<bool>, read_only: Option<bool>, xml_attr: Option<XmlAttr>, pub(super) xml: Option<Xml>, } impl Parse for ComponentAttr<Enum> { fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> { const EXPECTED_ATTRIBUTE_MESSAGE: &str = "unexpected attribute, expected any of: default, example"; let mut enum_attr = Enum::default(); while !input.is_empty() { let ident = input.parse::<Ident>().map_err(|error| { Error::new( error.span(), format!("{}, {}", EXPECTED_ATTRIBUTE_MESSAGE, error), ) })?; let name = &*ident.to_string(); match name { "default" => { enum_attr.default = Some(parse_utils::parse_next(input, || { AnyValue::parse_any(input) })?) } "example" => { enum_attr.example = Some(parse_utils::parse_next(input, || { AnyValue::parse_any(input) })?) } _ => return Err(Error::new(ident.span(), EXPECTED_ATTRIBUTE_MESSAGE)), } if !input.is_empty() { input.parse::<Token![,]>()?; } } Ok(Self { inner: enum_attr }) } } impl ComponentAttr<Struct> { pub(super) fn from_attributes_validated(attributes: &[Attribute]) -> Option<Self> { parse_component_attr::<ComponentAttr<Struct>>(attributes).map(|attrs| { if let Some(ref wrapped_ident) = attrs .as_ref() .xml_attr .as_ref() .and_then(|xml| xml.is_wrapped.as_ref()) { abort! {wrapped_ident, "cannot use `wrapped` attribute in non slice type"; help = "Try removing `wrapped` attribute" } } attrs }) } } impl Parse for ComponentAttr<Struct> { fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> { const EXPECTED_ATTRIBUTE_MESSAGE: &str = "unexpected attribute, expected any of: example, xml"; let mut struct_ = Struct::default(); while !input.is_empty() { let ident = input.parse::<Ident>().map_err(|error| { Error::new( error.span(), &format!("{}, {}", EXPECTED_ATTRIBUTE_MESSAGE, error), ) })?; let name = &*ident.to_string(); match name { "example" => { struct_.example = Some(parse_utils::parse_next(input, || { AnyValue::parse_lit_str_or_json(input) })?); } "xml" => { let xml; parenthesized!(xml in input); struct_.xml_attr = Some(xml.parse()?) } _ => return Err(Error::new(ident.span(), EXPECTED_ATTRIBUTE_MESSAGE)), } if !input.is_empty() { input.parse::<Token![,]>()?; } } Ok(Self { inner: struct_ }) } } impl Parse for ComponentAttr<UnnamedFieldStruct> { fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> { const EXPECTED_ATTRIBUTE_MESSAGE: &str = "unexpected attribute, expected any of: default, example, format, value_type"; let mut unnamed_struct = UnnamedFieldStruct::default(); while !input.is_empty() { let attribute = input.parse::<Ident>().map_err(|error| { Error::new( error.span(), format!("{}, {}", EXPECTED_ATTRIBUTE_MESSAGE, error), ) })?; let name = &*attribute.to_string(); match name { "default" => { unnamed_struct.default = Some(parse_utils::parse_next(input, || { AnyValue::parse_any(input) })?) } "example" => { unnamed_struct.example = Some(parse_utils::parse_next(input, || { AnyValue::parse_any(input) })?) } "format" => unnamed_struct.format = Some(parse_format(input)?), "value_type" => { unnamed_struct.ty = Some(parse_utils::parse_next(input, || input.parse::<Ident>())?) } _ => return Err(Error::new(attribute.span(), EXPECTED_ATTRIBUTE_MESSAGE)), } if !input.is_empty() { input.parse::<Token![,]>()?; } } Ok(Self { inner: unnamed_struct, }) } } impl ComponentAttr<NamedField> { pub(super) fn from_attributes_validated( attributes: &[Attribute], component_part: &ComponentPart, ) -> Option<Self> { parse_component_attr::<ComponentAttr<NamedField>>(attributes) .map(|attrs| { is_valid_xml_attr(&attrs, component_part); attrs }) .map(|mut attrs| { if matches!(component_part.generic_type, Some(GenericType::Vec)) { if let Some(ref mut xml) = attrs.inner.xml_attr { let mut value_xml = mem::take(xml); let vec_xml = XmlAttr::with_wrapped( mem::take(&mut value_xml.is_wrapped), mem::take(&mut value_xml.wrap_name), ); attrs.inner.xml = Some(Xml::Slice { vec: vec_xml, value: value_xml, }); } } else if let Some(ref mut xml) = attrs.inner.xml_attr { attrs.inner.xml = Some(Xml::NonSlice(mem::take(xml))); } attrs }) } } #[inline] fn is_valid_xml_attr(attrs: &ComponentAttr<NamedField>, component_part: &ComponentPart) { if !matches!( component_part.generic_type, Some(crate::schema::GenericType::Vec) ) { if let Some(wrapped_ident) = attrs .as_ref() .xml_attr .as_ref() .and_then(|xml| xml.is_wrapped.as_ref()) { abort! {wrapped_ident, "cannot use `wrapped` attribute in non slice field type"; help = "Try removing `wrapped` attribute or make your field `Vec`" } } } } impl Parse for ComponentAttr<NamedField> { fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> { const EXPECTED_ATTRIBUTE_MESSAGE: &str = "unexpected attribute, expected any of: example, format, default, write_only, read_only, xml, value_type"; let mut field = NamedField::default(); while !input.is_empty() { let ident = input.parse::<Ident>().map_err(|error| { Error::new( error.span(), format!("{}, {}", EXPECTED_ATTRIBUTE_MESSAGE, error), ) })?; let name = &*ident.to_string(); match name { "example" => { field.example = Some(parse_utils::parse_next(input, || { AnyValue::parse_any(input) })?); } "format" => field.format = Some(parse_format(input)?), "default" => { field.default = Some(parse_utils::parse_next(input, || { AnyValue::parse_any(input) })?) } "write_only" => field.write_only = Some(parse_utils::parse_bool_or_true(input)?), "read_only" => field.read_only = Some(parse_utils::parse_bool_or_true(input)?), "xml" => { let xml; parenthesized!(xml in input); field.xml_attr = Some(xml.parse()?) } "value_type" => { field.ty = Some(parse_utils::parse_next(input, || input.parse::<Ident>())?) } _ => return Err(Error::new(ident.span(), EXPECTED_ATTRIBUTE_MESSAGE)), } if !input.is_empty() { input.parse::<Token![,]>()?; } } Ok(Self { inner: field }) } } #[inline] fn parse_format(input: &ParseBuffer) -> Result<ExprPath, Error> { let format = parse_utils::parse_next(input, || input.parse::<ExprPath>()).map_err(|error| { Error::new( error.span(), format!( "unparseable format expected expression path e.g. ComponentFormat::String, {}", error ), ) })?; if format.path.segments.first().unwrap().ident != "utoipa" { let appended_path: ExprPath = syn::parse_quote!(utoipa::openapi::#format); Ok(appended_path) } else { Ok(format) } }
impl<T> ToTokens for ComponentAttr<T> where T: quote::ToTokens, { fn to_tokens(&self, tokens: &mut TokenStream) { tokens.extend(self.inner.to_token_stream()) } } impl ToTokens for Enum { fn to_tokens(&self, tokens: &mut TokenStream) { if let Some(ref default) = self.default { tokens.extend(quote! { .default(Some(#default)) }) } if let Some(ref example) = self.example { tokens.extend(quote! { .example(Some(#example)) }) } } } impl ToTokens for Struct { fn to_tokens(&self, tokens: &mut TokenStream) { if let Some(ref example) = self.example { tokens.extend(quote! { .example(Some(#example)) }) } if let Some(ref xml) = self.xml_attr { tokens.extend(quote!( .xml(Some(#xml)) )) } } } impl ToTokens for UnnamedFieldStruct { fn to_tokens(&self, tokens: &mut TokenStream) { if let Some(ref default) = self.default { tokens.extend(quote! { .default(Some(#default)) }) } if let Some(ref example) = self.example { tokens.extend(quote! { .example(Some(#example)) }) } if let Some(ref format) = self.format { tokens.extend(quote! { .format(Some(#format)) }) } } } impl ToTokens for NamedField { fn to_tokens(&self, tokens: &mut TokenStream) { if let Some(ref default) = self.default { tokens.extend(quote! { .default(Some(#default)) }) } if let Some(ref format) = self.format { tokens.extend(quote! { .format(Some(#format)) }) } if let Some(ref example) = self.example { tokens.extend(quote! { .example(Some(#example)) }) } if let Some(ref write_only) = self.write_only { tokens.extend(quote! { .write_only(Some(#write_only)) }) } if let Some(ref read_only) = self.read_only { tokens.extend(quote! { .read_only(Some(#read_only)) }) } } }
pub fn parse_component_attr<T: Sized + Parse>(attributes: &[Attribute]) -> Option<T> { attributes .iter() .find(|attribute| attribute.path.get_ident().unwrap() == "component") .map(|attribute| attribute.parse_args::<T>().unwrap_or_abort()) }
function_block-full_function
[]
Rust
cosmos-abci/abci/src/lib.rs
couragetec/courage_substrate_cosmos
d9b2038436ac8933e6dadda960cd66a543c851d3
mod defaults; pub mod grpc; pub mod utils; pub use defaults::*; pub use grpc::*; use lazy_static::lazy_static; use owning_ref::MutexGuardRefMut; use std::sync::Mutex; use mockall::automock; lazy_static! { static ref ABCI_INTERFACE_INSTANCE: Mutex<Option<AIType>> = Mutex::new(None); } type AIType = Box<dyn AbciInterface + Send>; type AbciResult<T> = Result<Box<T>, Box<dyn std::error::Error>>; #[automock] pub trait ResponseFlush {} #[automock] pub trait ResponseEcho { fn get_message(&self) -> String; fn set_message(&mut self, v: String); } #[automock] pub trait ResponseCheckTx { fn get_code(&self) -> u32; fn get_data(&self) -> Vec<u8>; fn get_log(&self) -> String; fn get_info(&self) -> String; fn get_gas_wanted(&self) -> i64; fn get_gas_used(&self) -> i64; fn get_codespace(&self) -> String; fn set_code(&mut self, v: u32); fn set_data(&mut self, v: Vec<u8>); fn set_log(&mut self, v: String); fn set_info(&mut self, v: String); fn set_gas_wanted(&mut self, v: i64); fn set_gas_used(&mut self, v: i64); fn set_codespace(&mut self, v: String); } #[automock] pub trait ResponseDeliverTx { fn get_code(&self) -> u32; fn get_data(&self) -> Vec<u8>; fn get_log(&self) -> String; fn get_info(&self) -> String; fn get_gas_wanted(&self) -> i64; fn get_gas_used(&self) -> i64; fn get_codespace(&self) -> String; fn set_code(&mut self, v: u32); fn set_data(&mut self, v: Vec<u8>); fn set_log(&mut self, v: String); fn set_info(&mut self, v: String); fn set_gas_wanted(&mut self, v: i64); fn set_gas_used(&mut self, v: i64); fn set_codespace(&mut self, v: String); } #[automock] pub trait ResponseInitChain { fn get_validators(&self) -> Vec<protos::tendermint::abci::ValidatorUpdate>; } #[automock] pub trait ResponseSetOption { fn get_code(&self) -> u32; fn get_log(&self) -> String; fn get_info(&self) -> String; } #[automock] pub trait ResponseBeginBlock {} #[automock] pub trait ResponseEndBlock { fn get_validator_updates(&self) -> Vec<protos::tendermint::abci::ValidatorUpdate>; fn get_events(&self) -> Vec<protos::tendermint::abci::Event>; fn set_events(&mut self, events: Vec<protos::tendermint::abci::Event>); fn set_validator_updates( &mut self, validator_updates: Vec<protos::tendermint::abci::ValidatorUpdate>, ); } #[automock] pub trait ResponseCommit { fn get_data(&self) -> Vec<u8>; fn get_retain_height(&self) -> i64; fn set_data(&mut self, v: Vec<u8>); fn set_retain_height(&mut self, v: i64); } #[automock] pub trait ResponseInfo { fn get_version(&self) -> String; fn get_app_version(&self) -> u64; fn get_data(&self) -> String; fn get_last_block_height(&self) -> i64; fn get_last_block_app_hash(&self) -> Vec<u8>; } #[automock] pub trait ResponseQuery { fn get_code(&self) -> u32; fn get_log(&self) -> String; fn get_info(&self) -> String; fn get_index(&self) -> i64; fn get_key(&self) -> Vec<u8>; fn get_value(&self) -> Vec<u8>; fn get_height(&self) -> i64; fn get_codespace(&self) -> String; fn get_proof(&self) -> Option<protos::tendermint::crypto::ProofOps>; fn set_code(&mut self, v: u32); fn set_log(&mut self, v: String); fn set_info(&mut self, v: String); fn set_index(&mut self, v: i64); fn set_key(&mut self, v: Vec<u8>); fn set_value(&mut self, v: Vec<u8>); fn set_height(&mut self, v: i64); fn set_codespace(&mut self, v: String); } #[automock] pub trait AbciInterface { fn echo(&mut self, message: String) -> AbciResult<dyn ResponseEcho>; fn check_tx(&mut self, tx: Vec<u8>) -> AbciResult<dyn ResponseCheckTx>; fn deliver_tx(&mut self, tx: Vec<u8>) -> AbciResult<dyn ResponseDeliverTx>; fn init_chain( &mut self, time_seconds: i64, time_nanos: i32, chain_id: &str, pub_key_types: Vec<String>, max_block_bytes: i64, max_evidence_bytes: i64, max_gas: i64, max_age_num_blocks: i64, max_age_duration: u64, app_state_bytes: Vec<u8>, validators: Vec<protos::tendermint::abci::ValidatorUpdate>, app_version: u64, initial_height: i64, ) -> AbciResult<dyn ResponseInitChain>; fn set_option(&mut self, key: &str, value: &str) -> AbciResult<dyn ResponseSetOption>; fn begin_block( &mut self, height: i64, hash: Vec<u8>, last_block_id: Vec<u8>, proposer_address: Vec<u8>, active_validators: Vec<protos::tendermint::abci::VoteInfo>, ) -> AbciResult<dyn ResponseBeginBlock>; fn end_block(&mut self, height: i64) -> AbciResult<dyn ResponseEndBlock>; fn commit(&mut self) -> AbciResult<dyn ResponseCommit>; fn query( &mut self, path: String, data: Vec<u8>, height: i64, prove: bool, ) -> AbciResult<dyn ResponseQuery>; fn info(&mut self) -> AbciResult<dyn ResponseInfo>; fn flush(&mut self) -> AbciResult<dyn ResponseFlush>; } pub fn set_abci_instance<'ret>( new_instance: AIType, ) -> Result<MutexGuardRefMut<'ret, Option<AIType>, AIType>, Box<dyn std::error::Error>> { let mut instance = ABCI_INTERFACE_INSTANCE.lock()?; *instance = Some(new_instance); let res = MutexGuardRefMut::new(instance).map_mut(|mg| mg.as_mut().unwrap()); Ok(res) } pub fn get_abci_instance<'ret>( ) -> Result<MutexGuardRefMut<'ret, Option<AIType>, AIType>, Box<dyn std::error::Error>> { let instance = ABCI_INTERFACE_INSTANCE.lock()?; if instance.is_none() { panic!("abci instance has not been set, execute set_abci_instance before calling this function"); } let res = MutexGuardRefMut::new(instance).map_mut(|mg| mg.as_mut().unwrap()); Ok(res) }
mod defaults; pub mod grpc; pub mod utils; pub use defaults::*; pub use grpc::*; use lazy_static::lazy_static; use owning_ref::MutexGuardRefMut; use std::sync::Mutex; use mockall::automock; lazy_static! { static ref ABCI_INTERFACE_INSTANCE: Mutex<Option<AIType>> = Mutex::new(None); } type AIType = Box<dyn AbciInterface + Send>; type AbciResult<T> = Result<Box<T>, Box<dyn std::error::Error>>; #[automock] pub trait ResponseFlush {} #[automock] pub trait ResponseEcho { fn get_message(&self) -> String; fn set_message(&mut self, v: String); } #[automock] pub trait ResponseCheckTx { fn get_code(&self) -> u32; fn get_data(&self) -> Vec<u8>; fn get_log(&self) -> String; fn get_info(&self) -> String; fn get_gas_wanted(&self) -> i64; fn get_gas_used(&self) -> i64; fn get_codespace(&self) -> String; fn set_code(&mut self, v: u32); fn set_data(&mut self, v: Vec<u8>); fn set_log(&mut self, v: String); fn set_info(&mut self, v: String); fn set_gas_wanted(&mut self, v: i64); fn set_gas_used(&mut self, v: i64); fn set_codespace(&mut self, v: String); } #[automock] pub trait ResponseDeliverTx { fn get_code(&self) -> u32; fn get_data(&self) -> Vec<u8>; fn get_log(&self) -> String; fn get_info(&self) -> String; fn get_gas_wanted(&self) -> i64; fn get_gas_used(&self) -> i64; fn get_codespace(&self) -> String; fn set_code(&mut self, v: u32); fn set_data(&mut self, v: Vec<u8>); fn set_log(&mut self, v: String); fn set_info(&mut self, v: String); fn set_gas_wanted(&mut self, v: i64); fn set_gas_used(&mut self, v: i64); fn set_codespace(&mut self, v: String); } #[automock] pub trait ResponseInitChain { fn get_validators(&self) -> Vec<protos::tendermint::abci::ValidatorUpdate>; } #[automock] pub trait ResponseSetOption { fn get_code(&self) -> u32; fn get_log(&self) -> String; fn get_info(&self) -> String; } #[automock] pub trait ResponseBeginBlock {} #[automock] pub trait ResponseEndBlock { fn get_validator_updates(&self) -> Vec<protos::tendermint::abci::ValidatorUpdate>; fn get_events(&self) -> Vec<protos::tendermint::abci::Event>; fn set_events(&mut self, events: Vec<protos::tendermint::abci::Event>); fn set_validator_updates( &mut self, validator_updates: Vec<protos::tendermint::abci::ValidatorUpdate>, ); } #[automock] pub trait ResponseCommit { fn get_data(&self) -> Vec<u8>; fn get_retain_height(&self) -> i64; fn set_data(&mut self, v: Vec<u8>); fn set_retain_height(&mut self, v: i64); } #[automock] pub trait ResponseInfo { fn get_version(&self) -> String; fn get_app_version(&self) -> u64; fn get_data(&self) -> String; fn get_last_block_height(&self) -> i64; fn get_last_block_app_hash(&self) -> Vec<u8>; } #[automock] pub trait ResponseQuery { fn get_code(&self) -> u32; fn get_log(&self) -> String; fn get_info(&self) -> String; fn get_index(&self) -> i64; fn get_key(&self) -> Vec<u8>; fn get_value(&self) -> Vec<u8>; fn get_height(&self) -> i64; fn get_codespace(&self) -> String; fn get_proof(&self) -> Option<protos::tendermint::crypto::ProofOps>; fn set_code(&mut self, v: u32); fn set_log(&mut self, v: String); fn set_info(&mut self, v: String); fn set_index(&mut self, v: i64); fn set_key(&mut self, v: Vec<u8>); fn set_value(&mut self, v: Vec<u8>); fn set_height(&mut self, v: i64); fn set_codespace(&mut self, v: String); } #[automock] pub trait AbciInterface { fn echo(&mut self, message: String) -> AbciResult<dyn ResponseEcho>; fn check_tx(&mut self, tx: Vec<u8>) -> AbciResult<dyn ResponseCheckTx>; fn deliver_tx(&mut self, tx: Vec<u8>) -> AbciResult<dyn ResponseDeliverTx>; fn init_chain( &mut self, time_seconds: i64, time_nanos: i32, chain_id: &str, pub_key_types: Vec<String>, max_block_bytes: i64, max_evidence_bytes: i64, max_gas: i64, max_age_num_blocks: i64, max_age_duration: u64, app_state_bytes: Vec<u8>, validators: Vec<protos::tendermint::abci::ValidatorUpdate>, app_version: u64, initial_height: i64, ) -> AbciResult<dyn ResponseInitChain>; fn set_option(&mut self, key: &str, value: &str) -> AbciResult<dyn ResponseSetOption>; fn begin_block( &mut self, height: i64, hash: Vec<u8>, last_block_id: Vec<u8>, proposer_address: Vec<u8>, active_validators: Vec<protos::tendermint::abci::VoteInfo>, ) -> AbciResult<dyn ResponseBeginBlock>; fn end_block(&mut self, height: i64) -> AbciResult<dyn ResponseEndBlock>; fn commit(&mut self) -> AbciResult<dyn ResponseCommit>; fn query( &mut self, path: String, data: Vec<u8>, height: i64, prove: bool, ) -> AbciResult<dyn ResponseQuery>; fn info(&mut self) -> AbciResult<dyn ResponseInfo>; fn flush(&mut self) -> AbciResult<dyn ResponseFlush>; } pub fn set_abci_instance<'ret>( new_instance: AIType, ) -> Result<MutexGuardRefMut<'ret, Option<AIType>, AIType>, Box<dyn std::error::Error>> { let mut instance = ABCI_INTERFACE_INSTANCE.lock()?; *instance = Some(new_instance); let res = MutexGuardRefMut::new(instance).map_mut(|mg| mg.as_mut().unwrap()); Ok(res) }
pub fn get_abci_instance<'ret>( ) -> Result<MutexGuardRefMut<'ret, Option<AIType>, AIType>, Box<dyn std::error::Error>> { let instance = ABCI_INTERFACE_INSTANCE.lock()?; if instance.is_none() { panic!("abci instance has not been set, execute set_abci_instance before calling this function"); } let res = MutexGuardRefMut::new(instance).map_mut(|mg| mg.as_mut().unwrap()); Ok(res) }
function_block-full_function
[ { "content": "/// Method for getting gRPC url form active env.\n\npub fn get_server_url() -> String {\n\n crate::utils::get_option_from_node_args(crate::utils::NodeOptionVariables::AbciServerUrl)\n\n .unwrap_or_else(|| DEFAULT_ABCI_URL.to_owned())\n\n}\n\n\n", "file_path": "cosmos-abci/abci/src/defaults.rs", "rank": 0, "score": 150988.44325077467 }, { "content": "pub fn get_storage_name() -> String {\n\n match std::env::var(\"ABCI_STORAGE_NAME\") {\n\n Ok(val) => val,\n\n Err(_) => DEFAULT_ABCI_STORAGE_NAME.to_owned(),\n\n }\n\n}\n\n\n\n/// Default ABCI gRPC url.\n\npub const DEFAULT_ABCI_URL: &str = \"tcp://localhost:26658\";\n\n\n\n/// Default ABCI storage name.\n\npub const DEFAULT_ABCI_STORAGE_NAME: &str = \"abci_storage_rocksdb\";\n\n\n\n/// App version type.\n\npub type AppVersion = String;\n\n/// App block version type.\n\npub type BlockVersion = u64;\n\n/// App P2P version type.\n\npub type P2PVersion = u64;\n\n\n", "file_path": "cosmos-abci/abci/src/defaults.rs", "rank": 1, "score": 150988.44325077467 }, { "content": "pub fn get_abci_genesis() -> String {\n\n match get_genesis_from_file() {\n\n Ok(v) => v,\n\n _ => std::env::var(\"ABCI_GENESIS_STATE\")\n\n .map_err(|_| \"Failed to get abci genesis state file\")\n\n .unwrap(),\n\n }\n\n}\n\n\n", "file_path": "cosmos-abci/abci/src/utils.rs", "rank": 2, "score": 150891.51061300913 }, { "content": "/// Method for getting RPC server url form active env.\n\npub fn get_abci_rpc_url() -> String {\n\n pallet_abci::utils::get_option_from_node_args(\n\n pallet_abci::utils::NodeOptionVariables::AbciRPCUrl,\n\n )\n\n .unwrap_or_else(|| DEFAULT_ABCI_RPC_URL.to_owned())\n\n}\n\n\n", "file_path": "src/cosmos_rpc/mod.rs", "rank": 3, "score": 150792.83691259215 }, { "content": "/// Get authority address from authority pub key.\n\npub fn get_address_from_pub_key(pub_key: &[u8], key_type: PubKeyTypes) -> Vec<u8> {\n\n match key_type {\n\n PubKeyTypes::Ed25519 => {\n\n let sha_digest = &hashers::get_sha256_hash(pub_key)[0..20];\n\n sha_digest.to_vec()\n\n }\n\n PubKeyTypes::Secp256k1 => {\n\n let sha_digest = hashers::get_sha256_hash(pub_key);\n\n let ripemd160_digest = &hashers::get_ripemd160_hash(&sha_digest);\n\n ripemd160_digest.clone()\n\n }\n\n }\n\n}\n\n\n", "file_path": "cosmos-abci/src/crypto_transform.rs", "rank": 6, "score": 137944.25404491858 }, { "content": "/// Return an Aura or Babe/Grandpa session keys.\n\npub fn to_session_keys(\n\n ed25519_keyring: &Ed25519Keyring,\n\n sr25519_keyring: &Sr25519Keyring,\n\n) -> SessionKeys {\n\n SessionKeys {\n\n grandpa: ed25519_keyring.to_owned().public().into(),\n\n #[cfg(feature = \"aura\")]\n\n aura: sr25519_keyring.to_owned().public().into(),\n\n #[cfg(feature = \"aura\")]\n\n abci: sr25519_keyring.to_owned().public().into(),\n\n #[cfg(feature = \"babe\")]\n\n babe: sr25519_keyring.to_owned().public().into(),\n\n }\n\n}\n\n\n", "file_path": "src/chain_spec.rs", "rank": 7, "score": 122069.4093483299 }, { "content": "/// Instantiate all full RPC extensions.\n\npub fn create_full<P: TransactionPool + 'static>(\n\n deps: FullDeps<crate::service::FullClient, P>,\n\n) -> jsonrpc_core::IoHandler<sc_rpc::Metadata> {\n\n let mut io = jsonrpc_core::IoHandler::default();\n\n io.extend_with(SystemApi::to_delegate(FullSystem::new(\n\n deps.client.clone(),\n\n deps.pool.clone(),\n\n deps.deny_unsafe,\n\n )));\n\n io.extend_with(TransactionPaymentApi::to_delegate(TransactionPayment::new(\n\n deps.client,\n\n )));\n\n io\n\n}\n", "file_path": "src/rpc.rs", "rank": 8, "score": 121434.43629159436 }, { "content": "fn get_genesis_from_file() -> Result<String, String> {\n\n if let Some(_path) = get_option_from_node_args(NodeOptionVariables::AbciGenesisStatePath) {\n\n let app_genesis_state = fs::read_to_string(&_path).map_err(|e| e.to_string())?;\n\n Ok(app_genesis_state)\n\n } else {\n\n Err(\"ABCI path to Genesis state not provided.\".to_string())\n\n }\n\n}\n\n\n", "file_path": "cosmos-abci/abci/src/utils.rs", "rank": 9, "score": 120974.71679398471 }, { "content": "pub fn get_option_from_node_args(option_name: NodeOptionVariables) -> Option<String> {\n\n let node_args: Vec<String> = std::env::args().collect();\n\n let mut arg_ids = ArgIds {\n\n server_url_temp_id: 0,\n\n genesis_state_path_temp_id: 0,\n\n rpc_url_temp_id: 0,\n\n };\n\n let abci_server_url_option = \"--abci_server_url\";\n\n let abci_genesis_state_path_option = \"--abci_genesis_state_path\";\n\n let abci_rpc_url_option = \"--abci_rpc_url\";\n\n\n\n for (arg_id, arg) in node_args.iter().enumerate() {\n\n if arg == abci_server_url_option {\n\n arg_ids.server_url_temp_id = arg_id + 1;\n\n } else if arg == abci_genesis_state_path_option {\n\n arg_ids.genesis_state_path_temp_id = arg_id + 1;\n\n } else if arg == abci_rpc_url_option {\n\n arg_ids.rpc_url_temp_id = arg_id + 1;\n\n }\n\n }\n", "file_path": "cosmos-abci/abci/src/utils.rs", "rank": 10, "score": 118847.52813588863 }, { "content": "pub fn development_config() -> Result<ChainSpec, String> {\n\n let wasm_binary =\n\n WASM_BINARY.ok_or_else(|| \"Development wasm binary not available\".to_string())?;\n\n\n\n Ok(ChainSpec::from_genesis(\n\n // Name\n\n \"Development\",\n\n // ID\n\n \"dev\",\n\n ChainType::Development,\n\n move || {\n\n testnet_genesis(\n\n wasm_binary,\n\n // Initial PoA authorities\n\n initial_poa_keys(),\n\n // Sudo account\n\n get_account_id_from_seed::<sr25519::Public>(\"Alice\"),\n\n // Pre-funded accounts\n\n vec![\n\n get_account_id_from_seed::<sr25519::Public>(\"Alice\"),\n", "file_path": "src/chain_spec.rs", "rank": 12, "score": 118281.86137961569 }, { "content": "pub fn local_testnet_config() -> Result<ChainSpec, String> {\n\n let wasm_binary =\n\n WASM_BINARY.ok_or_else(|| \"Development wasm binary not available\".to_string())?;\n\n\n\n Ok(ChainSpec::from_genesis(\n\n // Name\n\n \"Local Testnet\",\n\n // ID\n\n \"local_testnet\",\n\n ChainType::Local,\n\n move || {\n\n testnet_genesis(\n\n wasm_binary,\n\n // Initial PoA authorities\n\n initial_poa_keys(),\n\n // Sudo account\n\n get_account_id_from_seed::<sr25519::Public>(\"Alice\"),\n\n // Pre-funded accounts\n\n vec![\n\n get_account_id_from_seed::<sr25519::Public>(\"Alice\"),\n", "file_path": "src/chain_spec.rs", "rank": 17, "score": 115775.21295870763 }, { "content": "#[cfg(feature = \"babe\")]\n\npub trait Trait:\n\n CreateSignedTransaction<Call<Self>>\n\n + pallet_session::Trait\n\n + pallet_sudo::Trait\n\n + pallet_grandpa::Trait\n\n + pallet_babe::Trait\n\n{\n\n type AuthorityId: Decode + sp_runtime::RuntimeAppPublic + Default;\n\n type Call: From<Call<Self>>;\n\n type Subscription: SubscriptionManager;\n\n}\n\n\n", "file_path": "cosmos-abci/src/lib.rs", "rank": 21, "score": 113651.69481228145 }, { "content": "/// Method for getting app version configs.\n\npub fn get_app_configs() -> VersionConfigs {\n\n let version_configs = VersionConfigs {\n\n app_version: \"0.1.0\".to_string(), // version specified at Cargo.toml of `abci` pallet.\n\n block_version: 0,\n\n p2p_version: 0,\n\n };\n\n version_configs.log_info();\n\n version_configs\n\n}\n", "file_path": "cosmos-abci/abci/src/defaults.rs", "rank": 24, "score": 103477.35747173656 }, { "content": "/// Method that set abci instance.\n\npub fn set_abci_storage_instance<'ret>(\n\n new_instance: AbciStorageType,\n\n) -> Result<\n\n MutexGuardRefMut<'ret, Option<AbciStorageType>, AbciStorageType>,\n\n Box<dyn std::error::Error>,\n\n> {\n\n let mut instance = ABCI_STORAGE_INSTANCE.lock()?;\n\n *instance = Some(new_instance);\n\n // Here we create a ref to the inner value of the mutex guard.\n\n // Unwrap should never panic as we set it previously.\n\n let res = MutexGuardRefMut::new(instance).map_mut(|mg| mg.as_mut().unwrap());\n\n Ok(res)\n\n}\n\n\n", "file_path": "cosmos-abci/abci_storage/src/lib.rs", "rank": 25, "score": 101163.34546442045 }, { "content": "/// Generate an account ID from seed.\n\npub fn get_account_id_from_seed<TPublic: Public>(seed: &str) -> AccountId\n\nwhere\n\n AccountPublic: From<<TPublic::Pair as Pair>::Public>,\n\n{\n\n AccountPublic::from(get_from_seed::<TPublic>(seed)).into_account()\n\n}\n\n\n", "file_path": "src/chain_spec.rs", "rank": 26, "score": 100930.81817172823 }, { "content": "pub fn parse_cosmos_genesis_file(genesis: &str) -> Result<GenesisInfo, Box<dyn std::error::Error>> {\n\n let genesis: serde_json::Value = serde_json::from_str(genesis).map_err(|e| e.to_string())?;\n\n let chain_id = genesis[\"chain_id\"]\n\n .as_str()\n\n .ok_or_else(|| \"chain_id not found\".to_owned())?;\n\n let genesis_time = genesis[\"genesis_time\"]\n\n .as_str()\n\n .ok_or_else(|| \"chain_id not found\".to_owned())?;\n\n let pub_key_types: Vec<String> = genesis[\"consensus_params\"][\"validator\"][\"pub_key_types\"]\n\n .as_array()\n\n .ok_or_else(|| \"pub_keys_types not found\".to_owned())?\n\n .iter()\n\n .map(|v| v.as_str().unwrap().to_owned())\n\n .collect();\n\n let max_bytes = genesis[\"consensus_params\"][\"block\"][\"max_bytes\"]\n\n .as_str()\n\n .ok_or_else(|| \"chain_id not found\".to_owned())?\n\n .parse::<i64>()?;\n\n let max_gas = genesis[\"consensus_params\"][\"block\"][\"max_gas\"]\n\n .as_str()\n", "file_path": "cosmos-abci/abci/src/utils.rs", "rank": 27, "score": 100883.23881267583 }, { "content": "/// Session index defined in pallet_session.\n\ntype SessionIndex = u32;\n", "file_path": "cosmos-abci/src/lib.rs", "rank": 28, "score": 100667.32326081154 }, { "content": "/// Encode value from `base64` string to `utf8` string.\n\npub fn encode_value_from_base64(value: &[u8]) -> Vec<u8> {\n\n base64::decode(value).unwrap().to_vec()\n\n}\n", "file_path": "cosmos-abci/src/crypto_transform.rs", "rank": 29, "score": 100546.08864276622 }, { "content": "pub fn serialize_vec<T: serde::Serialize>(\n\n validators: Vec<T>,\n\n) -> Result<Vec<u8>, Box<dyn std::error::Error>> {\n\n Ok(bincode::serialize(&validators).map_err(|_| \"cannot serialize\")?)\n\n}\n\n\n", "file_path": "cosmos-abci/abci/src/utils.rs", "rank": 30, "score": 97597.42592584164 }, { "content": "#[cfg(feature = \"aura\")]\n\npub fn new_partial(\n\n config: &Configuration,\n\n) -> Result<\n\n sc_service::PartialComponents<\n\n FullClient,\n\n FullBackend,\n\n FullSelectChain,\n\n sp_consensus::DefaultImportQueue<Block, FullClient>,\n\n sc_transaction_pool::FullPool<Block, FullClient>,\n\n (\n\n sc_consensus_aura::AuraBlockImport<Block, FullClient, FullGrandpaBlockImport, AuraPair>,\n\n sc_finality_grandpa::LinkHalf<Block, FullClient, FullSelectChain>,\n\n ),\n\n >,\n\n ServiceError,\n\n> {\n\n let inherent_data_providers = sp_inherents::InherentDataProviders::new();\n\n let (client, backend, keystore, task_manager) =\n\n sc_service::new_full_parts::<Block, RuntimeApi, Executor>(&config)?;\n\n let client = Arc::new(client);\n", "file_path": "src/service.rs", "rank": 31, "score": 96798.10779262288 }, { "content": "fn remove_rocks_db(db_path: path::PathBuf) {\n\n match fs::remove_dir_all(&db_path) {\n\n Ok(_) => {\n\n println!(\"{:?} removed.\", &db_path);\n\n }\n\n Err(_) => {\n\n println!(\"{:?} did not exist.\", &db_path);\n\n }\n\n }\n\n}\n\n\n\nimpl PurgeChainWithStorageCmd {\n\n /// Run the purge command.\n\n pub fn run(&self, config: &Configuration) -> sc_cli::Result<()> {\n\n let mut confirm_removal = self.yes;\n\n let chain_spec_id = config.chain_spec.id();\n\n let shared_params = self.shared_params();\n\n let database_params = self.database_params().unwrap();\n\n let config_dir = config\n\n .base_path\n", "file_path": "src/cli/commands.rs", "rank": 32, "score": 96572.95576816137 }, { "content": "/// Method that return abci instance.\n\npub fn get_abci_storage_instance<'ret>() -> Result<\n\n MutexGuardRefMut<'ret, Option<AbciStorageType>, AbciStorageType>,\n\n Box<dyn std::error::Error>,\n\n> {\n\n let instance = ABCI_STORAGE_INSTANCE.lock()?;\n\n if instance.is_none() {\n\n // TODO return an error\n\n panic!(\"abci storage instance has not been set, execute set_storage_instance before calling this function\");\n\n }\n\n // Here we create a ref to the inner value of the mutex guard.\n\n // Unwrap should never panic as we set it previously.\n\n let res = MutexGuardRefMut::new(instance).map_mut(|mg| mg.as_mut().unwrap());\n\n Ok(res)\n\n}\n", "file_path": "cosmos-abci/abci_storage/src/lib.rs", "rank": 33, "score": 95848.27370935798 }, { "content": "/// The pallet SubscriptionManager trait that defines `on_check_tx` and `on_deliver_tx` methods\n\n/// and is used by pallet subscribtion macro.\n\npub trait SubscriptionManager {\n\n fn on_check_tx(data: Vec<u8>) -> DispatchResult;\n\n fn on_deliver_tx(data: Vec<u8>) -> DispatchResult;\n\n}\n\n\n\nimpl SubscriptionManager for () {\n\n fn on_check_tx(_: Vec<u8>) -> DispatchResult {\n\n Ok(())\n\n }\n\n fn on_deliver_tx(_: Vec<u8>) -> DispatchResult {\n\n Ok(())\n\n }\n\n}\n\n\n\nmacro_rules! tuple_impls {\n\n ( $( $name:ident )+ ) => {\n\n impl<$($name: SubscriptionManager),+> SubscriptionManager for ($($name,)+)\n\n {\n\n fn on_check_tx(data: Vec<u8>) -> DispatchResult {\n\n $($name::on_check_tx(data.clone())?;)+\n", "file_path": "cosmos-abci/src/lib.rs", "rank": 34, "score": 95134.54452383549 }, { "content": "/// The CosmosAbci trait that defines `check_tx`, `deliver_tx` methods.\n\npub trait CosmosAbci {\n\n fn check_tx(data: Vec<u8>) -> Result<u64, DispatchError>;\n\n fn deliver_tx(data: Vec<u8>) -> DispatchResult;\n\n}\n\n\n\n/// The pallet configuration trait for aura consensus.\n", "file_path": "cosmos-abci/src/lib.rs", "rank": 35, "score": 95132.04931210996 }, { "content": "#[runtime_interface]\n\npub trait AbciInterface {\n\n /// Write data to the external DB\n\n fn storage_write(key: Vec<u8>, value: Vec<u8>) -> Result<(), DispatchError> {\n\n abci_storage::get_abci_storage_instance()\n\n .map_err(|_| \"failed to get abci storage instance\")?\n\n .write(key, value)\n\n .map_err(|_| \"failed to write some data into the abci storage\")?;\n\n Ok(())\n\n }\n\n\n\n /// Get data from the external DB\n\n fn storage_get(key: Vec<u8>) -> Result<Option<Vec<u8>>, DispatchError> {\n\n let value = abci_storage::get_abci_storage_instance()\n\n .map_err(|_| \"failed to get abci storage instance\")?\n\n .get(key)\n\n .map_err(|_| \"failed to get value from the abci storage\")?;\n\n\n\n Ok(value)\n\n }\n\n\n", "file_path": "cosmos-abci/src/lib.rs", "rank": 36, "score": 95124.5920620253 }, { "content": "pub fn deserialize_vec<'a, T: serde::Deserialize<'a>>(\n\n bytes: &'a [u8],\n\n) -> Result<Vec<T>, Box<dyn std::error::Error>> {\n\n Ok(bincode::deserialize(bytes).map_err(|_| \"cannot deserialize\")?)\n\n}\n\n\n", "file_path": "cosmos-abci/abci/src/utils.rs", "rank": 37, "score": 93381.11995726007 }, { "content": " /// Broadcast ABCI transaction\n\n pub trait ExtrinsicConstructionApi {\n\n fn broadcast_abci_tx(data: Vec<u8>);\n\n }\n\n}\n\n\n\n/// Runtime interfaces for interaction with other modules.\n", "file_path": "cosmos-abci/src/lib.rs", "rank": 39, "score": 93009.22327388576 }, { "content": "#[automock]\n\npub trait AbciStorage {\n\n fn write(&mut self, key: Vec<u8>, data: Vec<u8>) -> CustomStorageResult<()>;\n\n\n\n fn get(&mut self, key: Vec<u8>) -> CustomStorageResult<Option<Vec<u8>>>;\n\n}\n\n\n", "file_path": "cosmos-abci/abci_storage/src/lib.rs", "rank": 41, "score": 91063.42736025923 }, { "content": "/// Generate sha256 hash from value.\n\npub fn get_sha256_hash(from: &[u8]) -> Vec<u8> {\n\n let mut digest = Sha256::new();\n\n digest.update(from);\n\n let value = digest.finalize();\n\n value.clone().to_vec()\n\n}\n", "file_path": "cosmos-abci/src/crypto_transform/hashers.rs", "rank": 42, "score": 90973.1291685677 }, { "content": "/// Generate ripemd160 hash from value.\n\npub fn get_ripemd160_hash(from: &[u8]) -> Vec<u8> {\n\n let mut digest = Ripemd160::new();\n\n digest.update(from);\n\n let value = digest.finalize();\n\n value.clone().to_vec()\n\n}\n\n\n", "file_path": "cosmos-abci/src/crypto_transform/hashers.rs", "rank": 43, "score": 90973.1291685677 }, { "content": "/// Generate a crypto pair from seed.\n\npub fn get_from_seed<TPublic: Public>(seed: &str) -> <TPublic::Pair as Pair>::Public {\n\n TPublic::Pair::from_string(&format!(\"//{}\", seed), None)\n\n .expect(\"static values are valid; qed\")\n\n .public()\n\n}\n\n\n", "file_path": "src/chain_spec.rs", "rank": 44, "score": 90022.49131019946 }, { "content": "/// Method for start RPC server.\n\npub fn start_server(client: Arc<crate::service::FullClient>) {\n\n let mut io = IoHandler::new();\n\n\n\n /** Method for broadcasting abci tx value and return block best_number. */\n\n fn broadcast_abci_tx(tx_value: Vec<u8>, client: Arc<crate::service::FullClient>) -> u32 {\n\n let info = client.info();\n\n let at = BlockId::<Block>::hash(info.best_hash);\n\n client.runtime_api().broadcast_abci_tx(&at, tx_value).ok();\n\n info.best_number\n\n };\n\n\n\n /** Handle and map RPC server error. */\n\n fn handle_error(e: std::boxed::Box<dyn std::error::Error>) -> Error {\n\n Error {\n\n code: ErrorCode::ServerError(1),\n\n message: e.to_string(),\n\n data: None,\n\n }\n\n }\n\n\n", "file_path": "src/cosmos_rpc/mod.rs", "rank": 46, "score": 88791.18191354137 }, { "content": "type AbciStorageType = Box<dyn AbciStorage + Send>;\n", "file_path": "cosmos-abci/abci_storage/src/lib.rs", "rank": 47, "score": 86665.14921715416 }, { "content": "#[cfg(feature = \"std\")]\n\npub fn native_version() -> NativeVersion {\n\n NativeVersion {\n\n runtime_version: VERSION,\n\n can_author_with: Default::default(),\n\n }\n\n}\n\n\n\nparameter_types! {\n\n pub const BlockHashCount: BlockNumber = 2400;\n\n /// We allow for 2 seconds of compute with a 6 second average block time.\n\n pub const MaximumBlockWeight: Weight = 2 * WEIGHT_PER_SECOND;\n\n pub const AvailableBlockRatio: Perbill = Perbill::from_percent(75);\n\n /// Assume 10% of weight for average on_initialize calls.\n\n pub MaximumExtrinsicWeight: Weight = AvailableBlockRatio::get()\n\n .saturating_sub(Perbill::from_percent(10)) * MaximumBlockWeight::get();\n\n pub const MaximumBlockLength: u32 = 5 * 1024 * 1024;\n\n pub const Version: RuntimeVersion = VERSION;\n\n}\n\n\n\n// Configure FRAME pallets to include in runtime.\n", "file_path": "runtime/src/lib.rs", "rank": 48, "score": 86153.97483051808 }, { "content": "/// Parse and run command line arguments\n\npub fn run() -> sc_cli::Result<()> {\n\n let cli = Cli::from_args();\n\n match cli.subcommand {\n\n Some(Subcommand::BuildSpec(ref cmd)) => {\n\n let runner = cli.create_runner(cmd)?;\n\n runner.sync_run(|config| cmd.run(config.chain_spec, config.network))\n\n }\n\n Some(Subcommand::CheckBlock(ref cmd)) => {\n\n let runner = cli.create_runner(cmd)?;\n\n runner.async_run(|config| {\n\n let PartialComponents {\n\n client,\n\n task_manager,\n\n import_queue,\n\n ..\n\n } = service::new_partial(&config)?;\n\n Ok((cmd.run(client, import_queue), task_manager))\n\n })\n\n }\n\n Some(Subcommand::ExportBlocks(ref cmd)) => {\n", "file_path": "src/command.rs", "rank": 49, "score": 85106.7510887224 }, { "content": " trait Store for Module<T: Trait> as ABCITxStorage {\n\n ABCITxStorage get(fn abci_tx): map hasher(blake2_128_concat) T::BlockNumber => ABCITxs;\n\n CosmosAccounts get(fn cosmos_accounts): map hasher(blake2_128_concat) Vec<u8> => Option<T::AccountId> = None;\n\n AccountLedger get(fn account_ledgers): map hasher(blake2_128_concat) T::AccountId => OptionalLedger<T::AccountId>;\n\n SubstrateAccounts get(fn substrate_accounts): map hasher(blake2_128_concat) <T as session::Trait>::ValidatorId => Option<utils::CosmosAccount> = None;\n\n }\n\n}\n\n\n\ndecl_module! {\n\n /// The cosmos_abci pallet that connects Cosmos and Substrate nodes.\n\n pub struct Module<T: Trait> for enum Call where origin: T::Origin {\n\n // Block initialization.\n\n fn on_initialize(block_number: T::BlockNumber) -> Weight {\n\n 0\n\n }\n\n\n\n // Map Cosmos account with the provided Substrate account.\n\n #[weight = 0]\n\n fn insert_cosmos_account(\n\n origin,\n", "file_path": "cosmos-abci/src/lib.rs", "rank": 50, "score": 76960.13502472229 }, { "content": "/// Builds a new service for a light client.\n\npub fn new_light(config: Configuration) -> Result<TaskManager, ServiceError> {\n\n let (client, backend, keystore, mut task_manager, on_demand) =\n\n sc_service::new_light_parts::<Block, RuntimeApi, Executor>(&config)?;\n\n\n\n let transaction_pool = Arc::new(sc_transaction_pool::BasicPool::new_light(\n\n config.transaction_pool.clone(),\n\n config.prometheus_registry(),\n\n task_manager.spawn_handle(),\n\n client.clone(),\n\n on_demand.clone(),\n\n ));\n\n\n\n let grandpa_block_import = sc_finality_grandpa::light_block_import(\n\n client.clone(),\n\n backend.clone(),\n\n &(client.clone() as Arc<_>),\n\n Arc::new(on_demand.checker().clone()) as Arc<_>,\n\n )?;\n\n let finality_proof_import = grandpa_block_import.clone();\n\n let finality_proof_request_builder =\n", "file_path": "src/service.rs", "rank": 51, "score": 69246.77391702545 }, { "content": "/// Builds a new service for a full client.\n\npub fn new_full(config: Configuration) -> Result<TaskManager, ServiceError> {\n\n // TODO: fix it afer substarte update\n\n let config_dir = config\n\n .base_path\n\n .as_ref()\n\n .ok_or_else(|| \"base_path has not been set\")\n\n .unwrap()\n\n .path()\n\n .to_path_buf()\n\n .join(\"chains\")\n\n .join(config.chain_spec.id());\n\n let abci_server_url = &pallet_abci::get_server_url();\n\n\n\n // Init ABCI instance.\n\n pallet_abci::set_abci_instance(Box::new(\n\n pallet_abci::grpc::AbciinterfaceGrpc::connect(abci_server_url)\n\n .map_err(|_| \"failed to connect\")\n\n .unwrap(),\n\n ))\n\n .map_err(|_| \"failed to set abci instance\")\n", "file_path": "src/service.rs", "rank": 52, "score": 69246.77391702545 }, { "content": "fn test_abci_check_tx() {\n\n let mut abci_mock = MockAbciInterface::new();\n\n abci_mock.expect_check_tx().returning(|v: Vec<u8>| {\n\n let mut ret = MockResponseCheckTx::new();\n\n ret.expect_get_data()\n\n .returning(move || -> Vec<u8> { v.clone() });\n\n Ok(Box::new(ret))\n\n });\n\n\n\n set_abci_instance(Box::new(abci_mock)).unwrap();\n\n\n\n assert_eq!(\n\n get_abci_instance()\n\n .unwrap()\n\n .check_tx(vec![1, 2, 3])\n\n .unwrap()\n\n .get_data(),\n\n vec![1, 2, 3]\n\n );\n\n\n\n assert_ne!(\n\n get_abci_instance()\n\n .unwrap()\n\n .check_tx(vec![1, 2, 3])\n\n .unwrap()\n\n .get_data(),\n\n vec![1, 2, 4]\n\n );\n\n}\n\n\n", "file_path": "cosmos-abci/abci/tests/unit_tests.rs", "rank": 53, "score": 68992.33772032091 }, { "content": "fn test_abci_deliver_tx() {\n\n let mut abci_mock = MockAbciInterface::new();\n\n abci_mock.expect_deliver_tx().returning(|v: Vec<u8>| {\n\n let mut ret = MockResponseDeliverTx::new();\n\n ret.expect_get_data()\n\n .returning(move || -> Vec<u8> { v.clone() });\n\n Ok(Box::new(ret))\n\n });\n\n\n\n set_abci_instance(Box::new(abci_mock)).unwrap();\n\n\n\n assert_eq!(\n\n get_abci_instance()\n\n .unwrap()\n\n .deliver_tx(vec![1, 2, 3])\n\n .unwrap()\n\n .get_data(),\n\n vec![1, 2, 3]\n\n );\n\n\n\n assert_ne!(\n\n get_abci_instance()\n\n .unwrap()\n\n .deliver_tx(vec![1, 2, 3])\n\n .unwrap()\n\n .get_data(),\n\n vec![1, 2, 4]\n\n );\n\n}\n\n\n", "file_path": "cosmos-abci/abci/tests/unit_tests.rs", "rank": 54, "score": 68992.33772032091 }, { "content": "fn initial_poa_keys() -> Vec<(AccountId, AccountId, SessionKeys)> {\n\n vec![\n\n (\n\n get_account_id_from_seed::<ed25519::Public>(\"Alice\"),\n\n get_account_id_from_seed::<sr25519::Public>(\"Alice\"),\n\n to_session_keys(&Ed25519Keyring::Alice, &Sr25519Keyring::Alice),\n\n ),\n\n (\n\n get_account_id_from_seed::<ed25519::Public>(\"Bob\"),\n\n get_account_id_from_seed::<sr25519::Public>(\"Bob\"),\n\n to_session_keys(&Ed25519Keyring::Bob, &Sr25519Keyring::Bob),\n\n ),\n\n ]\n\n}\n\n\n", "file_path": "src/chain_spec.rs", "rank": 55, "score": 66793.90274088547 }, { "content": "fn init_chain() -> Result<(), ServiceError> {\n\n let key = b\"init_chain_info\".to_vec();\n\n\n\n let mut abci_storage = abci_storage::get_abci_storage_instance()\n\n .map_err(|_| \"failed to get abci storage instance\")?;\n\n\n\n match abci_storage\n\n .get(key.clone())\n\n .map_err(|_| \"failed to get value from the abci storage\")?\n\n {\n\n // Just check that in storage exists some value to the following key\n\n Some(_) => {}\n\n None => {\n\n let genesis = pallet_abci::utils::parse_cosmos_genesis_file(\n\n &pallet_abci::utils::get_abci_genesis(),\n\n )\n\n .map_err(|_| \"failed to get cosmos genesis file\")?;\n\n\n\n let response = pallet_abci::get_abci_instance()\n\n .map_err(|_| \"failed to setup connection\")?\n", "file_path": "src/service.rs", "rank": 56, "score": 66085.74144223533 }, { "content": "fn main() {\n\n generate_cargo_keys();\n\n rerun_if_git_head_changed();\n\n}\n", "file_path": "build.rs", "rank": 57, "score": 55341.82201743641 }, { "content": "type AbciClient = protos::tendermint::abci::abci_application_client::AbciApplicationClient<\n\n tonic::transport::Channel,\n\n>;\n\n\n\npub struct AbciinterfaceGrpc {\n\n rt: Runtime,\n\n client: AbciClient,\n\n chain_id: String,\n\n last_commit_hash: Vec<u8>,\n\n tx_chain: Vec<Vec<u8>>,\n\n}\n\n\n\nimpl AbciinterfaceGrpc {\n\n pub fn connect(abci_endpoint: &str) -> Result<Self, Box<dyn std::error::Error>> {\n\n let mut rt = Runtime::new()?;\n\n let future = async {\n\n // Translates str into static str\n\n let endpoint: &'static str = Box::leak(abci_endpoint.into());\n\n AbciClient::connect(endpoint).await\n\n };\n", "file_path": "cosmos-abci/abci/src/grpc.rs", "rank": 58, "score": 54669.84199888118 }, { "content": "type FullGrandpaBlockImport =\n\n sc_finality_grandpa::GrandpaBlockImport<FullBackend, Block, FullClient, FullSelectChain>;\n\n\n\n// Returns a new patrial for babe.\n", "file_path": "src/service.rs", "rank": 59, "score": 53763.68463653118 }, { "content": "fn main() {\n\n WasmBuilder::new()\n\n .with_current_project()\n\n .with_wasm_builder_from_crates(\"2.0.0\")\n\n .export_heap_base()\n\n .import_memory()\n\n .build()\n\n}\n", "file_path": "runtime/build.rs", "rank": 60, "score": 53673.439169563 }, { "content": "fn wait<F: Future>(rt: &Runtime, future: F) -> F::Output {\n\n let handle = rt.handle().clone();\n\n block_in_place(move || handle.block_on(future))\n\n}\n", "file_path": "cosmos-abci/abci/src/grpc.rs", "rank": 61, "score": 51818.69685125571 }, { "content": "/// Configure initial storage state for FRAME modules.\n\nfn testnet_genesis(\n\n wasm_binary: &[u8],\n\n session_keys: Vec<(AccountId, AccountId, SessionKeys)>,\n\n root_key: AccountId,\n\n endowed_accounts: Vec<AccountId>,\n\n _enable_println: bool,\n\n) -> GenesisConfig {\n\n GenesisConfig {\n\n frame_system: Some(SystemConfig {\n\n // Add Wasm runtime to storage.\n\n code: wasm_binary.to_vec(),\n\n changes_trie_config: Default::default(),\n\n }),\n\n pallet_balances: Some(BalancesConfig {\n\n // Configure endowed accounts with initial balance of 1 << 60.\n\n balances: endowed_accounts\n\n .iter()\n\n .cloned()\n\n .map(|k| (k, 1 << 60))\n\n .collect(),\n", "file_path": "src/chain_spec.rs", "rank": 62, "score": 50801.04210337147 }, { "content": "fn main() {\n\n tonic_build::configure()\n\n .type_attribute(\".\", \"#[derive(serde::Serialize, serde::Deserialize)]\")\n\n .compile(&[\"proto/types.proto\"], &[\"proto\"])\n\n .unwrap();\n\n}\n", "file_path": "cosmos-abci/abci/build.rs", "rank": 63, "score": 50801.04210337147 }, { "content": "#[test]\n\nfn run_all_tests() {\n\n test_abci_info();\n\n test_abci_set_option();\n\n test_abci_check_tx();\n\n test_abci_deliver_tx();\n\n test_abci_echo();\n\n test_abci_flush();\n\n}\n\n\n", "file_path": "cosmos-abci/abci/tests/unit_tests.rs", "rank": 64, "score": 47369.718243921954 }, { "content": "fn test_abci_info() {\n\n let mut abci_mock = MockAbciInterface::new();\n\n let cosmos_response_app_version = 0;\n\n let cosmos_response_version = \"\".to_string();\n\n let cosmos_response_data = \"SimApp\".to_string();\n\n\n\n abci_mock.expect_info().returning(|| {\n\n let mut ret = MockResponseInfo::new();\n\n ret.expect_get_data()\n\n .returning(move || -> String { \"SimApp\".to_string() });\n\n ret.expect_get_app_version().returning(move || -> u64 { 0 });\n\n ret.expect_get_version()\n\n .returning(move || -> String { \"\".to_string() });\n\n Ok(Box::new(ret))\n\n });\n\n\n\n set_abci_instance(Box::new(abci_mock)).unwrap();\n\n\n\n assert_eq!(\n\n get_abci_instance().unwrap().info().unwrap().get_data(),\n", "file_path": "cosmos-abci/abci/tests/unit_tests.rs", "rank": 65, "score": 46404.96056861081 }, { "content": "fn test_abci_flush() {\n\n let mut abci_mock = MockAbciInterface::new();\n\n abci_mock.expect_flush().returning(|| {\n\n let ret = MockResponseFlush::new();\n\n Ok(Box::new(ret))\n\n });\n\n\n\n set_abci_instance(Box::new(abci_mock)).unwrap();\n\n\n\n assert_eq!(get_abci_instance().unwrap().flush().is_ok(), true);\n\n}\n", "file_path": "cosmos-abci/abci/tests/unit_tests.rs", "rank": 66, "score": 46404.96056861081 }, { "content": "fn test_abci_echo() {\n\n let mut abci_mock = MockAbciInterface::new();\n\n abci_mock.expect_echo().returning(|mut v: String| {\n\n let mut ret = MockResponseEcho::new();\n\n ret.expect_get_message().returning(move || -> String {\n\n v.push_str(&v.clone());\n\n v.clone()\n\n });\n\n Ok(Box::new(ret))\n\n });\n\n\n\n //\n\n set_abci_instance(Box::new(abci_mock)).unwrap();\n\n\n\n assert_eq!(\n\n get_abci_instance()\n\n .unwrap()\n\n .echo(\"Hello\".to_string())\n\n .unwrap()\n\n .get_message(),\n", "file_path": "cosmos-abci/abci/tests/unit_tests.rs", "rank": 67, "score": 46404.96056861081 }, { "content": "fn test_abci_set_option() {\n\n let mut abci_mock = MockAbciInterface::new();\n\n let cosmos_response_code: u32 = 0;\n\n let cosmos_response_log = \"IHAVEIDEA\";\n\n let cosmos_response_info = \"IHAVENOIDEA\";\n\n abci_mock\n\n .expect_set_option()\n\n .returning(|_key: &str, _value: &str| {\n\n println!(\"{}\", _key);\n\n println!(\"{}\", _value);\n\n let mut ret = MockResponseSetOption::new();\n\n ret.expect_get_code().returning(move || -> u32 { 0 });\n\n ret.expect_get_log()\n\n .returning(move || -> String { \"IHAVEIDEA\".to_string() });\n\n ret.expect_get_info()\n\n .returning(move || -> String { \"IHAVENOIDEA\".to_string() });\n\n Ok(Box::new(ret))\n\n });\n\n\n\n set_abci_instance(Box::new(abci_mock)).unwrap();\n", "file_path": "cosmos-abci/abci/tests/unit_tests.rs", "rank": 68, "score": 45512.85908938559 }, { "content": "fn main() -> sc_cli::Result<()> {\n\n command::run()\n\n}\n", "file_path": "src/main.rs", "rank": 69, "score": 45326.42398802951 }, { "content": "#[test]\n\nfn test_abci_storage() {\n\n // init storage instance\n\n let storage = abci_storage::rocksdb::AbciStorageRocksdb::init(\"abci_storage_test\").unwrap();\n\n abci_storage::set_abci_storage_instance(Box::new(storage)).unwrap();\n\n\n\n abci_storage::get_abci_storage_instance()\n\n .unwrap()\n\n .write(b\"key 1\".to_vec(), b\"value 1\".to_vec())\n\n .unwrap();\n\n\n\n abci_storage::get_abci_storage_instance()\n\n .unwrap()\n\n .write(b\"key 2\".to_vec(), b\"value 2\".to_vec())\n\n .unwrap();\n\n\n\n abci_storage::get_abci_storage_instance()\n\n .unwrap()\n\n .write(b\"key 3\".to_vec(), b\"value 3\".to_vec())\n\n .unwrap();\n\n\n", "file_path": "cosmos-abci/abci_storage/tests/abci_storage_test.rs", "rank": 70, "score": 44685.50401024748 }, { "content": "const { ApiPromise, WsProvider } = require('@polkadot/api');\n\nconst { Keyring } = require('@polkadot/keyring');\n\nconst http = require('http');\n\nconst url = require('url');\n\nconst appConstants = require('./app.constants');\n\n\n\nfunction getBlockchainAccount(keypair) {\n\n const keyring = new Keyring({ type: \"sr25519\" });\n\n return keyring.addFromUri(process.env.SUDO || keypair);\n\n}\n\n\n\nasync function startServer() {\n\n const WS_URL = process.env.NODE_ENV === 'production' ? appConstants.ENDPOINT_PROD : appConstants.ENDPOINT_LOCAL;\n\n const provider = new WsProvider(WS_URL);\n\n const api = await ApiPromise.create({ provider });\n\n const server = http.createServer();\n\n\n\n // Query node data.\n\n server.on('request', async (req, res) => {\n\n const { type } = req.query || {};\n\n if (type) {\n\n res.write(type);\n\n res.end();\n\n } else {\n\n const validators = await api.query.session.validators();\n\n res.write(JSON.stringify(validators));\n\n res.end();\n\n }\n\n });\n\n\n\n server.listen(8000).on('listening', () => {\n\n console.log('Server listen port', 8000);\n\n });\n\n}\n\n\n\nstartServer();\n", "file_path": "node_testing_ui/tx-server.app.js", "rank": 71, "score": 44445.968399875776 }, { "content": "/// Public account type.\n\ntype AccountPublic = <Signature as Verify>::Signer;\n\n\n", "file_path": "src/chain_spec.rs", "rank": 72, "score": 43293.14909688561 }, { "content": "/// Backend type include Block.\n\ntype FullBackend = sc_service::TFullBackend<Block>;\n", "file_path": "src/service.rs", "rank": 73, "score": 42919.360737218 }, { "content": "/// The optional ledger type.\n\ntype OptionalLedger<AccountId> = Option<(AccountId, Balance)>;\n\n\n\n/// The default Cosmos account curve type.\n\npub const COSMOS_ACCOUNT_DEFAULT_PUB_KEY_TYPE: &str = \"ed25519\";\n\n/// Priority for unsigned transactions.\n\npub const UNSIGNED_TXS_PRIORITY: u64 = 100;\n\n/// Session duration in blocks.\n\npub const SESSION_BLOCKS_PERIOD: u32 = 5;\n\n#[allow(dead_code)]\n\nconst LAST_COSMOS_VALIDATORS_KEY: &[u8; 22] = b\"last_cosmos_validators\";\n\n\n\n/// The KeyType ID.\n\npub const KEY_TYPE: KeyTypeId = KeyTypeId(*b\"abci\");\n\n/// Based on the above `KeyTypeId` we need to generate a pallet-specific crypto type wrapper.\n\n/// We can utilize the supported crypto algorithms (`sr25519`, `ed25519` and `ecdsa`) and augment\n\n/// them with the pallet-specific identifier.\n\npub mod crypto {\n\n use crate::KEY_TYPE;\n\n use sp_runtime::app_crypto::{app_crypto, sr25519};\n\n\n\n app_crypto!(sr25519, KEY_TYPE);\n\n}\n\n\n", "file_path": "cosmos-abci/src/lib.rs", "rank": 74, "score": 39748.8329679075 }, { "content": "/// Longest selected chain type include FullBackend, Block.\n\ntype FullSelectChain = sc_consensus::LongestChain<FullBackend, Block>;\n\n\n", "file_path": "src/service.rs", "rank": 75, "score": 39748.460651346715 }, { "content": "type CustomStorageResult<T> = Result<T, Box<dyn std::error::Error>>;\n\n\n", "file_path": "cosmos-abci/abci_storage/src/lib.rs", "rank": 76, "score": 32205.399264077358 }, { "content": "use codec::{Decode, Encode};\n\nuse sp_runtime::RuntimeDebug;\n\nuse sp_std::prelude::*;\n\n\n\npub struct ExposureOf<T>(sp_std::marker::PhantomData<T>);\n\n\n\n#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Encode, Decode, Default, RuntimeDebug)]\n\npub struct Exposure<AccountId, Balance> {\n\n pub total: Balance,\n\n pub own: Balance,\n\n pub others: Vec<(AccountId, Balance)>,\n\n}\n\n\n\npub struct StashOf<T>(sp_std::marker::PhantomData<T>);\n\n\n\npub type CosmosAccountPubKey = Vec<u8>;\n\n\n\n#[derive(Encode, Decode)]\n\npub struct CosmosAccount {\n\n pub pub_key: CosmosAccountPubKey,\n\n pub power: i64,\n\n}\n", "file_path": "cosmos-abci/src/utils.rs", "rank": 78, "score": 30305.151433233987 }, { "content": "//! types module that define Substrate RPC responses.\n\n\n\nuse serde_derive::{Deserialize, Serialize};\n\n\n\n/// AbciQueryParams RPC response.\n\n#[derive(Serialize, Deserialize)]\n\npub struct AbciQueryParams {\n\n pub path: String,\n\n pub data: String,\n\n pub height: String,\n\n pub prove: bool,\n\n}\n\n\n\n/// AbciTxCommitParams RPC response.\n\n#[derive(Serialize, Deserialize)]\n\npub struct AbciTxCommitParams {\n\n pub tx: String,\n\n}\n\n\n\n/// AbciTxBroadcastParams RPC response.\n", "file_path": "src/cosmos_rpc/types.rs", "rank": 79, "score": 30251.55528237302 }, { "content": "#[derive(Serialize, Deserialize)]\n\npub struct AbciTxBroadcastParams {\n\n pub tx: String,\n\n}\n\n\n\n/// AbciInfo RPC response.\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct AbciInfo {}\n\n\n\n/// AbciSetOption RPC response.\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct AbciSetOption {\n\n pub key: String,\n\n pub value: String,\n\n}\n\n\n\n/// AbciCheckTx RPC response.\n\n#[derive(Serialize, Deserialize)]\n\npub struct AbciCheckTx {\n\n pub tx: String,\n\n pub check_tx_type: i32,\n\n}\n", "file_path": "src/cosmos_rpc/types.rs", "rank": 80, "score": 30245.38146705977 }, { "content": " .ok()\n\n .ok_or(FAILED_SETUP_CONNECTION_MSG);\n\n\n\n match abci_instance_res {\n\n Ok(mut abci_instance_res_ok) => {\n\n let data = hex::decode(query_params.data).unwrap_or_default();\n\n let mut path = query_params.path;\n\n\n\n if path.chars().count() == 0 {\n\n path = \"/\".to_string();\n\n }\n\n\n\n let height = query_params.height.parse::<i64>().unwrap_or(0);\n\n let abci_query_res = abci_instance_res_ok\n\n .query(path, data, height, query_params.prove)\n\n .ok()\n\n .ok_or(\"Failed to Query().\");\n\n\n\n match abci_query_res {\n\n Err(e) => Ok(json!({ \"error\": e })),\n", "file_path": "src/cosmos_rpc/mod.rs", "rank": 81, "score": 30188.67359650241 }, { "content": "\n\n match abci_instance_res {\n\n Ok(mut abci_instance_res_ok) => {\n\n let abci_check_tx_res = abci_instance_res_ok\n\n .check_tx(tx)\n\n .ok()\n\n .ok_or(\"Failed to CheckTx().\");\n\n\n\n match abci_check_tx_res {\n\n Ok(abci_check_tx_res_ok) => {\n\n let origin_data = abci_check_tx_res_ok.get_data();\n\n let mut data: Option<String> = None;\n\n\n\n match std::str::from_utf8(&origin_data) {\n\n Ok(data_res_ok) => {\n\n let data_str = data_res_ok.to_string();\n\n if data_str.chars().count() > 0 {\n\n data = Some(data_str);\n\n }\n\n }\n", "file_path": "src/cosmos_rpc/mod.rs", "rank": 82, "score": 30181.857773025033 }, { "content": " async move {\n\n let params: types::AbciTxCommitParams = params.parse()?;\n\n let tx_value = base64::decode(params.tx)\n\n .map_err(|_| handle_error(FAILED_TO_DECODE_TX_MSG.to_owned().into()))?;\n\n\n\n let result = pallet_abci::get_abci_instance()\n\n .map_err(handle_error)?\n\n .check_tx(tx_value.clone())\n\n .map_err(handle_error)?;\n\n\n\n let best_height: u32 = broadcast_abci_tx(tx_value, client);\n\n\n\n Ok(json!({\n\n \"height\": (best_height + 1).to_string(),\n\n \"hash\": \"\",\n\n \"deliver_tx\": {\n\n \"log\": result.get_log(),\n\n \"data\": base64::encode(result.get_data()),\n\n \"code\": result.get_code().to_string()\n\n },\n", "file_path": "src/cosmos_rpc/mod.rs", "rank": 83, "score": 30181.85674705728 }, { "content": "//! A module that expose start_server() method for running Substrate RPC server from node.\n\n\n\n/// Declare types module.\n\nmod types;\n\n\n\nuse jsonrpc_http_server::jsonrpc_core::{serde_json::json, Error, ErrorCode, IoHandler, Params};\n\nuse jsonrpc_http_server::ServerBuilder;\n\nuse node_template_runtime::opaque::Block;\n\nuse node_template_runtime::pallet_cosmos_abci::ExtrinsicConstructionApi;\n\nuse sp_api::ProvideRuntimeApi;\n\nuse sp_blockchain::HeaderBackend;\n\nuse sp_runtime::generic::BlockId;\n\nuse std::sync::Arc;\n\n\n\n/// Default ABCI RPC url.\n\npub const DEFAULT_ABCI_RPC_URL: &str = \"127.0.0.1:26657\";\n\n/// Error message for failed connection.\n\npub const FAILED_SETUP_CONNECTION_MSG: &str = \"Failed to get abci instance.\";\n\n/// Error message for decode tx failed.\n\npub const FAILED_TO_DECODE_TX_MSG: &str = \"Failde to decode tx.\";\n\n\n\n/// Method for getting RPC server url form active env.\n", "file_path": "src/cosmos_rpc/mod.rs", "rank": 84, "score": 30179.672317082885 }, { "content": " }))\n\n }\n\n\n\n /** Substrate RPC set_option() method. */\n\n async fn fetch_abci_set_option(\n\n params: Params,\n\n ) -> sc_service::Result<jsonrpc_core::Value, Error> {\n\n let query_params: types::AbciSetOption = params.parse()?;\n\n let key: &str = &query_params.key;\n\n let value: &str = &query_params.value;\n\n let abci_instance_res = pallet_abci::get_abci_instance()\n\n .ok()\n\n .ok_or(FAILED_SETUP_CONNECTION_MSG);\n\n\n\n match abci_instance_res {\n\n Ok(mut abci_instance_res_ok) => {\n\n let abci_set_option_res = abci_instance_res_ok\n\n .set_option(key, value)\n\n .ok()\n\n .ok_or(\"Failed to SetOption().\");\n", "file_path": "src/cosmos_rpc/mod.rs", "rank": 85, "score": 30179.144272313206 }, { "content": " Ok(abci_query_res_ok) => {\n\n let origin_proof = &abci_query_res_ok.get_proof();\n\n\n\n let mut proof: Option<String> = None;\n\n\n\n match origin_proof {\n\n Some(proof_res_ok) => {\n\n proof = Some(format!(\"{:?}\", proof_res_ok));\n\n }\n\n None => {}\n\n }\n\n\n\n Ok(json!({\n\n \"response\": {\n\n \"log\" : abci_query_res_ok.get_log(),\n\n \"height\" : abci_query_res_ok.get_height().to_string(),\n\n \"index\" : abci_query_res_ok.get_index().to_string(),\n\n \"code\" : abci_query_res_ok.get_code().to_string(),\n\n \"key\" : Some(base64::encode(abci_query_res_ok.get_key())),\n\n \"value\" : Some(base64::encode(abci_query_res_ok.get_value())),\n", "file_path": "src/cosmos_rpc/mod.rs", "rank": 86, "score": 30178.20658958016 }, { "content": " let result = pallet_abci::get_abci_instance()\n\n .map_err(handle_error)?\n\n .check_tx(tx_value.clone())\n\n .map_err(handle_error)?;\n\n\n\n broadcast_abci_tx(tx_value, client);\n\n\n\n Ok(json!({\n\n \"code\": result.get_code(),\n\n \"data\": base64::encode(result.get_data()),\n\n \"log\": result.get_log(),\n\n \"codespace\": \"\",\n\n \"hash\": \"\",\n\n }))\n\n }\n\n });\n\n\n\n let client_commit_copy = client;\n\n io.add_method(\"broadcast_tx_commit\", move |params: Params| {\n\n let client = client_commit_copy.clone();\n", "file_path": "src/cosmos_rpc/mod.rs", "rank": 87, "score": 30175.219007545926 }, { "content": " .ok()\n\n .ok_or(\"Failed to Flush().\");\n\n match abci_flush_resp {\n\n Ok(_) => Ok(json!({\n\n \"response\": { }\n\n })),\n\n Err(_e) => handle_ok_error(_e),\n\n }\n\n }\n\n Err(_e) => handle_ok_error(_e),\n\n }\n\n }\n\n\n\n /** Substrate RPC check_tx() method. */\n\n async fn abci_check_tx(params: Params) -> sc_service::Result<jsonrpc_core::Value, Error> {\n\n let query_params: types::AbciCheckTx = params.parse().unwrap();\n\n let tx = hex::decode(query_params.tx).unwrap_or_default();\n\n let abci_instance_res = pallet_abci::get_abci_instance()\n\n .ok()\n\n .ok_or(FAILED_SETUP_CONNECTION_MSG);\n", "file_path": "src/cosmos_rpc/mod.rs", "rank": 88, "score": 30174.776623729274 }, { "content": " broadcast_abci_tx(tx_value, client);\n\n\n\n Ok(json!({\n\n \"code\": 0,\n\n \"data\": \"\",\n\n \"log\": \"\",\n\n \"codespace\": \"\",\n\n \"hash\": \"\"\n\n }))\n\n }\n\n });\n\n\n\n let client_tx_sync_copy = client.clone();\n\n io.add_method(\"broadcast_tx_sync\", move |params: Params| {\n\n let client = client_tx_sync_copy.clone();\n\n async move {\n\n let params: types::AbciTxBroadcastParams = params.parse()?;\n\n let tx_value = base64::decode(params.tx)\n\n .map_err(|_| handle_error(FAILED_TO_DECODE_TX_MSG.to_owned().into()))?;\n\n\n", "file_path": "src/cosmos_rpc/mod.rs", "rank": 89, "score": 30173.454301360736 }, { "content": " /** Handle and dispatch not critical RPC server error. */\n\n fn handle_ok_error(e: &str) -> sc_service::Result<jsonrpc_core::Value, Error> {\n\n Ok(json!({\n\n \"error\": e.to_string()\n\n }))\n\n }\n\n\n\n /** Substrate RPC info() method. */\n\n async fn fetch_abci_info(_: Params) -> sc_service::Result<jsonrpc_core::Value, Error> {\n\n let result = pallet_abci::get_abci_instance()\n\n .map_err(handle_error)?\n\n .info()\n\n .map_err(handle_error)?;\n\n\n\n Ok(json!({\n\n \"response\": {\n\n \"data\": result.get_data(),\n\n \"version\": result.get_version(),\n\n \"app_version\": result.get_app_version().to_string()\n\n }\n", "file_path": "src/cosmos_rpc/mod.rs", "rank": 90, "score": 30173.34765594296 }, { "content": " \"check_tx\": {\n\n \"log\": result.get_log(),\n\n \"data\": base64::encode(result.get_data()),\n\n \"code\": result.get_code().to_string()\n\n }\n\n }))\n\n }\n\n });\n\n\n\n std::thread::spawn(move || {\n\n let rpc_url = get_abci_rpc_url();\n\n let server = ServerBuilder::new(io)\n\n .threads(3)\n\n .start_http(&rpc_url.as_str().parse().unwrap())\n\n .unwrap();\n\n server.wait();\n\n });\n\n}\n", "file_path": "src/cosmos_rpc/mod.rs", "rank": 91, "score": 30172.102565956346 }, { "content": " Err(_e) => {}\n\n }\n\n\n\n Ok(json!({\n\n \"response\": {\n\n \"code\": abci_check_tx_res_ok.get_code(),\n\n \"info\": abci_check_tx_res_ok.get_info(),\n\n \"log\": abci_check_tx_res_ok.get_log(),\n\n \"data\": data,\n\n \"gas_wanted\": abci_check_tx_res_ok.get_gas_wanted(),\n\n \"gas_used\": abci_check_tx_res_ok.get_gas_used(),\n\n \"codespace\": abci_check_tx_res_ok.get_codespace()\n\n }\n\n }))\n\n }\n\n Err(_e) => handle_ok_error(_e),\n\n }\n\n }\n\n Err(_e) => handle_ok_error(_e),\n\n }\n", "file_path": "src/cosmos_rpc/mod.rs", "rank": 92, "score": 30171.76668315203 }, { "content": "\n\n match abci_set_option_res {\n\n Err(_e) => handle_ok_error(_e),\n\n Ok(abci_set_option_res_ok) => Ok(json!({\n\n \"response\": {\n\n \"code\": abci_set_option_res_ok.get_code().to_string(),\n\n \"log\": abci_set_option_res_ok.get_log(),\n\n \"info\": abci_set_option_res_ok.get_info()\n\n }\n\n })),\n\n }\n\n }\n\n Err(_e) => handle_ok_error(_e),\n\n }\n\n }\n\n\n\n /** Substrate RPC query() method. */\n\n async fn fetch_abci_query(params: Params) -> sc_service::Result<jsonrpc_core::Value, Error> {\n\n let query_params: types::AbciQueryParams = params.parse()?;\n\n let abci_instance_res = pallet_abci::get_abci_instance()\n", "file_path": "src/cosmos_rpc/mod.rs", "rank": 93, "score": 30171.45009390063 }, { "content": " }\n\n\n\n io.add_method(\"abci_info\", fetch_abci_info);\n\n\n\n io.add_method(\"abci_set_option\", fetch_abci_set_option);\n\n\n\n io.add_method(\"abci_query\", fetch_abci_query);\n\n\n\n io.add_method(\"abci_flush\", fetch_abci_flush);\n\n\n\n io.add_method(\"abci_check_tx\", abci_check_tx);\n\n\n\n let client_tx_async_copy = client.clone();\n\n io.add_method(\"broadcast_tx_async\", move |params: Params| {\n\n let client = client_tx_async_copy.clone();\n\n async move {\n\n let params: types::AbciTxBroadcastParams = params.parse()?;\n\n let tx_value = base64::decode(params.tx)\n\n .map_err(|_| handle_error(FAILED_TO_DECODE_TX_MSG.to_owned().into()))?;\n\n\n", "file_path": "src/cosmos_rpc/mod.rs", "rank": 94, "score": 30170.594064527908 }, { "content": " \"proof_ops\" : &proof,\n\n }\n\n }))\n\n }\n\n }\n\n }\n\n Err(_e) => handle_ok_error(_e),\n\n }\n\n }\n\n\n\n /** Substrate RPC flush() method. */\n\n async fn fetch_abci_flush(_: Params) -> sc_service::Result<jsonrpc_core::Value, Error> {\n\n let abci_instance_res = pallet_abci::get_abci_instance()\n\n .ok()\n\n .ok_or(FAILED_SETUP_CONNECTION_MSG);\n\n\n\n match abci_instance_res {\n\n Ok(mut abci_instance_res_ok) => {\n\n let abci_flush_resp = abci_instance_res_ok\n\n .flush()\n", "file_path": "src/cosmos_rpc/mod.rs", "rank": 95, "score": 30170.46445935681 }, { "content": "/// VersionConfigs struct that represent app version confuration.\n\npub struct VersionConfigs {\n\n pub app_version: String,\n\n pub block_version: u64,\n\n pub p2p_version: u64,\n\n}\n\n\n\n/// Implementation for VersionConfigs struct.\n\nimpl VersionConfigs {\n\n fn log_info(&self) {\n\n println!(\"BlockVersion is {}\", self.block_version);\n\n println!(\"AppVersion is {}\", self.app_version);\n\n println!(\"P2PVersion is {}\", self.p2p_version);\n\n }\n\n}\n\n\n", "file_path": "cosmos-abci/abci/src/defaults.rs", "rank": 96, "score": 29013.412269439035 }, { "content": "/// Method for getting gRPC url form active env.\n", "file_path": "cosmos-abci/abci/src/defaults.rs", "rank": 97, "score": 29004.87416872698 }, { "content": "pub mod rocksdb;\n\n\n\nuse lazy_static::lazy_static;\n\nuse mockall::automock;\n\nuse owning_ref::MutexGuardRefMut;\n\nuse std::sync::Mutex;\n\n\n\nlazy_static! {\n\n static ref ABCI_STORAGE_INSTANCE: Mutex<Option<AbciStorageType>> = Mutex::new(None);\n\n}\n\n\n", "file_path": "cosmos-abci/abci_storage/src/lib.rs", "rank": 99, "score": 24.56643159107891 } ]
Rust
contracts/link-token/src/contract.rs
hackbg/chainlink-terra-cosmwasm-contracts
a1a82fa5db9942f8c8e6ec5d0b8fe7effb830f84
use cosmwasm_std::{ to_binary, Binary, Deps, DepsMut, Env, MessageInfo, Response, StdResult, Uint128, }; use cw20::{Cw20Coin, TokenInfoResponse}; use cw20_base::{ allowances::{ execute_decrease_allowance, execute_increase_allowance, execute_transfer_from, query_allowance, }, contract::{create_accounts, execute_send, execute_transfer, query_balance}, ContractError, }; use crate::{ msg::{ExecuteMsg, InstantiateMsg, QueryMsg}, state::{TokenInfo, TOKEN_INFO}, }; pub const TOKEN_NAME: &str = "Chainlink"; pub const TOKEN_SYMBOL: &str = "LINK"; pub const DECIMALS: u8 = 18; pub const TOTAL_SUPPLY: u128 = 1_000_000_000; pub fn instantiate( mut deps: DepsMut, _env: Env, info: MessageInfo, _msg: InstantiateMsg, ) -> StdResult<Response> { let main_balance = Cw20Coin { address: info.sender.into(), amount: Uint128::from(TOTAL_SUPPLY), }; let total_supply = create_accounts(&mut deps, &[main_balance])?; let data = TokenInfo { name: TOKEN_NAME.to_string(), symbol: TOKEN_SYMBOL.to_string(), decimals: DECIMALS, total_supply, }; TOKEN_INFO.save(deps.storage, &data)?; Ok(Response::default()) } pub fn execute( deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg, ) -> Result<Response, ContractError> { match msg { ExecuteMsg::Transfer { recipient, amount } => { execute_transfer(deps, env, info, recipient, amount) } ExecuteMsg::TransferFrom { owner, recipient, amount, } => execute_transfer_from(deps, env, info, owner, recipient, amount), ExecuteMsg::Send { contract, amount, msg, } => execute_send(deps, env, info, contract, amount, msg), ExecuteMsg::IncreaseAllowance { spender, amount, expires, } => execute_increase_allowance(deps, env, info, spender, amount, expires), ExecuteMsg::DecreaseAllowance { spender, amount, expires, } => execute_decrease_allowance(deps, env, info, spender, amount, expires), } } pub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<Binary> { match msg { QueryMsg::Balance { address } => to_binary(&query_balance(deps, address)?), QueryMsg::TokenInfo {} => to_binary(&query_token_info(deps)?), QueryMsg::Allowance { owner, spender } => { to_binary(&query_allowance(deps, owner, spender)?) } } } pub fn query_token_info(deps: Deps) -> StdResult<TokenInfoResponse> { let info = TOKEN_INFO.load(deps.storage)?; Ok(info.into()) } #[cfg(test)] mod tests { use super::*; use cosmwasm_std::testing::{mock_dependencies, mock_env, mock_info}; use cosmwasm_std::{coins, Uint128}; #[test] fn test_query_token_info() { let mut deps = mock_dependencies(&coins(2, "test_token")); let env = mock_env(); let info = mock_info(&"creator", &[]); let _ = instantiate(deps.as_mut(), env, info, InstantiateMsg {}).unwrap(); let query_res = query_token_info(deps.as_ref()).unwrap(); assert_eq!( query_res, TokenInfoResponse { name: "Chainlink".to_string(), symbol: "LINK".to_string(), decimals: 18, total_supply: Uint128::from(1_000_000_000_u128) } ); } }
use cosmwasm_std::{ to_binary, Binary, Deps, DepsMut, Env, MessageInfo, Response, StdResult, Uint128, }; use cw20::{Cw20Coin, TokenInfoResponse}; use cw20_base::{ allowances::{ execute_decrease_allowance, execute_increase_allowance, execute_transfer_from, query_allowance, }, contract::{create_accounts, execute_send, execute_transfer, query_balance}, ContractError, }; use crate::{ msg::{ExecuteMsg, InstantiateMsg, QueryMsg}, state::{TokenInfo, TOKEN_INFO}, }; pub const TOKEN_NAME: &str = "Chainlink"; pub const TOKEN_SYMBOL: &str = "LINK"; pub const DECIMALS: u8 = 18; pub const TOTAL_SUPPLY: u128 = 1_000_000_000; pub fn instantiate( mut deps: DepsMut, _env: Env, info: MessageInfo, _msg: InstantiateMsg, ) -> StdResult<Response> { let main_balance = Cw20Coin { address: info.sender.into(), amount: Uint128::from(TOTAL_SUPPLY), }; let total_supply = create_accounts(&mut deps, &[main_balance])?; let data = TokenInfo { name: TOKEN_NAME.to_string(), symbol: TOKEN_SYMBOL.to_string(), decimals: DECIMALS, total_supply, }; TOKEN_INFO.save(deps.storage, &data)?; Ok(Response::default()) } pub fn execute( deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg, ) -> Result<Response, ContractError> { match msg { ExecuteMsg::Transfer { recipient, amount } => { execute_transfer(deps, env, info, recipient, amount) } ExecuteMsg::TransferFrom { owner, recipient, amount, } => execute_transfer_from(deps, env, info, owner, recipient, amount), ExecuteMsg::Send { contract, amount, msg, } => execute_send(deps, env, info, contract, amount, msg), ExecuteMsg::IncreaseAllowance { spender, amount, expires, } => execute_increase_allowance(deps, env, info, spender, amount, expires), ExecuteMsg::DecreaseAllowance { spender, amount, expires, } => execute_decrease_allowance(deps, env, info, spender, amount, expires), } } pub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<Binary> { match msg { QueryMsg::Balance { address } => to_binary(&query_balance(deps, address)?),
pub fn query_token_info(deps: Deps) -> StdResult<TokenInfoResponse> { let info = TOKEN_INFO.load(deps.storage)?; Ok(info.into()) } #[cfg(test)] mod tests { use super::*; use cosmwasm_std::testing::{mock_dependencies, mock_env, mock_info}; use cosmwasm_std::{coins, Uint128}; #[test] fn test_query_token_info() { let mut deps = mock_dependencies(&coins(2, "test_token")); let env = mock_env(); let info = mock_info(&"creator", &[]); let _ = instantiate(deps.as_mut(), env, info, InstantiateMsg {}).unwrap(); let query_res = query_token_info(deps.as_ref()).unwrap(); assert_eq!( query_res, TokenInfoResponse { name: "Chainlink".to_string(), symbol: "LINK".to_string(), decimals: 18, total_supply: Uint128::from(1_000_000_000_u128) } ); } }
QueryMsg::TokenInfo {} => to_binary(&query_token_info(deps)?), QueryMsg::Allowance { owner, spender } => { to_binary(&query_allowance(deps, owner, spender)?) } } }
function_block-function_prefix_line
[ { "content": "pub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::IsValid {\n\n previous_answer,\n\n answer,\n\n } => to_binary(&is_valid(deps, previous_answer, answer)?),\n\n QueryMsg::GetFlaggingThreshold {} => to_binary(&query_flagging_threshold(deps)?),\n\n QueryMsg::GetOwner {} => to_binary(&get_owner(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/deviation-flagging-validator/src/contract.rs", "rank": 1, "score": 314659.7963055122 }, { "content": "// Called when migrating a contract instance to a new code ID\n\npub fn migrate(_deps: DepsMut, _env: Env, _msg: MigrateMsg) -> Result<Response, ContractError> {\n\n // Do nothing\n\n Ok(Response::default())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use cosmwasm_std::{\n\n testing::{mock_env, MockApi, MockStorage},\n\n Addr, Empty,\n\n };\n\n use cw20::Cw20Coin;\n\n use cw_multi_test::{App, BankKeeper, Contract, ContractWrapper, Executor};\n\n\n\n const OWNER: &str = \"admin0001\";\n\n\n\n const PAYMENT_AMOUNT: Uint128 = Uint128::new(3);\n\n\n\n pub fn contract_proxy() -> Box<dyn Contract<Empty>> {\n", "file_path": "contracts/aggregator-proxy/src/contract.rs", "rank": 2, "score": 309791.48405648465 }, { "content": "pub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::GetPhaseAggregators {} => to_binary(&get_phase_aggregators(deps, env)?),\n\n QueryMsg::GetProposedRoundData { round_id } => {\n\n to_binary(&get_proposed_round_data(deps, env, round_id)?)\n\n }\n\n QueryMsg::GetProposedLatestRoundData {} => {\n\n to_binary(&get_proposed_latest_round_data(deps, env)?)\n\n }\n\n QueryMsg::GetProposedAggregator {} => to_binary(&get_proposed_aggregator(deps, env)?),\n\n QueryMsg::GetAggregator {} => to_binary(&get_aggregator(deps, env)?),\n\n QueryMsg::GetPhaseId {} => to_binary(&get_phase_id(deps, env)?),\n\n QueryMsg::GetOwner {} => to_binary(&get_owner(deps)?),\n\n QueryMsg::AggregatorQuery(GetRoundData { round_id }) => {\n\n to_binary(&get_round_data(deps, env, round_id)?)\n\n }\n\n QueryMsg::AggregatorQuery(GetLatestRoundData {}) => {\n\n to_binary(&get_latest_round_data(deps, env)?)\n\n }\n\n QueryMsg::AggregatorQuery(GetDecimals {}) => to_binary(&get_decimals(deps, env)?),\n\n QueryMsg::AggregatorQuery(GetVersion {}) => to_binary(&get_version(deps, env)?),\n\n QueryMsg::AggregatorQuery(GetDescription {}) => to_binary(&get_description(deps, env)?),\n\n QueryMsg::AggregatorQuery(GetLatestAnswer {}) => to_binary(&get_latest_answer(deps, env)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/aggregator-proxy/src/contract.rs", "rank": 3, "score": 306971.0951179938 }, { "content": "pub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::GetAggregatorConfig {} => to_binary(&get_aggregator_config(deps, env)?),\n\n QueryMsg::GetAllocatedFunds {} => to_binary(&get_allocated_funds(deps, env)?),\n\n QueryMsg::GetAvailableFunds {} => to_binary(&get_available_funds(deps, env)?),\n\n QueryMsg::GetWithdrawablePayment { oracle } => {\n\n to_binary(&get_withdrawable_payment(deps, env, oracle)?)\n\n }\n\n QueryMsg::GetOracleCount {} => to_binary(&get_oracle_count(deps, env)?),\n\n QueryMsg::GetOracles {} => to_binary(&get_oracles(deps, env)?),\n\n QueryMsg::GetAdmin { oracle } => to_binary(&get_admin(deps, env, oracle)?),\n\n QueryMsg::GetOracleStatus { oracle } => to_binary(&get_oracle_status(deps, env, oracle)?),\n\n QueryMsg::GetOwner {} => to_binary(&get_owner(deps)?),\n\n QueryMsg::AggregatorQuery(GetRoundData { round_id }) => {\n\n to_binary(&get_round_data(deps, env, round_id)?)\n\n }\n\n QueryMsg::AggregatorQuery(GetLatestRoundData {}) => {\n\n to_binary(&get_latest_round_data(deps, env)?)\n\n }\n\n QueryMsg::AggregatorQuery(GetDecimals {}) => to_binary(&get_decimals(deps, env)?),\n\n QueryMsg::AggregatorQuery(GetVersion {}) => to_binary(&get_version(deps, env)?),\n\n QueryMsg::AggregatorQuery(GetDescription {}) => to_binary(&get_description(deps, env)?),\n\n QueryMsg::AggregatorQuery(GetLatestAnswer {}) => to_binary(&get_latest_answer(deps, env)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 4, "score": 306971.0951179938 }, { "content": "pub fn get_decimals(deps: Deps, _env: Env) -> StdResult<u8> {\n\n query_current(deps, GetDecimals {})\n\n}\n\n\n", "file_path": "contracts/aggregator-proxy/src/contract.rs", "rank": 5, "score": 305173.3750426047 }, { "content": "pub fn get_decimals(deps: Deps, _env: Env) -> StdResult<u8> {\n\n CONFIG.load(deps.storage).map(|config| config.decimals)\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 6, "score": 305173.3750426047 }, { "content": "fn accept_ownership(deps: DepsMut, _env: Env, info: MessageInfo) -> StdResult<Vec<Attribute>> {\n\n let sender = info.sender;\n\n\n\n owner(deps.storage).update(|mut state| -> StdResult<_> {\n\n state.owner = sender.clone();\n\n state.pending_owner = None;\n\n\n\n Ok(state)\n\n })?;\n\n\n\n Ok(vec![\n\n attr(\"action\", \"ownership accepted\"),\n\n attr(\"owner\", sender),\n\n ])\n\n}\n\n\n", "file_path": "contracts/owned/src/contract.rs", "rank": 7, "score": 293419.6637934941 }, { "content": "pub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<QueryResponse> {\n\n match msg {\n\n QueryMsg::GetOwner {} => to_binary(&get_owner(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/owned/src/contract.rs", "rank": 8, "score": 290987.3030963396 }, { "content": "#[entry_point]\n\npub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> Result<QueryResponse, ContractError> {\n\n match msg {\n\n QueryMsg::GetFlag { subject } => Ok(to_binary(&get_flag(deps, subject)?)?),\n\n QueryMsg::GetFlags { subjects } => Ok(to_binary(&get_flags(deps, subjects)?)?),\n\n QueryMsg::GetRac {} => Ok(to_binary(&get_rac(deps)?)?),\n\n QueryMsg::GetOwner {} => Ok(to_binary(&get_owner(deps)?)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/flags/src/contract.rs", "rank": 9, "score": 286372.1867139882 }, { "content": "pub fn get_version(_deps: Deps, _env: Env) -> StdResult<Uint128> {\n\n Ok(VERSION)\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 10, "score": 276867.5840122532 }, { "content": "pub fn get_version(deps: Deps, _env: Env) -> StdResult<Uint128> {\n\n query_current(deps, GetVersion {})\n\n}\n\n\n", "file_path": "contracts/aggregator-proxy/src/contract.rs", "rank": 11, "score": 276867.5840122532 }, { "content": "pub fn get_oracle_count(deps: Deps, _env: Env) -> StdResult<u8> {\n\n Ok(ORACLE_ADDRESSES.load(deps.storage)?.len() as u8)\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 12, "score": 274013.46019927145 }, { "content": "pub fn get_allocated_funds(deps: Deps, _env: Env) -> StdResult<Uint128> {\n\n Ok(RECORDED_FUNDS.load(deps.storage)?.allocated)\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 13, "score": 273873.65207619965 }, { "content": "pub fn get_available_funds(deps: Deps, _env: Env) -> StdResult<Uint128> {\n\n Ok(RECORDED_FUNDS.load(deps.storage)?.available)\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 14, "score": 273873.65207619965 }, { "content": "fn validate_ownership(deps: Deps, _env: &Env, info: MessageInfo) -> Result<(), ContractError> {\n\n let owner = get_owner(deps)?;\n\n if info.sender != owner {\n\n return Err(ContractError::NotOwner {});\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "contracts/flags/src/contract.rs", "rank": 15, "score": 259256.22252661377 }, { "content": "pub fn get_withdrawable_payment(deps: Deps, _env: Env, oracle: String) -> StdResult<Uint128> {\n\n let addr = deps.api.addr_validate(&oracle)?;\n\n let oracle = ORACLES.load(deps.storage, &addr)?;\n\n Ok(oracle.withdrawable)\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 16, "score": 258937.5080789618 }, { "content": "fn validate_ownership(deps: Deps, _env: &Env, info: MessageInfo) -> Result<(), ContractError> {\n\n let owner = get_owner(deps)?;\n\n if info.sender != owner {\n\n return Err(ContractError::NotOwner {});\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use cosmwasm_std::testing::{mock_dependencies, mock_env, mock_info};\n\n use cosmwasm_std::{attr, coins, Api};\n\n\n\n #[test]\n\n fn proper_initialization() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = InstantiateMsg {\n\n flags: \"flags\".to_string(),\n", "file_path": "contracts/deviation-flagging-validator/src/contract.rs", "rank": 17, "score": 254785.55381401215 }, { "content": "pub fn get_latest_round_data(deps: Deps, _env: Env) -> StdResult<RoundDataResponse> {\n\n let Phase {\n\n aggregator_addr,\n\n id,\n\n } = CURRENT_PHASE.load(deps.storage)?;\n\n let res: RoundDataResponse = deps\n\n .querier\n\n .query_wasm_smart(aggregator_addr, &GetLatestRoundData {}.wrap())?;\n\n Ok(add_phase_ids(res, id))\n\n}\n\n\n", "file_path": "contracts/aggregator-proxy/src/contract.rs", "rank": 18, "score": 253662.14923085365 }, { "content": "pub fn get_proposed_latest_round_data(deps: Deps, _env: Env) -> StdResult<RoundDataResponse> {\n\n let proposed = get_proposed(deps.storage)?;\n\n deps.querier\n\n .query_wasm_smart(proposed, &GetLatestRoundData {}.wrap())\n\n}\n\n\n", "file_path": "contracts/aggregator-proxy/src/contract.rs", "rank": 19, "score": 251083.83799545155 }, { "content": "pub fn get_round_data(deps: Deps, _env: Env, round_id: u32) -> StdResult<RoundDataResponse> {\n\n let phase_id: u16 = (round_id >> PHASE_OFFSET.u128())\n\n .try_into()\n\n // TODO improve error\n\n .map_err(|_| StdError::generic_err(\"Failed parse\"))?;\n\n let aggregator = PHASE_AGGREGATORS.load(deps.storage, phase_id.into())?;\n\n let res: RoundDataResponse = deps\n\n .querier\n\n .query_wasm_smart(aggregator, &GetRoundData { round_id }.wrap())?;\n\n Ok(add_phase_ids(res, phase_id))\n\n}\n\n\n", "file_path": "contracts/aggregator-proxy/src/contract.rs", "rank": 20, "score": 241206.75697891082 }, { "content": "pub fn get_round_data(deps: Deps, _env: Env, round_id: u32) -> StdResult<RoundDataResponse> {\n\n let round = ROUNDS.load(deps.storage, round_id.into())?;\n\n if round.answered_in_round == 0 {\n\n return Err(StdError::generic_err(ContractError::NoData {}.to_string()));\n\n }\n\n Ok(RoundDataResponse {\n\n round_id,\n\n answer: round.answer,\n\n started_at: round.started_at,\n\n updated_at: round.updated_at,\n\n answered_in_round: round.answered_in_round,\n\n })\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 21, "score": 241206.75697891077 }, { "content": "pub fn get_latest_round_data(deps: Deps, env: Env) -> StdResult<RoundDataResponse> {\n\n let round_id = LATEST_ROUND_ID.load(deps.storage)?;\n\n\n\n get_round_data(deps, env, round_id)\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 22, "score": 240853.5395207976 }, { "content": "pub fn get_description(deps: Deps, _env: Env) -> StdResult<String> {\n\n CONFIG.load(deps.storage).map(|config| config.description)\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 23, "score": 227053.05605908204 }, { "content": "pub fn get_aggregator(deps: Deps, _env: Env) -> StdResult<Addr> {\n\n CURRENT_PHASE\n\n .load(deps.storage)\n\n .map(|phase| phase.aggregator_addr)\n\n}\n\n\n", "file_path": "contracts/aggregator-proxy/src/contract.rs", "rank": 24, "score": 227053.05605908204 }, { "content": "pub fn get_description(deps: Deps, _env: Env) -> StdResult<String> {\n\n query_current(deps, GetDescription {})\n\n}\n\n\n", "file_path": "contracts/aggregator-proxy/src/contract.rs", "rank": 25, "score": 227053.05605908204 }, { "content": "pub fn get_proposed_aggregator(deps: Deps, _env: Env) -> StdResult<Addr> {\n\n PROPOSED_AGGREGATOR.load(deps.storage)\n\n}\n\n\n", "file_path": "contracts/aggregator-proxy/src/contract.rs", "rank": 26, "score": 224654.83471330127 }, { "content": "pub fn get_phase_id(deps: Deps, _env: Env) -> StdResult<u16> {\n\n Ok(CURRENT_PHASE.load(deps.storage)?.id)\n\n}\n\n\n", "file_path": "contracts/aggregator-proxy/src/contract.rs", "rank": 27, "score": 224654.83471330127 }, { "content": "pub fn get_aggregator_config(deps: Deps, _env: Env) -> StdResult<ConfigResponse> {\n\n let config = CONFIG.load(deps.storage)?;\n\n Ok(ConfigResponse {\n\n link: config.link,\n\n validator: config.validator,\n\n payment_amount: config.payment_amount,\n\n max_submission_count: config.min_submission_count,\n\n min_submission_count: config.max_submission_count,\n\n restart_delay: config.restart_delay,\n\n timeout: config.timeout,\n\n decimals: config.decimals,\n\n description: config.description,\n\n min_submission_value: config.min_submission_value,\n\n max_submission_value: config.max_submission_value,\n\n })\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 28, "score": 222339.18316649587 }, { "content": "pub fn get_phase_aggregators(deps: Deps, _env: Env) -> StdResult<PhaseAggregators> {\n\n PHASE_AGGREGATORS\n\n .range(deps.storage, None, None, Order::Ascending)\n\n .map(|entry| {\n\n entry.map(|aggregator| {\n\n (\n\n u16::from_be_bytes(aggregator.0.as_slice().try_into().unwrap()),\n\n aggregator.1,\n\n )\n\n })\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "contracts/aggregator-proxy/src/contract.rs", "rank": 29, "score": 222339.18316649593 }, { "content": "pub fn get_oracles(deps: Deps, _env: Env) -> StdResult<Vec<Addr>> {\n\n ORACLE_ADDRESSES.load(deps.storage)\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 30, "score": 220798.09649327907 }, { "content": "pub fn get_latest_answer(deps: Deps, _env: Env) -> StdResult<LatestAnswerResponse> {\n\n query_current(deps, GetLatestAnswer {})\n\n}\n\n\n", "file_path": "contracts/aggregator-proxy/src/contract.rs", "rank": 31, "score": 220101.7614540949 }, { "content": "fn transfer_ownership(deps: DepsMut, _env: Env, to: Addr) -> Result<Vec<Attribute>, ContractError> {\n\n owner(deps.storage).update(|mut state| -> StdResult<_> {\n\n state.pending_owner = Some(to.clone());\n\n\n\n Ok(state)\n\n })?;\n\n\n\n Ok(vec![\n\n attr(\"action\", \"ownership transferred\"),\n\n attr(\"pending_owner\", to),\n\n ])\n\n}\n\n\n", "file_path": "contracts/owned/src/contract.rs", "rank": 32, "score": 219254.7935760589 }, { "content": "pub fn get_admin(deps: Deps, _env: Env, oracle: String) -> StdResult<Addr> {\n\n let addr = deps.api.addr_validate(&oracle)?;\n\n let oracle = ORACLES.load(deps.storage, &addr)?;\n\n Ok(oracle.admin)\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 34, "score": 214960.4395927143 }, { "content": "pub fn get_oracle_status(deps: Deps, _env: Env, oracle: String) -> StdResult<OracleStatus> {\n\n let addr = deps.api.addr_validate(&oracle)?;\n\n ORACLES.load(deps.storage, &addr)\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 35, "score": 210559.79438861273 }, { "content": "pub fn get_latest_answer(deps: Deps, env: Env) -> StdResult<LatestAnswerResponse> {\n\n get_latest_round_data(deps, env).map(|round| LatestAnswerResponse(round.answer))\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 36, "score": 207293.1517440389 }, { "content": "pub fn owner(storage: &mut dyn Storage) -> Singleton<State> {\n\n singleton(storage, OWNER_KEY)\n\n}\n\n\n", "file_path": "contracts/owned/src/state.rs", "rank": 37, "score": 195016.0743852068 }, { "content": "fn validate_ownership(deps: Deps, info: &MessageInfo) -> Result<(), ContractError> {\n\n let owner = get_owner(deps)?;\n\n if info.sender != owner {\n\n return Err(ContractError::NotOwner {});\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use cosmwasm_std::{OverflowError, OverflowOperation};\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_prev_round_id() {\n\n assert_eq!(prev_round_id(1), Ok(0));\n\n assert_eq!(\n\n prev_round_id(0),\n\n Err(StdError::overflow(OverflowError::new(\n\n OverflowOperation::Sub,\n\n 0.to_string(),\n\n 1.to_string()\n\n )))\n\n );\n\n }\n\n}\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 38, "score": 191407.36523825815 }, { "content": "fn validate_ownership(deps: Deps, info: &MessageInfo) -> Result<(), ContractError> {\n\n let owner = get_owner(deps)?;\n\n if info.sender != owner {\n\n return Err(ContractError::NotOwner {});\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "contracts/aggregator-proxy/src/contract.rs", "rank": 39, "score": 191407.36523825815 }, { "content": "pub fn get_owner(deps: Deps) -> StdResult<Addr> {\n\n let owner = owner_read(deps.storage).load()?.owner;\n\n\n\n Ok(owner)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use cosmwasm_std::testing::{mock_dependencies, mock_env, mock_info, MOCK_CONTRACT_ADDR};\n\n use cosmwasm_std::{coins, Api};\n\n\n\n #[test]\n\n fn proper_initialization() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = InstantiateMsg {};\n\n let info = mock_info(\"creator\", &coins(1000, \"earth\"));\n\n\n\n let sender = info.clone().sender;\n", "file_path": "contracts/owned/src/contract.rs", "rank": 40, "score": 189023.43328688372 }, { "content": "fn required_reserve(payment: Uint128, oracle_count: u8) -> Uint128 {\n\n Uint128::new(payment.u128() * oracle_count as u128 * RESERVE_ROUNDS)\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 43, "score": 172865.8492315264 }, { "content": "pub fn execute_set_flags_address(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n flags: Addr,\n\n) -> Result<Response, ContractError> {\n\n validate_ownership(deps.as_ref(), &env, info)?;\n\n let previous = CONFIG.load(deps.storage)?.flags;\n\n if previous != flags {\n\n CONFIG.update(deps.storage, |mut state| -> StdResult<_> {\n\n state.flags = flags.clone();\n\n Ok(state)\n\n })?;\n\n }\n\n\n\n Ok(Response::new()\n\n .add_attribute(\"action\", \"flags_address_updated\")\n\n .add_attribute(\"previous\", previous))\n\n}\n\n\n", "file_path": "contracts/deviation-flagging-validator/src/contract.rs", "rank": 44, "score": 167124.83768977548 }, { "content": "fn is_valid(deps: Deps, previous_answer: Uint128, answer: Uint128) -> StdResult<bool> {\n\n if previous_answer == Uint128::zero() {\n\n Ok(true)\n\n } else {\n\n let flagging_threshold = CONFIG.load(deps.storage)?.flagging_threshold;\n\n let change = if previous_answer.u128() > answer.u128() {\n\n previous_answer.u128() - answer.u128()\n\n } else {\n\n answer.u128() - previous_answer.u128()\n\n };\n\n //Uint128::from(previous_answer.u128() - answer.u128());\n\n let ratio_numerator = change * THRESHOLD_MULTIPLIER;\n\n let ratio = ratio_numerator / previous_answer.u128();\n\n Ok(ratio <= flagging_threshold as u128)\n\n }\n\n}\n\n\n", "file_path": "contracts/deviation-flagging-validator/src/contract.rs", "rank": 45, "score": 165708.611956934 }, { "content": "pub fn get_rac(deps: Deps) -> Result<Addr, ContractError> {\n\n let raising_access_controller = config_read(deps.storage).load()?.raising_access_controller;\n\n Ok(raising_access_controller)\n\n}\n\n\n", "file_path": "contracts/flags/src/contract.rs", "rank": 46, "score": 157450.50051851998 }, { "content": "pub fn contract_link_token() -> Box<dyn Contract<Empty>> {\n\n let contract = ContractWrapper::new(execute, instantiate, query);\n\n Box::new(contract)\n\n}\n\n\n", "file_path": "contracts/link-token/src/integration_tests/mod.rs", "rank": 47, "score": 156473.7004736288 }, { "content": "#[entry_point]\n\npub fn instantiate(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n _msg: InstantiateMsg,\n\n) -> Result<Response, owned::error::ContractError> {\n\n // config(deps.storage).save(&State {\n\n // raising_access_controller: msg.rac_address,\n\n // })?;\n\n owned_init(deps, env, info, owned::msg::InstantiateMsg {})?;\n\n\n\n Ok(Response::default())\n\n}\n\n\n", "file_path": "contracts/flags/src/contract.rs", "rank": 48, "score": 149890.11369154596 }, { "content": "pub fn instantiate(\n\n deps: DepsMut,\n\n _env: Env,\n\n info: MessageInfo,\n\n _msg: InstantiateMsg,\n\n) -> Result<Response, ContractError> {\n\n let state = State {\n\n owner: info.sender,\n\n pending_owner: None,\n\n };\n\n\n\n owner(deps.storage).save(&state)?;\n\n\n\n Ok(Response::default())\n\n}\n\n\n", "file_path": "contracts/owned/src/contract.rs", "rank": 49, "score": 149890.11369154596 }, { "content": "pub fn execute(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n msg: ExecuteMsg,\n\n) -> Result<Response, ContractError> {\n\n match msg {\n\n ExecuteMsg::TransferOwnership { to } => execute_transfer_ownership(deps, env, info, to),\n\n ExecuteMsg::AcceptOwnership {} => execute_accept_ownership(deps, env, info),\n\n }\n\n}\n\n\n", "file_path": "contracts/owned/src/contract.rs", "rank": 50, "score": 149446.0008089958 }, { "content": "#[entry_point]\n\npub fn execute(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n msg: ExecuteMsg,\n\n) -> Result<Response, ContractError> {\n\n match msg {\n\n ExecuteMsg::RaiseFlag { subject } => execute_raise_flag(deps, env, info, subject),\n\n ExecuteMsg::RaiseFlags { subjects } => execute_raise_flags(deps, env, info, subjects),\n\n ExecuteMsg::LowerFlags { subjects } => execute_lower_flags(deps, env, info, subjects),\n\n ExecuteMsg::SetRaisingAccessController { rac_address } => {\n\n execute_set_raising_access_controller(deps, env, info, rac_address)\n\n }\n\n ExecuteMsg::TransferOwnership { to } => {\n\n execute_transfer_ownership(deps, env, info, to).map_err(ContractError::from)\n\n }\n\n ExecuteMsg::AcceptOwnership {} => {\n\n execute_accept_ownership(deps, env, info).map_err(ContractError::from)\n\n }\n\n }\n\n}\n\n\n", "file_path": "contracts/flags/src/contract.rs", "rank": 51, "score": 149446.0008089958 }, { "content": "pub fn get_flag(deps: Deps, subject: String) -> Result<bool, ContractError> {\n\n check_access(deps)?;\n\n let subject = deps.api.addr_validate(&subject)?;\n\n Ok(FLAGS.load(deps.storage, &subject)?)\n\n}\n\n\n", "file_path": "contracts/flags/src/contract.rs", "rank": 52, "score": 149341.24847435608 }, { "content": "pub fn query_flagging_threshold(deps: Deps) -> StdResult<FlaggingThresholdResponse> {\n\n let flagging_threshold = CONFIG.load(deps.storage)?.flagging_threshold;\n\n Ok(FlaggingThresholdResponse {\n\n threshold: flagging_threshold,\n\n })\n\n}\n\n\n", "file_path": "contracts/deviation-flagging-validator/src/contract.rs", "rank": 53, "score": 149219.88760763023 }, { "content": "pub fn contract_link_token() -> Box<dyn Contract<Empty>> {\n\n let contract = ContractWrapper::new(\n\n cw20_base::contract::execute,\n\n cw20_base::contract::instantiate,\n\n cw20_base::contract::query,\n\n );\n\n Box::new(contract)\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/integration_tests.rs", "rank": 54, "score": 148893.9064936688 }, { "content": "pub fn instantiate(\n\n mut deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n msg: InstantiateMsg,\n\n) -> Result<Response, ContractError> {\n\n if msg.min_submission_value > msg.max_submission_value {\n\n return Err(ContractError::MinGreaterThanMax {});\n\n }\n\n ORACLE_ADDRESSES.save(deps.storage, &vec![])?;\n\n RECORDED_FUNDS.save(deps.storage, &Funds::default())?;\n\n REPORTING_ROUND_ID.save(deps.storage, &0)?;\n\n\n\n let link = deps.api.addr_validate(&msg.link)?;\n\n let validator = deps.api.addr_validate(&msg.validator)?;\n\n\n\n owned_init(\n\n deps.branch(),\n\n env.clone(),\n\n info,\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 55, "score": 146977.5684839843 }, { "content": "pub fn instantiate(\n\n mut deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n msg: InstantiateMsg,\n\n) -> Result<Response, ContractError> {\n\n owned_instantiate(deps.branch(), env, info, owned::msg::InstantiateMsg {})?;\n\n\n\n let aggregator_addr = deps.api.addr_validate(&msg.aggregator)?;\n\n\n\n PHASE_AGGREGATORS.save(deps.storage, 1.into(), &aggregator_addr)?;\n\n CURRENT_PHASE.save(\n\n deps.storage,\n\n &Phase {\n\n id: 1,\n\n aggregator_addr,\n\n },\n\n )?;\n\n\n\n Ok(Response::default())\n\n}\n\n\n", "file_path": "contracts/aggregator-proxy/src/contract.rs", "rank": 56, "score": 146977.5684839843 }, { "content": "pub fn execute(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n msg: ExecuteMsg,\n\n) -> Result<Response, ContractError> {\n\n match msg {\n\n ExecuteMsg::Submit {\n\n round_id,\n\n submission,\n\n } => execute_submit(deps, env, info, round_id, submission),\n\n ExecuteMsg::ChangeOracles {\n\n removed,\n\n added,\n\n added_admins,\n\n min_submissions,\n\n max_submissions,\n\n restart_delay,\n\n } => execute_change_oracles(\n\n deps,\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 57, "score": 146545.09636801408 }, { "content": "pub fn execute(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n msg: ExecuteMsg,\n\n) -> Result<Response, ContractError> {\n\n match msg {\n\n ExecuteMsg::ProposeAggregator { aggregator } => {\n\n execute_propose_aggregator(deps, env, info, aggregator)\n\n }\n\n ExecuteMsg::ConfirmAggregator { aggregator } => {\n\n execute_confirm_aggregator(deps, env, info, aggregator)\n\n }\n\n ExecuteMsg::TransferOwnership { to } => {\n\n execute_transfer_ownership(deps, env, info, to.to_string()).map_err(ContractError::from)\n\n }\n\n ExecuteMsg::AcceptOwnership {} => {\n\n execute_accept_ownership(deps, env, info).map_err(ContractError::from)\n\n }\n\n }\n\n}\n\n\n", "file_path": "contracts/aggregator-proxy/src/contract.rs", "rank": 58, "score": 146545.09636801408 }, { "content": "pub fn contract_receiver_mock() -> Box<dyn Contract<Empty>> {\n\n let contract = ContractWrapper::new(\n\n |_, _, _, msg: MockHandleMsg| -> StdResult<Response> {\n\n match msg {\n\n MockHandleMsg::Receive(Cw20ReceiveMsg {\n\n sender: _,\n\n amount: _,\n\n msg,\n\n }) => {\n\n let received: PingMsg = from_binary(&msg)?;\n\n Ok(Response::new()\n\n .add_attribute(\"action\", \"pong\")\n\n .set_data(to_binary(&received.payload)?))\n\n }\n\n }\n\n },\n\n |_, _, _, _: MockInstantiateMsg| -> StdResult<Response> { Ok(Response::default()) },\n\n |_, _, _: MockQueryMsg| -> StdResult<Binary> { unimplemented!() },\n\n );\n\n Box::new(contract)\n\n}\n", "file_path": "contracts/link-token/src/integration_tests/receiver_mock.rs", "rank": 59, "score": 145045.51182927255 }, { "content": "pub fn instantiate(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n msg: InstantiateMsg,\n\n) -> Result<Response, ContractError> {\n\n let flags = deps.api.addr_validate(&msg.flags)?;\n\n let state = State {\n\n flags,\n\n flagging_threshold: msg.flagging_threshold,\n\n };\n\n\n\n CONFIG.save(deps.storage, &state)?;\n\n owned_init(deps, env, info, owned::msg::InstantiateMsg {})?;\n\n\n\n Ok(Response::default())\n\n}\n\n\n", "file_path": "contracts/deviation-flagging-validator/src/contract.rs", "rank": 60, "score": 144208.2625269094 }, { "content": "pub fn execute_transfer_ownership(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n to: String,\n\n) -> Result<Response, ContractError> {\n\n let to = deps.api.addr_validate(&to)?;\n\n let sender = info.sender;\n\n let owner = owner_read(deps.storage).load()?.owner;\n\n if sender != owner {\n\n return Err(ContractError::OnlyOwner {});\n\n }\n\n\n\n let attributes = transfer_ownership(deps, env, to)?;\n\n\n\n Ok(Response::new().add_attributes(attributes))\n\n}\n\n\n", "file_path": "contracts/owned/src/contract.rs", "rank": 61, "score": 143786.8365253564 }, { "content": "pub fn execute(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n msg: ExecuteMsg,\n\n) -> Result<Response, ContractError> {\n\n match msg {\n\n ExecuteMsg::SetFlagsAddress { flags } => execute_set_flags_address(deps, env, info, flags),\n\n ExecuteMsg::SetFlaggingThreshold { threshold } => {\n\n execute_set_flagging_threshold(deps, env, info, threshold)\n\n }\n\n ExecuteMsg::Validate {\n\n previous_round_id,\n\n previous_answer,\n\n round_id,\n\n answer,\n\n } => execute_validate(\n\n deps,\n\n env,\n\n info,\n", "file_path": "contracts/deviation-flagging-validator/src/contract.rs", "rank": 62, "score": 143786.8365253564 }, { "content": "pub fn execute_raise_flags(\n\n deps: DepsMut,\n\n _env: Env,\n\n _info: MessageInfo,\n\n subjects: Vec<String>,\n\n) -> Result<Response, ContractError> {\n\n check_access(deps.as_ref())?;\n\n\n\n let subjects = subjects\n\n .iter()\n\n .map(|subject| deps.api.addr_validate(subject))\n\n .collect::<Result<Vec<Addr>, _>>()?;\n\n\n\n let mut attributes = vec![];\n\n for subject in subjects {\n\n if FLAGS.may_load(deps.as_ref().storage, &subject)? == Some(true) {\n\n attributes.extend_from_slice(&[\n\n attr(\"action\", \"already raised flag\"),\n\n attr(\"subject\", subject),\n\n ]);\n\n } else {\n\n FLAGS.save(deps.storage, &subject, &true)?;\n\n attributes\n\n .extend_from_slice(&[attr(\"action\", \"flag raised\"), attr(\"subject\", subject)]);\n\n }\n\n }\n\n Ok(Response::new().add_attributes(attributes))\n\n}\n\n\n", "file_path": "contracts/flags/src/contract.rs", "rank": 63, "score": 143786.8365253564 }, { "content": "pub fn execute_accept_ownership(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n) -> Result<Response, ContractError> {\n\n let sender = info.clone().sender;\n\n let pending_owner = owner_read(deps.storage).load()?.pending_owner;\n\n\n\n if sender != pending_owner.unwrap() {\n\n return Err(ContractError::MustBeProposed {});\n\n }\n\n\n\n let logs = accept_ownership(deps, env, info)?;\n\n\n\n Ok(Response::new().add_attributes(logs))\n\n}\n\n\n", "file_path": "contracts/owned/src/contract.rs", "rank": 64, "score": 143786.8365253564 }, { "content": "pub fn execute_raise_flag(\n\n deps: DepsMut,\n\n _env: Env,\n\n _info: MessageInfo,\n\n subject: String,\n\n) -> Result<Response, ContractError> {\n\n check_access(deps.as_ref())?;\n\n let subject = deps.api.addr_validate(&subject)?;\n\n if FLAGS.may_load(deps.as_ref().storage, &subject)? == Some(true) {\n\n Ok(Response::new().add_attributes(vec![\n\n attr(\"action\", \"already raised flag\"),\n\n attr(\"subject\", subject),\n\n ]))\n\n } else {\n\n FLAGS.save(deps.storage, &subject, &true)?;\n\n Ok(Response::new().add_attributes(vec![\n\n attr(\"action\", \"raised flag\"),\n\n attr(\"subject\", subject),\n\n ]))\n\n }\n\n}\n\n\n", "file_path": "contracts/flags/src/contract.rs", "rank": 65, "score": 143786.8365253564 }, { "content": "pub fn execute_lower_flags(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n subjects: Vec<String>,\n\n) -> Result<Response, ContractError> {\n\n validate_ownership(deps.as_ref(), &env, info)?;\n\n\n\n let subjects = subjects\n\n .iter()\n\n .map(|subject| deps.api.addr_validate(subject))\n\n .collect::<Result<Vec<Addr>, _>>()?;\n\n\n\n let mut attributes = vec![];\n\n for subject in subjects {\n\n if FLAGS.may_load(deps.storage, &subject)? == Some(true) {\n\n FLAGS.save(deps.storage, &subject, &false)?;\n\n attributes\n\n .extend_from_slice(&[attr(\"action\", \"flag lowered\"), attr(\"address\", subject)]);\n\n }\n\n }\n\n Ok(Response::new().add_attributes(attributes))\n\n}\n\n\n", "file_path": "contracts/flags/src/contract.rs", "rank": 66, "score": 143786.8365253564 }, { "content": "pub fn execute_submit(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n round_id: u32,\n\n submission: Uint128,\n\n) -> Result<Response, ContractError> {\n\n let Config {\n\n min_submission_value,\n\n max_submission_value,\n\n min_submission_count,\n\n max_submission_count,\n\n restart_delay,\n\n timeout,\n\n payment_amount,\n\n validator,\n\n ..\n\n } = CONFIG.load(deps.storage)?;\n\n if submission < min_submission_value {\n\n return Err(ContractError::UnderMin {});\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 67, "score": 143786.8365253564 }, { "content": "pub fn execute_receive(\n\n deps: DepsMut,\n\n env: Env,\n\n _info: MessageInfo,\n\n receive_msg: Cw20ReceiveMsg,\n\n) -> Result<Response, ContractError> {\n\n if !receive_msg.msg.is_empty() {\n\n return Err(ContractError::UnexpectedReceivePayload {});\n\n }\n\n\n\n let link_addr = CONFIG.load(deps.storage)?.link;\n\n let BalanceResponse { balance } = deps.querier.query_wasm_smart(\n\n link_addr,\n\n &LinkQuery::Balance {\n\n address: env.contract.address.to_string(),\n\n },\n\n )?;\n\n\n\n match update_available_funds(deps, balance)? {\n\n Some(now_available) => Ok(Response::new()\n\n .add_attribute(\"action\", \"update_available_funds\")\n\n .add_attribute(\"amount\", now_available)),\n\n None => Ok(Response::default()),\n\n }\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 68, "score": 143786.8365253564 }, { "content": "pub fn config(storage: &mut dyn Storage) -> Singleton<State> {\n\n singleton(storage, CONFIG_KEY)\n\n}\n\n\n", "file_path": "contracts/flags/src/state.rs", "rank": 69, "score": 142989.7522112572 }, { "content": "pub fn get_flags(deps: Deps, subjects: Vec<String>) -> Result<Vec<bool>, ContractError> {\n\n check_access(deps)?;\n\n\n\n let subjects = subjects\n\n .iter()\n\n .map(|subject| deps.api.addr_validate(subject))\n\n .collect::<Result<Vec<Addr>, _>>()?;\n\n\n\n let flags = subjects\n\n .iter()\n\n .filter_map(|subject| {\n\n let flag = FLAGS.load(deps.storage, subject).ok()?;\n\n Some(flag)\n\n })\n\n .collect();\n\n Ok(flags)\n\n}\n\n\n", "file_path": "contracts/flags/src/contract.rs", "rank": 70, "score": 142213.2051506529 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn execute_change_oracles(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n removed: Vec<String>,\n\n added: Vec<String>,\n\n added_admins: Vec<String>,\n\n min_submissions: u32,\n\n max_submissions: u32,\n\n restart_delay: u32,\n\n) -> Result<Response, ContractError> {\n\n validate_ownership(deps.as_ref(), &info)?;\n\n\n\n let mut response = Response::new();\n\n\n\n for oracle in removed.iter() {\n\n let oracle = deps.api.addr_validate(oracle)?;\n\n remove_oracle(deps.storage, oracle)?;\n\n }\n\n\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 71, "score": 141164.71941723293 }, { "content": "pub fn execute_withdraw_funds(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n recipient: String,\n\n amount: Uint128,\n\n) -> Result<Response, ContractError> {\n\n validate_ownership(deps.as_ref(), &info)?;\n\n\n\n let funds = RECORDED_FUNDS.load(deps.storage)?;\n\n let payment_amount = CONFIG.load(deps.storage)?.payment_amount;\n\n let oracle_count = get_oracle_count(deps.as_ref(), env)?;\n\n let available = funds\n\n .available\n\n .checked_sub(required_reserve(payment_amount, oracle_count))\n\n .map_err(StdError::from)?;\n\n\n\n if available < amount {\n\n return Err(ContractError::InsufficientReserveFunds {});\n\n }\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 72, "score": 141160.82247829132 }, { "content": "pub fn execute_withdraw_payment(\n\n deps: DepsMut,\n\n _env: Env,\n\n info: MessageInfo,\n\n oracle: String,\n\n recipient: String,\n\n amount: Uint128,\n\n) -> Result<Response, ContractError> {\n\n let oracle = deps.api.addr_validate(&oracle)?;\n\n let oracle_status = ORACLES.load(deps.storage, &oracle)?;\n\n\n\n if oracle_status.admin != info.sender {\n\n return Err(ContractError::NotAdmin {});\n\n }\n\n if oracle_status.withdrawable < amount {\n\n return Err(ContractError::InsufficientWithdrawableFunds {});\n\n }\n\n\n\n ORACLES.save(\n\n deps.storage,\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 73, "score": 141160.82247829132 }, { "content": "pub fn execute_propose_aggregator(\n\n deps: DepsMut,\n\n _env: Env,\n\n info: MessageInfo,\n\n aggregator: String,\n\n) -> Result<Response, ContractError> {\n\n validate_ownership(deps.as_ref(), &info)?;\n\n\n\n let aggregator_addr = deps.api.addr_validate(&aggregator)?;\n\n PROPOSED_AGGREGATOR.save(deps.storage, &aggregator_addr)?;\n\n\n\n Ok(Response::default())\n\n}\n\n\n", "file_path": "contracts/aggregator-proxy/src/contract.rs", "rank": 74, "score": 141160.82247829132 }, { "content": "pub fn execute_transfer_admin(\n\n deps: DepsMut,\n\n _env: Env,\n\n info: MessageInfo,\n\n oracle: String,\n\n new_admin: String,\n\n) -> Result<Response, ContractError> {\n\n let oracle_addr = deps.api.addr_validate(&oracle)?;\n\n let new_admin_addr = deps.api.addr_validate(&new_admin)?;\n\n\n\n ORACLES.update(deps.storage, &oracle_addr, |status| {\n\n let mut status = status.unwrap();\n\n if status.admin != info.sender {\n\n return Err(ContractError::NotAdmin {});\n\n }\n\n status.pending_admin = Some(new_admin_addr);\n\n\n\n Ok(status)\n\n })?;\n\n\n\n Ok(Response::new()\n\n .add_attribute(\"action\", \"transfer_admin\")\n\n .add_attribute(\"oracle\", oracle)\n\n .add_attribute(\"sender\", info.sender)\n\n .add_attribute(\"new_admin\", new_admin))\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 75, "score": 141160.82247829132 }, { "content": "pub fn execute_accept_admin(\n\n deps: DepsMut,\n\n _env: Env,\n\n info: MessageInfo,\n\n oracle: String,\n\n) -> Result<Response, ContractError> {\n\n let oracle_addr = deps.api.addr_validate(&oracle)?;\n\n\n\n ORACLES.update(deps.storage, &oracle_addr, |status| {\n\n let mut status = status.unwrap();\n\n if let Some(pending_admin) = status.pending_admin {\n\n if pending_admin != info.sender.clone() {\n\n return Err(ContractError::NotPendingAdmin {});\n\n }\n\n status.admin = info.sender.clone();\n\n status.pending_admin = None;\n\n\n\n Ok(status)\n\n } else {\n\n Err(ContractError::PendingAdminMissing {})\n\n }\n\n })?;\n\n\n\n Ok(Response::new()\n\n .add_attribute(\"oracle_admin_updated\", oracle)\n\n .add_attribute(\"new_admin\", info.sender))\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 76, "score": 141160.82247829132 }, { "content": "pub fn execute_confirm_aggregator(\n\n deps: DepsMut,\n\n _env: Env,\n\n info: MessageInfo,\n\n aggregator: String,\n\n) -> Result<Response, ContractError> {\n\n validate_ownership(deps.as_ref(), &info)?;\n\n\n\n let mut response = Response::new();\n\n\n\n let aggregator_addr = deps.api.addr_validate(&aggregator)?;\n\n\n\n let proposed = PROPOSED_AGGREGATOR\n\n .may_load(deps.storage)?\n\n .ok_or(ContractError::InvalidProposedAggregator {})?;\n\n if proposed != aggregator_addr {\n\n return Err(ContractError::InvalidProposedAggregator {});\n\n }\n\n\n\n PROPOSED_AGGREGATOR.remove(deps.storage);\n", "file_path": "contracts/aggregator-proxy/src/contract.rs", "rank": 77, "score": 141160.82247829132 }, { "content": "pub fn execute_validate(\n\n deps: DepsMut,\n\n _env: Env,\n\n info: MessageInfo,\n\n _previous_round_id: u32,\n\n previous_answer: Uint128,\n\n _round_id: u32,\n\n answer: Uint128,\n\n) -> Result<Response, ContractError> {\n\n if !(is_valid(deps.as_ref(), previous_answer, answer)?) {\n\n let flags = CONFIG.load(deps.storage)?.flags;\n\n let raise_flag_msg = WasmMsg::Execute {\n\n contract_addr: String::from(flags),\n\n msg: to_binary(&FlagsMsg::RaiseFlag {\n\n subject: info.sender.to_string(),\n\n })?,\n\n funds: vec![],\n\n };\n\n Ok(Response::new()\n\n .add_message(raise_flag_msg)\n", "file_path": "contracts/deviation-flagging-validator/src/contract.rs", "rank": 78, "score": 141160.82247829132 }, { "content": "pub fn execute_set_validator(\n\n deps: DepsMut,\n\n _env: Env,\n\n info: MessageInfo,\n\n validator: String,\n\n) -> Result<Response, ContractError> {\n\n validate_ownership(deps.as_ref(), &info)?;\n\n\n\n let validator_addr = deps.api.addr_validate(&validator)?;\n\n let old_validator = CONFIG.load(deps.storage)?.validator;\n\n if old_validator == validator_addr {\n\n return Ok(Response::default());\n\n }\n\n\n\n CONFIG.update(deps.storage, |config| -> StdResult<_> {\n\n Ok(Config {\n\n validator: validator_addr,\n\n ..config\n\n })\n\n })?;\n\n\n\n Ok(Response::new()\n\n .add_attribute(\"action\", \"validator_updated\")\n\n .add_attribute(\"previous\", old_validator.to_string())\n\n .add_attribute(\"new\", validator))\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 79, "score": 141160.82247829132 }, { "content": "pub fn get_proposed_round_data(\n\n deps: Deps,\n\n _env: Env,\n\n round_id: u32,\n\n) -> StdResult<RoundDataResponse> {\n\n let proposed = get_proposed(deps.storage)?;\n\n deps.querier\n\n .query_wasm_smart(proposed, &GetRoundData { round_id }.wrap())\n\n}\n\n\n", "file_path": "contracts/aggregator-proxy/src/contract.rs", "rank": 80, "score": 139041.6335557802 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn execute_update_future_rounds(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n payment_amount: Uint128,\n\n min_submissions: u32,\n\n max_submissions: u32,\n\n restart_delay: u32,\n\n timeout: u32,\n\n) -> Result<Response, ContractError> {\n\n validate_ownership(deps.as_ref(), &info)?;\n\n\n\n let oracle_count = get_oracle_count(deps.as_ref(), env)?;\n\n\n\n if min_submissions > max_submissions {\n\n return Err(ContractError::MinGreaterThanMax {});\n\n }\n\n if (oracle_count as u32) < max_submissions {\n\n return Err(ContractError::MaxGreaterThanTotal {});\n\n }\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 81, "score": 138661.547235002 }, { "content": "pub fn execute_update_available_funds(\n\n deps: DepsMut,\n\n env: Env,\n\n _info: MessageInfo,\n\n) -> Result<Response, ContractError> {\n\n let link_addr = CONFIG.load(deps.storage)?.link;\n\n let prev_available: BalanceResponse = deps.querier.query_wasm_smart(\n\n link_addr,\n\n &LinkQuery::Balance {\n\n address: env.contract.address.to_string(),\n\n },\n\n )?;\n\n\n\n match update_available_funds(deps, prev_available.balance)? {\n\n Some(now_available) => Ok(Response::new()\n\n .add_attribute(\"action\", \"update_available_funds\")\n\n .add_attribute(\"amount\", now_available)),\n\n None => Ok(Response::default()),\n\n }\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 82, "score": 138657.6502960604 }, { "content": "pub fn execute_request_new_round(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n) -> Result<Response, ContractError> {\n\n let requester = REQUESTERS\n\n .may_load(deps.storage, &info.sender)?\n\n .ok_or(ContractError::Unauthorized {})?;\n\n if !requester.authorized {\n\n return Err(ContractError::Unauthorized {});\n\n }\n\n let current_round_id = REPORTING_ROUND_ID.load(deps.storage)?;\n\n let current_round = ROUNDS.load(deps.storage, current_round_id.into())?;\n\n let timestamp = timestamp_to_seconds(env.block.time);\n\n if current_round.updated_at.is_none() && !timed_out(deps.storage, current_round_id, timestamp)?\n\n {\n\n return Err(ContractError::NotSupersedable {});\n\n }\n\n\n\n let new_round_id = current_round_id + 1;\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 83, "score": 138657.6502960604 }, { "content": "pub fn execute_set_raising_access_controller(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n rac_address: String,\n\n) -> Result<Response, ContractError> {\n\n validate_ownership(deps.as_ref(), &env, info)?;\n\n\n\n let new_rac = deps.api.addr_validate(&rac_address)?;\n\n let prev_rac = config_read(deps.storage).load()?.raising_access_controller;\n\n config(deps.storage).save(&State {\n\n raising_access_controller: new_rac,\n\n })?;\n\n Ok(Response::new().add_attributes(vec![\n\n attr(\"action\", \"raising access controller updated\"),\n\n attr(\"address\", rac_address),\n\n attr(\"previous\", prev_rac),\n\n ]))\n\n}\n\n\n", "file_path": "contracts/flags/src/contract.rs", "rank": 84, "score": 138657.6502960604 }, { "content": "pub fn execute_set_requester_permissions(\n\n deps: DepsMut,\n\n _env: Env,\n\n info: MessageInfo,\n\n requester: String,\n\n authorized: bool,\n\n delay: u32,\n\n) -> Result<Response, ContractError> {\n\n validate_ownership(deps.as_ref(), &info)?;\n\n\n\n let requester_addr = deps.api.addr_validate(&requester)?;\n\n let curr_requester = REQUESTERS\n\n .may_load(deps.storage, &requester_addr)?\n\n .unwrap_or_default();\n\n\n\n if curr_requester.authorized == authorized {\n\n return Ok(Response::default());\n\n }\n\n if authorized {\n\n REQUESTERS.save(\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 85, "score": 138657.6502960604 }, { "content": "pub fn execute_set_flagging_threshold(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n threshold: u32,\n\n) -> Result<Response, ContractError> {\n\n validate_ownership(deps.as_ref(), &env, info)?;\n\n let previous_ft = CONFIG.load(deps.storage)?.flagging_threshold;\n\n\n\n if previous_ft != threshold {\n\n CONFIG.update(deps.storage, |mut state| -> StdResult<_> {\n\n state.flagging_threshold = threshold;\n\n Ok(state)\n\n })?;\n\n }\n\n\n\n Ok(Response::new()\n\n .add_attribute(\"action\", \"flagging_threshold_updated\")\n\n .add_attribute(\"previous\", previous_ft.to_string())\n\n .add_attribute(\"current\", threshold.to_string()))\n\n}\n\n\n", "file_path": "contracts/deviation-flagging-validator/src/contract.rs", "rank": 86, "score": 136268.79411112852 }, { "content": "pub fn calculate_median<T>(entries: &mut [T]) -> Result<T, EmptyArrayError>\n\nwhere\n\n T: PrimInt,\n\n{\n\n if entries.is_empty() {\n\n return Err(EmptyArrayError);\n\n }\n\n entries.sort_unstable();\n\n\n\n let mid = entries.len() / 2;\n\n let median = match entries.len() % 2 {\n\n 0 => (entries[mid - 1] + entries[mid]) / cast(2).unwrap(),\n\n _ => entries[mid],\n\n };\n\n\n\n Ok(median)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "packages/median/src/lib.rs", "rank": 87, "score": 121219.11963505251 }, { "content": "pub fn contract_df_validator() -> Box<dyn Contract<Empty>> {\n\n let contract = ContractWrapper::new(\n\n deviation_flagging_validator::contract::execute,\n\n deviation_flagging_validator::contract::instantiate,\n\n deviation_flagging_validator::contract::query,\n\n );\n\n Box::new(contract)\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/integration_tests.rs", "rank": 88, "score": 120700.41728525399 }, { "content": "pub fn contract_flux_aggregator() -> Box<dyn Contract<Empty>> {\n\n let contract = ContractWrapper::new(execute, instantiate, query);\n\n Box::new(contract)\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/integration_tests.rs", "rank": 89, "score": 120700.41728525399 }, { "content": "pub fn owner_read(storage: &dyn Storage) -> ReadonlySingleton<State> {\n\n singleton_read(storage, OWNER_KEY)\n\n}\n", "file_path": "contracts/owned/src/state.rs", "rank": 90, "score": 120245.42260773375 }, { "content": "// TODO this needs to be an actual call to access controller\n\nfn check_access(_deps: Deps) -> Result<(), ContractError> {\n\n if false {\n\n return Err(ContractError::NoAccess {});\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use cosmwasm_std::coins;\n\n use cosmwasm_std::testing::{mock_dependencies, mock_env, mock_info};\n\n\n\n #[test]\n\n fn proper_initialization() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = InstantiateMsg {};\n\n let info = mock_info(\"creator\", &coins(1000, \"earth\"));\n\n\n", "file_path": "contracts/flags/src/contract.rs", "rank": 91, "score": 116414.861730938 }, { "content": "fn remove_oracle(storage: &mut dyn Storage, oracle: Addr) -> Result<(), ContractError> {\n\n // TODO: is this needed?\n\n // let reporting_round = REPORTING_ROUND_ID_read.load()?;\n\n let oracle_status = ORACLES.load(storage, &oracle)?;\n\n\n\n if oracle_status.ending_round != ROUND_MAX {\n\n return Err(ContractError::OracleNotEnabled {});\n\n }\n\n ORACLE_ADDRESSES.update(storage, |addresses| -> StdResult<_> {\n\n Ok(addresses\n\n .into_iter()\n\n .filter(|addr| *addr != oracle)\n\n .collect())\n\n })?;\n\n ORACLES.remove(storage, &oracle);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 92, "score": 115848.5855706959 }, { "content": "#[test]\n\nfn test_modify_allowance() {\n\n let mut router = mock_app();\n\n let id = router.store_code(contract_link_token());\n\n let owner = Addr::unchecked(\"owner\");\n\n let contract = router\n\n .instantiate_contract(id, owner.clone(), &InstantiateMsg {}, &[], \"LINK\", None)\n\n .unwrap();\n\n\n\n let spender_addr = \"spender\";\n\n\n\n let msg = ExecuteMsg::IncreaseAllowance {\n\n spender: spender_addr.to_owned(),\n\n amount: Uint128::new(100),\n\n expires: None,\n\n };\n\n router\n\n .execute_contract(owner.clone(), contract.clone(), &msg, &[])\n\n .unwrap();\n\n\n\n let allowance_query = QueryMsg::Allowance {\n", "file_path": "contracts/link-token/src/integration_tests/mod.rs", "rank": 93, "score": 111360.38819672067 }, { "content": "fn add_oracle(storage: &mut dyn Storage, oracle: Addr, admin: Addr) -> Result<(), ContractError> {\n\n let oracle_status = ORACLES\n\n .may_load(storage, &oracle)?\n\n .map(|oracle_status| {\n\n if oracle_status.ending_round == ROUND_MAX {\n\n return Err(ContractError::OracleNotEnabled {});\n\n }\n\n if oracle_status.admin != admin {\n\n return Err(ContractError::OverwritingAdmin {});\n\n }\n\n Ok(oracle_status)\n\n })\n\n .unwrap_or_else(|| {\n\n Ok(OracleStatus {\n\n withdrawable: Uint128::zero(),\n\n starting_round: 0,\n\n ending_round: 0,\n\n last_reported_round: None,\n\n last_started_round: None,\n\n latest_submission: None,\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 94, "score": 109522.72792107573 }, { "content": "#[test]\n\nfn test_transfer_from_without_allowance() {\n\n let mut router = mock_app();\n\n let id = router.store_code(contract_link_token());\n\n let owner = Addr::unchecked(\"owner\");\n\n let contract = router\n\n .instantiate_contract(id, owner.clone(), &InstantiateMsg {}, &[], \"LINK\", None)\n\n .unwrap();\n\n\n\n let recipient = \"recipient\";\n\n\n\n let allowance_query = QueryMsg::Allowance {\n\n owner: owner.to_string(),\n\n spender: recipient.to_owned(),\n\n };\n\n let allowance: AllowanceResponse = router\n\n .wrap()\n\n .query_wasm_smart(contract.clone(), &allowance_query)\n\n .unwrap();\n\n assert_eq!(allowance.allowance, Uint128::zero());\n\n\n\n let msg = ExecuteMsg::TransferFrom {\n\n owner: owner.to_string(),\n\n recipient: recipient.to_owned(),\n\n amount: Uint128::new(100),\n\n };\n\n let res = router.execute_contract(owner, contract, &msg, &[]);\n\n\n\n assert_eq!(res.unwrap_err(), ContractError::NoAllowance {}.to_string());\n\n}\n\n\n", "file_path": "contracts/link-token/src/integration_tests/mod.rs", "rank": 95, "score": 108890.5984834401 }, { "content": "#[test]\n\nfn test_change_allowance_self() {\n\n let mut router = mock_app();\n\n let id = router.store_code(contract_link_token());\n\n let owner = Addr::unchecked(\"owner\");\n\n let contract = router\n\n .instantiate_contract(id, owner.clone(), &InstantiateMsg {}, &[], \"LINK\", None)\n\n .unwrap();\n\n\n\n let msg = ExecuteMsg::IncreaseAllowance {\n\n spender: owner.to_string(),\n\n amount: Uint128::new(1000),\n\n expires: None,\n\n };\n\n let res = router.execute_contract(owner, contract, &msg, &[]);\n\n assert_eq!(\n\n res.unwrap_err(),\n\n ContractError::CannotSetOwnAccount {}.to_string()\n\n );\n\n}\n\n\n", "file_path": "contracts/link-token/src/integration_tests/mod.rs", "rank": 96, "score": 108890.5984834401 }, { "content": "fn initialize_new_round(storage: &mut dyn Storage, round_id: u32, timestamp: u64) -> StdResult<()> {\n\n // update round info if timed out\n\n let timed_out_round = prev_round_id(round_id)?;\n\n if timed_out(storage, timed_out_round, timestamp)? {\n\n let prev_round_id = prev_round_id(timed_out_round)?;\n\n let prev_round = ROUNDS.load(storage, prev_round_id.into())?;\n\n ROUNDS.update(storage, timed_out_round.into(), |round| -> StdResult<_> {\n\n Ok(Round {\n\n answer: prev_round.answer,\n\n answered_in_round: prev_round.answered_in_round,\n\n updated_at: Some(timestamp),\n\n ..round.unwrap()\n\n })\n\n })?;\n\n DETAILS.remove(storage, timed_out_round.into());\n\n }\n\n\n\n REPORTING_ROUND_ID.save(storage, &round_id)?;\n\n let Config {\n\n min_submission_count,\n", "file_path": "contracts/flux-aggregator/src/contract.rs", "rank": 97, "score": 107092.97780677196 }, { "content": "pub fn config_read(storage: &dyn Storage) -> ReadonlySingleton<State> {\n\n singleton_read(storage, CONFIG_KEY)\n\n}\n\n\n\npub const FLAGS: Map<&Addr, bool> = Map::new(\"flags\");\n", "file_path": "contracts/flags/src/state.rs", "rank": 98, "score": 90820.9044218106 }, { "content": "pub trait FeedQuerier {\n\n fn get_latest_answer(&self, feed_address: Addr) -> StdResult<LatestAnswerResponse>;\n\n\n\n fn get_round_data(&self, feed_address: Addr, round_id: u32) -> StdResult<RoundDataResponse>;\n\n\n\n fn get_latest_round_data(&self, feed_address: Addr) -> StdResult<RoundDataResponse>;\n\n\n\n fn get_description(&self, feed_address: Addr) -> StdResult<String>;\n\n\n\n fn get_decimals(&self, feed_address: Addr) -> StdResult<u8>;\n\n\n\n fn get_version(&self, feed_address: Addr) -> StdResult<Uint128>;\n\n}\n\n\n\nimpl<'a> FeedQuerier for QuerierWrapper<'a> {\n\n fn get_latest_answer(&self, feed_address: Addr) -> StdResult<LatestAnswerResponse> {\n\n self.query_wasm_smart(feed_address, &QueryMsg::GetLatestAnswer {}.wrap())\n\n }\n\n\n\n fn get_round_data(&self, feed_address: Addr, round_id: u32) -> StdResult<RoundDataResponse> {\n", "file_path": "packages/chainlink-aggregator/src/querier.rs", "rank": 99, "score": 89687.29363355125 } ]
Rust
merkle_tree/src/lib.rs
mikong/mori
75879172eeb7e733c62d472bafc6c6792d3b754a
use std::mem; use sha2::{Sha256, Digest}; use sha2::digest::generic_array::GenericArray; use sha2::digest::generic_array::typenum::U32; use sha2::digest::generic_array::sequence::Concat; type HashResult = GenericArray<u8, U32>; #[derive(Debug, PartialEq)] pub enum Position { Left, Right, } #[derive(Debug)] pub enum MerkleTree { Empty, NonEmpty(Box<Node>), } #[derive(Debug)] pub struct Node { element: HashResult, leaf_count: usize, left: MerkleTree, right: MerkleTree, } impl MerkleTree { fn new( element: HashResult, leaf_count: usize, left: MerkleTree, right: MerkleTree ) -> MerkleTree { MerkleTree::NonEmpty(Box::new(Node { element, leaf_count, left, right, })) } pub fn build<T: AsRef<[u8]>>(data: &[T]) -> MerkleTree { if data.len() == 0 { panic!("Merkle tree can't be empty: the len is 0"); } let mut leaf_nodes = data.iter().map(|val| { let hash = Sha256::digest(val.as_ref()); MerkleTree::new(hash, 1, MerkleTree::Empty, MerkleTree::Empty) }).collect(); MerkleTree::build_tree(&mut leaf_nodes) } fn build_tree(nodes: &mut Vec<MerkleTree>) -> MerkleTree { let mut new_nodes = vec![]; for pair in nodes.chunks_exact_mut(2) { let mut left = MerkleTree::Empty; let mut right = MerkleTree::Empty; mem::swap(&mut left, &mut pair[0]); mem::swap(&mut right, &mut pair[1]); let hash = MerkleTree::concat_and_hash(&left, &right); let leaf_count = left.leaf_count() + right.leaf_count(); let tree = MerkleTree::new(hash, leaf_count, left, right); new_nodes.push(tree); } if nodes.len() % 2 == 1 { new_nodes.push(nodes.pop().unwrap()); } if new_nodes.len() == 1 { return new_nodes.pop().unwrap(); } MerkleTree::build_tree(&mut new_nodes) } fn concat_and_hash(left: &MerkleTree, right: &MerkleTree) -> HashResult { let value = match (&left, &right) { (MerkleTree::NonEmpty(l), MerkleTree::NonEmpty(r)) => { l.element.concat(r.element) }, (_, _) => unreachable!(), }; Sha256::digest(&value) } fn leaf_count(&self) -> usize { match self { MerkleTree::NonEmpty(n) => n.leaf_count, MerkleTree::Empty => 0, } } pub fn get_proof(&self, index: usize) -> Vec<(Position, HashResult)> { if index >= self.leaf_count() { panic!( "index out of bounds: the len is {} but the index is {}", self.leaf_count(), index ); } let mut stack = Vec::new(); let mut current = self; let mut base = 0; use MerkleTree::NonEmpty; while current.leaf_count() > 1 { if let NonEmpty(node) = current { if let (NonEmpty(l), NonEmpty(r)) = (&node.left, &node.right) { if index < l.leaf_count + base { stack.push((Position::Right, r.element)); current = &node.left; } else { base += l.leaf_count; stack.push((Position::Left, l.element)); current = &node.right; } } } } stack.reverse(); stack } pub fn root_hash(&self) -> HashResult { match self { MerkleTree::NonEmpty(node) => node.element, MerkleTree::Empty => panic!("Merkle tree can't be empty"), } } pub fn validate( target: HashResult, proof: Vec<(Position, HashResult)>, root: HashResult ) -> bool { let hash = proof.iter().fold(target, |acc, (pos, h)| { match pos { Position::Right => Sha256::digest(&acc.concat(*h)), Position::Left => Sha256::digest(&h.concat(acc)), } }); hash == root } } #[cfg(test)] mod tests { use super::*; use super::MerkleTree::*; #[test] #[should_panic] fn zero_element() { let data: [String; 0] = []; MerkleTree::build(&data); } #[test] fn small_trees() { let ha = Sha256::digest(b"A"); let hb = Sha256::digest(b"B"); let hc = Sha256::digest(b"C"); let hab = Sha256::digest(&ha.concat(hb)); let habc = Sha256::digest(&hab.concat(hc)); let data = ["A"]; let tree = MerkleTree::build(&data); assert_eq!(tree.root_hash(), GenericArray::clone_from_slice(&ha)); let data = ["A", "B"]; let tree = MerkleTree::build(&data); assert_eq!(tree.root_hash(), GenericArray::clone_from_slice(&hab)); let data = ["A", "B", "C"]; let tree = MerkleTree::build(&data); assert_eq!(tree.root_hash(), GenericArray::clone_from_slice(&habc)); } #[test] fn it_works() { let data = vec!["A", "B", "C", "D", "E"]; let tree = MerkleTree::build(&data); let ha = Sha256::digest(b"A"); let hb = Sha256::digest(b"B"); let hc = Sha256::digest(b"C"); let hd = Sha256::digest(b"D"); let he = Sha256::digest(b"E"); let hab = Sha256::digest(&ha.concat(hb)); let hcd = Sha256::digest(&hc.concat(hd)); let habcd = Sha256::digest(&hab.concat(hcd)); let root_hash = Sha256::digest(&habcd.concat(he)); assert_eq!(tree.root_hash(), GenericArray::clone_from_slice(&root_hash)); if let NonEmpty(node) = tree { if let (NonEmpty(lnode), NonEmpty(rnode)) = (&node.left, &node.right) { assert_eq!(lnode.element, GenericArray::clone_from_slice(&habcd)); assert_eq!(rnode.element, GenericArray::clone_from_slice(&he)); } } else { panic!("Tree can't be empty"); } } #[test] fn leaf_count() { let tree = MerkleTree::Empty; assert_eq!(tree.leaf_count(), 0); let data = ["A"]; let tree = MerkleTree::build(&data); assert_eq!(tree.leaf_count(), 1); let data = ["A", "B", "C", "D", "E"]; let tree = MerkleTree::build(&data); assert_eq!(tree.leaf_count(), 5); } #[test] fn get_proof() { let data = ["A", "B", "C", "D", "E"]; let tree = MerkleTree::build(&data); let proof = tree.get_proof(2); let ha = Sha256::digest(b"A"); let hb = Sha256::digest(b"B"); let hd = Sha256::digest(b"D"); let he = Sha256::digest(b"E"); let hab = Sha256::digest(&ha.concat(hb)); let mut proof_iter = proof.iter(); let (pos, hash) = proof_iter.next().unwrap(); assert_eq!(pos, &Position::Right); assert_eq!(*hash, GenericArray::clone_from_slice(&hd)); let (pos, hash) = proof_iter.next().unwrap(); assert_eq!(pos, &Position::Left); assert_eq!(*hash, GenericArray::clone_from_slice(&hab)); let (pos, hash) = proof_iter.next().unwrap(); assert_eq!(pos, &Position::Right); assert_eq!(*hash, GenericArray::clone_from_slice(&he)); } #[test] fn validate() { let data = ["A"]; let tree = MerkleTree::build(&data); let root = tree.root_hash(); let ha = Sha256::digest(b"A"); let proof = tree.get_proof(0); assert_eq!(proof.len(), 0); assert_eq!(MerkleTree::validate(ha, proof, root), true); let data = ["A", "B", "C", "D", "E", "F", "G"]; let tree = MerkleTree::build(&data); let root = tree.root_hash(); let hc = Sha256::digest(b"C"); let proof = tree.get_proof(2); assert_eq!(MerkleTree::validate(hc, proof, root), true); let proof = tree.get_proof(3); assert_eq!(MerkleTree::validate(hc, proof, root), false); let hf = Sha256::digest(b"F"); let proof = tree.get_proof(5); assert_eq!(MerkleTree::validate(hf, proof, root), true); let hh = Sha256::digest(b"H"); let proof = tree.get_proof(6); assert_eq!(MerkleTree::validate(hh, proof, root), false); } }
use std::mem; use sha2::{Sha256, Digest}; use sha2::digest::generic_array::GenericArray; use sha2::digest::generic_array::typenum::U32; use sha2::digest::generic_array::sequence::Concat; type HashResult = GenericArray<u8, U32>; #[derive(Debug, PartialEq)] pub enum Position { Left, Right, } #[derive(Debug)] pub enum MerkleTree { Empty, NonEmpty(Box<Node>), } #[derive(Debug)] pub struct Node { element: HashResult, leaf_count: usize, left: MerkleTree, right: MerkleTree, } impl MerkleTree { fn new( element: HashResult, leaf_count: usize, left: MerkleTree, right: MerkleTree ) -> MerkleTree { MerkleTree::NonEmpty(Box::new(Node { element, leaf_count, left, right, })) } pub fn build<T: AsRef<[u8]>>(data: &[T]) -> MerkleTree { if data.len() == 0 { panic!("Merkle tree can't be empty: the len is 0"); } let mut leaf_nodes = data.iter().map(|val| { let hash = Sha256::digest(val.as_ref()); MerkleTree::new(hash, 1, MerkleTree::Empty, MerkleTree::Empty) }).collect(); MerkleTree::build_tree(&mut leaf_nodes) } fn build_tree(nodes: &mut Vec<MerkleTree>) -> MerkleTree { let mut new_nodes = vec![]; for pair in nodes.chunks_exact_mut(2) { let mut left = MerkleTree::Empty; let mut right = MerkleTree::Empty; mem::swap(&mut left, &mut pair[0]); mem::swap(&mut right, &mut pair[1]); let hash = MerkleTree::concat_and_hash(&left, &right); let leaf_count = left.leaf_count() + right.leaf_count(); let tree = MerkleTree::new(hash, leaf_count, left, right); new_nodes.push(tree); } if nodes.len() % 2 == 1 { new_nodes.push(nodes.pop().unwrap()); } if new_nodes.len() == 1 { return new_nodes.pop().unwrap(); } MerkleTree::build_tree(&mut new_nodes) } fn concat_and_hash(left: &MerkleTree, right: &MerkleTree) -> HashResult { let value = match (&left, &right) { (MerkleTree::NonEmpty(l), MerkleTree::NonEmpty(r)) => { l.element.concat(r.element) }, (_, _) => unreachable!(), }; Sha256::digest(&value) } fn leaf_count(&self) -> usize { match self { MerkleTree::NonEmpty(n) => n.leaf_count, MerkleTree::Empty => 0, } } pub fn get_proof(&self, index: usize) -> Vec<(Position, HashResult)> { if index >= self.leaf_count() { panic!( "index out of bounds: the len is {} but the index is {}", self.leaf_count(), index ); } let mut stack = Vec::new(); let mut current = self; let mut base = 0; use MerkleTree::NonEmpty; while current.leaf_count() > 1 { if let NonEmpty(node) = current { if let (NonEmpty(l), NonEmpty(r)) = (&node.left, &node.right) { if index < l.leaf_count + base { stack.push((Position::Right, r.element)); current = &node.left; } else { base += l.leaf_count; stack.push((Position::Left, l.element)); current = &node.right; } } } } stack.reverse(); stack }
_iter.next().unwrap(); assert_eq!(pos, &Position::Right); assert_eq!(*hash, GenericArray::clone_from_slice(&hd)); let (pos, hash) = proof_iter.next().unwrap(); assert_eq!(pos, &Position::Left); assert_eq!(*hash, GenericArray::clone_from_slice(&hab)); let (pos, hash) = proof_iter.next().unwrap(); assert_eq!(pos, &Position::Right); assert_eq!(*hash, GenericArray::clone_from_slice(&he)); } #[test] fn validate() { let data = ["A"]; let tree = MerkleTree::build(&data); let root = tree.root_hash(); let ha = Sha256::digest(b"A"); let proof = tree.get_proof(0); assert_eq!(proof.len(), 0); assert_eq!(MerkleTree::validate(ha, proof, root), true); let data = ["A", "B", "C", "D", "E", "F", "G"]; let tree = MerkleTree::build(&data); let root = tree.root_hash(); let hc = Sha256::digest(b"C"); let proof = tree.get_proof(2); assert_eq!(MerkleTree::validate(hc, proof, root), true); let proof = tree.get_proof(3); assert_eq!(MerkleTree::validate(hc, proof, root), false); let hf = Sha256::digest(b"F"); let proof = tree.get_proof(5); assert_eq!(MerkleTree::validate(hf, proof, root), true); let hh = Sha256::digest(b"H"); let proof = tree.get_proof(6); assert_eq!(MerkleTree::validate(hh, proof, root), false); } }
pub fn root_hash(&self) -> HashResult { match self { MerkleTree::NonEmpty(node) => node.element, MerkleTree::Empty => panic!("Merkle tree can't be empty"), } } pub fn validate( target: HashResult, proof: Vec<(Position, HashResult)>, root: HashResult ) -> bool { let hash = proof.iter().fold(target, |acc, (pos, h)| { match pos { Position::Right => Sha256::digest(&acc.concat(*h)), Position::Left => Sha256::digest(&h.concat(acc)), } }); hash == root } } #[cfg(test)] mod tests { use super::*; use super::MerkleTree::*; #[test] #[should_panic] fn zero_element() { let data: [String; 0] = []; MerkleTree::build(&data); } #[test] fn small_trees() { let ha = Sha256::digest(b"A"); let hb = Sha256::digest(b"B"); let hc = Sha256::digest(b"C"); let hab = Sha256::digest(&ha.concat(hb)); let habc = Sha256::digest(&hab.concat(hc)); let data = ["A"]; let tree = MerkleTree::build(&data); assert_eq!(tree.root_hash(), GenericArray::clone_from_slice(&ha)); let data = ["A", "B"]; let tree = MerkleTree::build(&data); assert_eq!(tree.root_hash(), GenericArray::clone_from_slice(&hab)); let data = ["A", "B", "C"]; let tree = MerkleTree::build(&data); assert_eq!(tree.root_hash(), GenericArray::clone_from_slice(&habc)); } #[test] fn it_works() { let data = vec!["A", "B", "C", "D", "E"]; let tree = MerkleTree::build(&data); let ha = Sha256::digest(b"A"); let hb = Sha256::digest(b"B"); let hc = Sha256::digest(b"C"); let hd = Sha256::digest(b"D"); let he = Sha256::digest(b"E"); let hab = Sha256::digest(&ha.concat(hb)); let hcd = Sha256::digest(&hc.concat(hd)); let habcd = Sha256::digest(&hab.concat(hcd)); let root_hash = Sha256::digest(&habcd.concat(he)); assert_eq!(tree.root_hash(), GenericArray::clone_from_slice(&root_hash)); if let NonEmpty(node) = tree { if let (NonEmpty(lnode), NonEmpty(rnode)) = (&node.left, &node.right) { assert_eq!(lnode.element, GenericArray::clone_from_slice(&habcd)); assert_eq!(rnode.element, GenericArray::clone_from_slice(&he)); } } else { panic!("Tree can't be empty"); } } #[test] fn leaf_count() { let tree = MerkleTree::Empty; assert_eq!(tree.leaf_count(), 0); let data = ["A"]; let tree = MerkleTree::build(&data); assert_eq!(tree.leaf_count(), 1); let data = ["A", "B", "C", "D", "E"]; let tree = MerkleTree::build(&data); assert_eq!(tree.leaf_count(), 5); } #[test] fn get_proof() { let data = ["A", "B", "C", "D", "E"]; let tree = MerkleTree::build(&data); let proof = tree.get_proof(2); let ha = Sha256::digest(b"A"); let hb = Sha256::digest(b"B"); let hd = Sha256::digest(b"D"); let he = Sha256::digest(b"E"); let hab = Sha256::digest(&ha.concat(hb)); let mut proof_iter = proof.iter(); let (pos, hash) = proof
random
[ { "content": "type NodeId = usize;\n\n\n\nimpl<K, V> Node<K, V> {\n\n pub fn new(key: K, value: V, color: Color) -> Self {\n\n Node {\n\n key,\n\n value,\n\n left: None,\n\n right: None,\n\n color,\n\n size: 1,\n\n }\n\n }\n\n}\n\n\n\npub struct TreeIter<'a, K: 'a, V: 'a> {\n\n stack: Vec<NodeId>,\n\n tree: &'a RedBlackTree<K, V>,\n\n}\n\n\n", "file_path": "red_black_tree/src/lib.rs", "rank": 0, "score": 87903.78767802964 }, { "content": "#[test]\n\nfn it_validates() {\n\n let data = [\"A\", \"BC\", \"DEF\"];\n\n let hbc = Sha256::digest(b\"BC\");\n\n\n\n // Calls all public API functions of MerkleTree\n\n let tree = MerkleTree::build(&data);\n\n let proof = tree.get_proof(1);\n\n let root = tree.root_hash();\n\n assert!(MerkleTree::validate(hbc, proof, root));\n\n}\n", "file_path": "merkle_tree/tests/integration_test.rs", "rank": 2, "score": 48078.71804318698 }, { "content": "#[derive(Debug)]\n\npub enum Heap<T> {\n\n Empty,\n\n NonEmpty(Box<Node<T>>),\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Node<T> {\n\n element: T,\n\n list: Vec<Heap<T>>,\n\n}\n\n\n\nimpl<T: Ord> Heap<T> {\n\n pub fn new(element: T, list: Vec<Heap<T>>) -> Heap<T> {\n\n Heap::NonEmpty(Box::new(Node {\n\n element,\n\n list,\n\n }))\n\n }\n\n\n", "file_path": "pairing_heap/src/lib.rs", "rank": 3, "score": 26339.860429484816 }, { "content": " pub fn is_empty(&self) -> bool {\n\n match self {\n\n Heap::Empty => true,\n\n _ => false,\n\n }\n\n }\n\n\n\n pub fn find_min(&self) -> Option<&T> {\n\n match self {\n\n Heap::NonEmpty(node) => Some(&node.element),\n\n Heap::Empty => None,\n\n }\n\n }\n\n\n\n pub fn merge(a: Heap<T>, b: Heap<T>) -> Heap<T> {\n\n match (a, b) {\n\n (h, Heap::Empty) => h,\n\n (Heap::Empty, h) => h,\n\n (Heap::NonEmpty(mut h1), Heap::NonEmpty(mut h2)) => {\n\n // The tree with the larger root should become\n", "file_path": "pairing_heap/src/lib.rs", "rank": 4, "score": 26338.556481387972 }, { "content": " fn merge_pairs(mut list: Vec<Heap<T>>) -> Heap<T> {\n\n if list.is_empty() {\n\n Heap::Empty\n\n } else if list.len() == 1 {\n\n list.pop().unwrap()\n\n } else {\n\n let h1 = list.pop().unwrap();\n\n let h2 = list.pop().unwrap();\n\n Heap::merge(Heap::merge(h1, h2), Heap::merge_pairs(list))\n\n }\n\n }\n\n\n\n pub fn delete_min(self) -> Heap<T> {\n\n match self {\n\n Heap::Empty => Heap::Empty,\n\n Heap::NonEmpty(node) => {\n\n Heap::merge_pairs(node.list)\n\n },\n\n }\n\n }\n", "file_path": "pairing_heap/src/lib.rs", "rank": 5, "score": 26338.207094713354 }, { "content": " heap = heap.delete_min();\n\n assert_eq!(heap.find_min(), Some(&15));\n\n }\n\n\n\n #[test]\n\n fn merge() {\n\n let mut heap = Heap::new(5, vec![]);\n\n\n\n heap = Heap::merge(heap, Empty);\n\n if let Heap::NonEmpty(ref node) = heap {\n\n assert_eq!(node.element, 5);\n\n } else {\n\n panic!(\"Heap can't be Empty\");\n\n }\n\n\n\n heap = Heap::merge(Empty, heap);\n\n if let Heap::NonEmpty(ref node) = heap {\n\n assert_eq!(node.element, 5);\n\n } else {\n\n panic!(\"Heap can't be Empty\");\n", "file_path": "pairing_heap/src/lib.rs", "rank": 6, "score": 26337.19208224164 }, { "content": " }\n\n\n\n let h2 = Heap::new(10, vec![]);\n\n heap = Heap::merge(heap, h2);\n\n if let Heap::NonEmpty(ref node) = heap {\n\n assert_eq!(node.element, 5);\n\n if let Heap::NonEmpty(ref child) = node.list.first().unwrap() {\n\n assert_eq!(child.element, 10);\n\n } else {\n\n panic!(\"List element can't be Empty\");\n\n }\n\n } else {\n\n panic!(\"Heap can't be Empty\");\n\n }\n\n }\n\n}\n", "file_path": "pairing_heap/src/lib.rs", "rank": 7, "score": 26336.611795702713 }, { "content": " // the leftmost child of the tree with the smaller\n\n // root. As an optimization, we can treat the list\n\n // as if it's in reverse order so we only need to\n\n // push to the end of the Vec.\n\n if h1.element <= h2.element {\n\n h1.list.push(Heap::NonEmpty(h2));\n\n Heap::NonEmpty(h1)\n\n } else {\n\n h2.list.push(Heap::NonEmpty(h1));\n\n Heap::NonEmpty(h2)\n\n }\n\n },\n\n }\n\n }\n\n\n\n pub fn insert(self, x: T) -> Heap<T> {\n\n let h = Heap::new(x, vec![]);\n\n Heap::merge(h, self)\n\n }\n\n\n", "file_path": "pairing_heap/src/lib.rs", "rank": 8, "score": 26334.34729611214 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use super::Heap::*;\n\n\n\n #[test]\n\n fn it_works() {\n\n let mut heap = Empty;\n\n heap = heap.insert(5);\n\n assert_eq!(heap.find_min(), Some(&5));\n\n }\n\n\n\n #[test]\n\n fn is_empty() {\n\n let mut heap = Empty;\n\n assert_eq!(heap.is_empty(), true);\n\n\n\n heap = heap.insert(10);\n", "file_path": "pairing_heap/src/lib.rs", "rank": 9, "score": 26329.662842944614 }, { "content": " assert_eq!(heap.is_empty(), false);\n\n }\n\n\n\n #[test]\n\n fn find_min() {\n\n let mut heap = Empty;\n\n assert_eq!(heap.find_min(), None);\n\n\n\n heap = heap.insert(20).insert(30).insert(25).insert(10).insert(15);\n\n assert_eq!(heap.find_min(), Some(&10));\n\n }\n\n\n\n #[test]\n\n fn delete_min() {\n\n let mut heap = Empty;\n\n heap = heap.delete_min();\n\n assert_eq!(heap.is_empty(), true);\n\n\n\n heap = heap.insert(20).insert(10).insert(15);\n\n assert_eq!(heap.find_min(), Some(&10));\n", "file_path": "pairing_heap/src/lib.rs", "rank": 10, "score": 26325.95964671708 }, { "content": "use std::rc::Rc;\n\nuse std::cell::RefCell;\n\nuse std::cell::Ref;\n\nuse std::collections::VecDeque;\n\n\n\n#[derive(Debug)]\n\npub struct Node<V: Clone>(Rc<RefCell<RawNode<V>>>);\n\n\n\nimpl<V: Clone> Node<V> {\n\n pub fn new(key: usize, value: V) -> Self {\n\n let node = Rc::new(RefCell::new(RawNode {\n\n key,\n\n value,\n\n left: None,\n\n right: None,\n\n size: 1,\n\n }));\n\n Node(node)\n\n }\n\n\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 27, "score": 20504.257453120103 }, { "content": "\n\n self.push_left_edge(&node.get().right);\n\n\n\n let key = node.get().key;\n\n let value = node.get().value.clone();\n\n Some((key, value))\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct BST<V: Clone> {\n\n root: Option<Node<V>>,\n\n}\n\n\n\nimpl<V: Clone> BST<V> {\n\n /// Creates a new empty Binary Search Tree.\n\n pub fn new() -> Self {\n\n BST { root: None }\n\n }\n\n\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 28, "score": 20500.50534408381 }, { "content": " }\n\n None\n\n }\n\n\n\n fn rmax(&self, node_id: NodeId) -> NodeId {\n\n match self.nodes[node_id].right {\n\n Some(right_id) => self.rmax(right_id),\n\n None => node_id,\n\n }\n\n }\n\n\n\n // Iterator\n\n pub fn iter(&self) -> TreeIter<K, V> {\n\n let mut iter = TreeIter {\n\n stack: Vec::new(),\n\n tree: self,\n\n };\n\n iter.push_left_edge(self.root);\n\n iter\n\n }\n", "file_path": "red_black_tree/src/lib.rs", "rank": 29, "score": 20500.492830433286 }, { "content": " nodes: Vec<Node<K, V>>,\n\n}\n\n\n\nimpl<K, V> RedBlackTree<K, V>\n\n where K: PartialOrd + Clone,\n\n V: Clone,\n\n{\n\n pub fn new() -> Self {\n\n RedBlackTree {\n\n root: None,\n\n nodes: Vec::new(),\n\n }\n\n }\n\n\n\n pub fn is_empty(&self) -> bool {\n\n self.root.is_none()\n\n }\n\n\n\n pub fn size(&self) -> usize {\n\n self.size_of(self.root)\n", "file_path": "red_black_tree/src/lib.rs", "rank": 30, "score": 20500.12512896944 }, { "content": " /// Returns `true` if the tree has no node elements.\n\n pub fn is_empty(&self) -> bool {\n\n Node::size(&self.root) == 0\n\n }\n\n\n\n /// Returns the number of elements in the tree.\n\n pub fn size(&self) -> usize {\n\n Node::size(&self.root)\n\n }\n\n\n\n pub fn contains(&self, key: usize) -> bool {\n\n self.get(key).is_some()\n\n }\n\n\n\n /// Returns a clone of the value associated with the given key.\n\n pub fn get(&self, key: usize) -> Option<V> {\n\n BST::get_value(&self.root, key)\n\n }\n\n\n\n fn get_value(x: &Option<Node<V>>, key: usize) -> Option<V> {\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 31, "score": 20499.860097315854 }, { "content": " }\n\n\n\n fn size_of(&self, node: Option<NodeId>) -> usize {\n\n node.map_or(0, |id| self.nodes[id].size)\n\n }\n\n\n\n fn update_size_for(&mut self, parent: NodeId) {\n\n let left_size = self.size_of(self.nodes[parent].left);\n\n let right_size = self.size_of(self.nodes[parent].right);\n\n self.nodes[parent].size = 1 + left_size + right_size;\n\n }\n\n\n\n fn new_node(&mut self, key: K, value: V, color: Color) -> NodeId {\n\n let next_index = self.nodes.len();\n\n\n\n self.nodes.push(Node::new(key, value, color));\n\n\n\n next_index\n\n }\n\n\n", "file_path": "red_black_tree/src/lib.rs", "rank": 32, "score": 20499.64646988817 }, { "content": "\n\n pub fn keys(&self) -> Vec<usize> {\n\n let mut v = Vec::new();\n\n BST::inorder(&self.root, &mut v);\n\n v\n\n }\n\n\n\n fn inorder(x: &Option<Node<V>>, v: &mut Vec<usize>) {\n\n if let Some(node) = x {\n\n BST::inorder(&node.get().left, v);\n\n v.push(node.get().key);\n\n BST::inorder(&node.get().right, v);\n\n }\n\n }\n\n\n\n pub fn level_order(&self) -> Vec<usize> {\n\n let mut keys = Vec::new();\n\n let mut queue = VecDeque::new();\n\n if let Some(node) = &self.root {\n\n queue.push_back(node.clone());\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 33, "score": 20499.521000925863 }, { "content": "impl<'a, K: 'a, V: 'a> TreeIter<'a, K, V> {\n\n fn push_left_edge(&mut self, mut node: Option<NodeId>) {\n\n while let Some(node_id) = node {\n\n self.stack.push(node_id);\n\n node = self.tree.nodes[node_id].left;\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, K, V> Iterator for TreeIter<'a, K, V> {\n\n type Item = (&'a K, &'a V);\n\n\n\n fn next(&mut self) -> Option<(&'a K, &'a V)> {\n\n let node_id = match self.stack.pop() {\n\n Some(n) => n,\n\n None => return None,\n\n };\n\n\n\n self.push_left_edge(self.tree.nodes[node_id].right);\n\n\n", "file_path": "red_black_tree/src/lib.rs", "rank": 34, "score": 20497.67187274242 }, { "content": " unvisited: Vec<Node<V>>,\n\n}\n\n\n\nimpl<V: Clone> TreeIter<V> {\n\n fn push_left_edge(&mut self, x: &Option<Node<V>>) {\n\n if let Some(ref node) = *x {\n\n self.unvisited.push(node.clone());\n\n self.push_left_edge(&node.get().left);\n\n }\n\n }\n\n}\n\n\n\nimpl<V: Clone> Iterator for TreeIter<V> {\n\n type Item = (usize, V);\n\n\n\n fn next(&mut self) -> Option<(usize, V)> {\n\n let node = match self.unvisited.pop() {\n\n Some(n) => n,\n\n None => return None,\n\n };\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 35, "score": 20497.633927977873 }, { "content": "\n\n pub fn clone(&self) -> Self {\n\n Node(Rc::clone(&self.0))\n\n }\n\n\n\n pub fn size(node: &Option<Node<V>>) -> usize {\n\n node.as_ref().map_or(0, |n| n.0.borrow().size)\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct RawNode<V: Clone> {\n\n key: usize,\n\n value: V,\n\n left: Option<Node<V>>,\n\n right: Option<Node<V>>,\n\n size: usize,\n\n}\n\n\n\npub struct TreeIter<V: Clone> {\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 36, "score": 20496.952838496592 }, { "content": " if let Some(node) = x {\n\n if key < node.get().key {\n\n return BST::get_value(&node.get().left, key);\n\n } else if key > node.get().key {\n\n return BST::get_value(&node.get().right, key);\n\n } else {\n\n return Some(node.get().value.clone());\n\n }\n\n }\n\n None\n\n }\n\n\n\n /// Inserts the given key-value pair into the tree. If the tree already\n\n /// contains the given key, the associated value is updated.\n\n pub fn put(&mut self, key: usize, value: V) {\n\n self.root = BST::upsert(&self.root, key, value);\n\n }\n\n\n\n fn upsert(x: &Option<Node<V>>, key: usize, value: V) -> Option<Node<V>> {\n\n if let Some(node) = x {\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 37, "score": 20496.94993892425 }, { "content": " if key < node.get().key {\n\n let new_node = BST::upsert(&node.get().left, key, value);\n\n node.set_left(new_node);\n\n } else if key > node.get().key {\n\n let new_node = BST::upsert(&node.get().right, key, value);\n\n node.set_right(new_node);\n\n } else {\n\n node.set_value(value);\n\n }\n\n node.update_size();\n\n return Some(node.clone());\n\n }\n\n // x = None\n\n Some(Node::new(key, value))\n\n }\n\n\n\n /// Removes the smallest key and its associated value from the tree.\n\n pub fn delete_min(&mut self) {\n\n self.root = BST::remove_min(&self.root);\n\n }\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 38, "score": 20496.728985501966 }, { "content": " if let Some(node) = x {\n\n if node.get().right.is_none() {\n\n return node.get().left.as_ref().map(|n| n.clone());\n\n }\n\n let new_node = BST::remove_max(&node.get().right);\n\n node.set_right(new_node);\n\n node.update_size();\n\n return Some(node.clone());\n\n }\n\n None\n\n }\n\n\n\n /// Removes the given key and its associated value from the tree.\n\n pub fn delete(&mut self, key: usize) {\n\n self.root = BST::remove(&self.root, key);\n\n }\n\n\n\n fn remove(x: &Option<Node<V>>, key: usize) -> Option<Node<V>> {\n\n if let Some(node) = x {\n\n if key < node.get().key {\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 39, "score": 20496.70314085707 }, { "content": " pub fn get(&self) -> Ref<RawNode<V>> {\n\n self.0.borrow()\n\n }\n\n\n\n pub fn set_value(&self, value: V) {\n\n self.0.borrow_mut().value = value;\n\n }\n\n\n\n pub fn set_left(&self, node: Option<Node<V>>) {\n\n self.0.borrow_mut().left = node;\n\n }\n\n\n\n pub fn set_right(&self, node: Option<Node<V>>) {\n\n self.0.borrow_mut().right = node;\n\n }\n\n\n\n pub fn update_size(&self) {\n\n let size = 1 + Node::size(&self.get().left) + Node::size(&self.get().right);\n\n self.0.borrow_mut().size = size;\n\n }\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 40, "score": 20496.123298878098 }, { "content": " None\n\n }\n\n\n\n fn maximum(x: &Option<Node<V>>) -> Option<Node<V>> {\n\n if let Some(node) = x {\n\n if node.get().right.is_none() {\n\n return Some(node.clone());\n\n } else {\n\n return BST::maximum(&node.get().right);\n\n }\n\n }\n\n\n\n None\n\n }\n\n\n\n pub fn iter(&self) -> TreeIter<V> {\n\n let mut iter = TreeIter { unvisited: Vec::new() };\n\n iter.push_left_edge(&self.root);\n\n iter\n\n }\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 41, "score": 20495.884700164017 }, { "content": " tree.put(\"C\".to_string(), 4);\n\n tree.put(\"H\".to_string(), 5);\n\n }\n\n\n\n #[test]\n\n fn create_node() {\n\n let n = Node::new(1, \"a\".to_string(), Color::Red);\n\n assert_eq!(n.left.is_none(), true);\n\n assert_eq!(n.right.is_none(), true);\n\n }\n\n\n\n #[test]\n\n fn get_value() {\n\n let mut tree = RedBlackTree::new();\n\n\n\n // empty tree case\n\n let s = \"S\".to_string();\n\n assert_eq!(tree.contains(&s), false);\n\n assert_eq!(tree.get(&s), None);\n\n\n", "file_path": "red_black_tree/src/lib.rs", "rank": 42, "score": 20494.463101824997 }, { "content": " let root_id = tree.root.unwrap();\n\n let root = &tree.nodes[root_id];\n\n assert_eq!(root.key, \"E\".to_string());\n\n assert_eq!(root.color, Color::Black);\n\n\n\n let left_id = root.left.unwrap();\n\n let left = &tree.nodes[left_id];\n\n assert_eq!(left.key, \"A\".to_string());\n\n assert_eq!(left.color, Color::Black);\n\n\n\n let right_id = root.right.unwrap();\n\n let right = &tree.nodes[right_id];\n\n assert_eq!(right.key, \"S\".to_string());\n\n assert_eq!(right.color, Color::Black);\n\n }\n\n\n\n #[test]\n\n fn delete_min() {\n\n // Case: Empty RBT\n\n let mut tree0: RedBlackTree<String, usize> = RedBlackTree::new();\n", "file_path": "red_black_tree/src/lib.rs", "rank": 43, "score": 20494.312586307104 }, { "content": "\n\n fn remove_min(x: &Option<Node<V>>) -> Option<Node<V>> {\n\n if let Some(node) = x {\n\n if node.get().left.is_none() {\n\n return node.get().right.as_ref().map(|n| n.clone());\n\n }\n\n let new_node = BST::remove_min(&node.get().left);\n\n node.set_left(new_node);\n\n node.update_size();\n\n return Some(node.clone());\n\n }\n\n None\n\n }\n\n\n\n /// Removes the largest key and its associated value from the tree.\n\n pub fn delete_max(&mut self) {\n\n self.root = BST::remove_max(&self.root);\n\n }\n\n\n\n fn remove_max(x: &Option<Node<V>>) -> Option<Node<V>> {\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 44, "score": 20493.949489950428 }, { "content": " let node_id = self.new_node(key, value, Color::Red);\n\n Some(node_id)\n\n }\n\n\n\n pub fn delete_min(&mut self) {\n\n if let Some(root_id) = self.root {\n\n let left_is_red = self.is_red(self.nodes[root_id].left);\n\n let right_is_red = self.is_red(self.nodes[root_id].right);\n\n if !left_is_red && !right_is_red {\n\n self.nodes[root_id].color = Color::Red;\n\n }\n\n\n\n self.root = self.rdelete_min(root_id);\n\n if let Some(root_id) = self.root {\n\n self.nodes[root_id].color = Color::Black;\n\n }\n\n }\n\n }\n\n\n\n fn rdelete_min(&mut self, mut node_id: NodeId) -> Option<NodeId> {\n", "file_path": "red_black_tree/src/lib.rs", "rank": 45, "score": 20493.400118372247 }, { "content": " pub fn get(&self, key: &K) -> Option<&V> {\n\n let mut x = &self.root;\n\n while let Some(node_id) = x {\n\n let node = &self.nodes[*node_id];\n\n if *key < node.key {\n\n x = &node.left;\n\n } else if *key > node.key {\n\n x = &node.right;\n\n } else {\n\n return Some(&node.value);\n\n }\n\n }\n\n None\n\n }\n\n\n\n pub fn contains(&self, key: &K) -> bool {\n\n self.get(key).is_some()\n\n }\n\n\n\n pub fn put(&mut self, key: K, value: V) {\n", "file_path": "red_black_tree/src/lib.rs", "rank": 46, "score": 20493.278751595168 }, { "content": " let left = temp.get().left.as_ref().map(|n| n.clone());\n\n node.set_left(left);\n\n\n\n node.update_size();\n\n return Some(node);\n\n }\n\n node.update_size();\n\n return Some(node.clone());\n\n }\n\n\n\n None\n\n }\n\n\n\n /// Returns the smallest key in the tree.\n\n pub fn min(&self) -> Option<usize> {\n\n if let Some(node) = BST::minimum(&self.root) {\n\n return Some(node.get().key)\n\n }\n\n\n\n None\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 47, "score": 20492.35085925967 }, { "content": " let mut bst = BST::new();\n\n populate_tree(&mut bst);\n\n\n\n assert_eq!(bst.keys().len(), 9);\n\n assert_eq!(bst.keys(), vec![1, 2, 3, 4, 5, 6, 7, 8, 9]);\n\n }\n\n\n\n #[test]\n\n fn levelorder_traversal() {\n\n let mut bst = BST::new();\n\n populate_tree(&mut bst);\n\n\n\n assert_eq!(bst.level_order().len(), 9);\n\n assert_eq!(bst.level_order(), vec![8, 3, 9, 1, 7, 2, 5, 4, 6]);\n\n }\n\n\n\n #[test]\n\n fn tree_size() {\n\n let mut bst = BST::new();\n\n\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 48, "score": 20492.3087986988 }, { "content": " }\n\n\n\n fn minimum(x: &Option<Node<V>>) -> Option<Node<V>> {\n\n if let Some(node) = x {\n\n if node.get().left.is_none() {\n\n return Some(node.clone());\n\n } else {\n\n return BST::minimum(&node.get().left);\n\n }\n\n }\n\n\n\n None\n\n }\n\n\n\n /// Returns the largest key in the tree.\n\n pub fn max(&self) -> Option<usize> {\n\n if let Some(node) = BST::maximum(&self.root) {\n\n return Some(node.get().key)\n\n }\n\n\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 49, "score": 20492.08342422068 }, { "content": " let node = &self.tree.nodes[node_id];\n\n Some((&node.key, &node.value))\n\n }\n\n}\n\n\n\npub struct Keys<'a, K: 'a, V: 'a> {\n\n inner: TreeIter<'a, K, V>,\n\n}\n\n\n\nimpl<'a, K, V> Iterator for Keys<'a, K, V> {\n\n type Item = &'a K;\n\n\n\n fn next(&mut self) -> Option<&'a K> {\n\n self.inner.next().map(|(k, _)| k)\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct RedBlackTree<K, V> {\n\n root: Option<NodeId>,\n", "file_path": "red_black_tree/src/lib.rs", "rank": 50, "score": 20492.028533120312 }, { "content": "#[derive(Debug, Copy, Clone, PartialEq)]\n\npub enum Color {\n\n Red,\n\n Black,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Node<K, V> {\n\n key: K,\n\n value: V,\n\n left: Option<NodeId>,\n\n right: Option<NodeId>,\n\n color: Color,\n\n size: usize,\n\n}\n\n\n", "file_path": "red_black_tree/src/lib.rs", "rank": 51, "score": 20492.018018512055 }, { "content": " self.root = self.rput(self.root, key, value);\n\n\n\n if let Some(root_id) = self.root {\n\n self.nodes[root_id].color = Color::Black;\n\n }\n\n }\n\n\n\n fn rput(&mut self, node: Option<NodeId>, key: K, value: V) -> Option<NodeId> {\n\n if let Some(mut node_id) = node {\n\n if key < self.nodes[node_id].key {\n\n self.nodes[node_id].left = self.rput(self.nodes[node_id].left, key, value);\n\n } else if key > self.nodes[node_id].key {\n\n self.nodes[node_id].right = self.rput(self.nodes[node_id].right, key, value);\n\n } else {\n\n self.nodes[node_id].value = value;\n\n }\n\n\n\n // Fix any right-leaning links\n\n let right_is_red = self.is_red(self.nodes[node_id].right);\n\n let left_is_red = self.is_red(self.nodes[node_id].left);\n", "file_path": "red_black_tree/src/lib.rs", "rank": 52, "score": 20491.81577574758 }, { "content": " self.nodes[new].size = self.nodes[old].size;\n\n self.update_size_for(old);\n\n\n\n new\n\n }\n\n\n\n fn flip_colors(&mut self, node_id: NodeId) {\n\n let left = self.nodes[node_id].left.unwrap();\n\n let right = self.nodes[node_id].right.unwrap();\n\n match self.nodes[node_id].color {\n\n Color::Red => {\n\n self.nodes[node_id].color = Color::Black;\n\n self.nodes[left].color = Color::Red;\n\n self.nodes[right].color = Color::Red;\n\n },\n\n Color::Black => {\n\n self.nodes[node_id].color = Color::Red;\n\n self.nodes[left].color = Color::Black;\n\n self.nodes[right].color = Color::Black;\n\n },\n", "file_path": "red_black_tree/src/lib.rs", "rank": 53, "score": 20491.812056588344 }, { "content": " let lr = &tree7.nodes[lr_id];\n\n assert_eq!(lr.key, \"P\".to_string());\n\n assert_eq!(lr.color, Color::Black);\n\n let lll_id = ll.left.unwrap();\n\n let lll = &tree7.nodes[lll_id];\n\n assert_eq!(lll.key, \"H\".to_string());\n\n assert_eq!(lll.color, Color::Red);\n\n }\n\n\n\n #[test]\n\n fn delete() {\n\n let s = \"S\".to_string();\n\n let e = \"E\".to_string();\n\n\n\n // Case: Empty RBT\n\n let mut tree0: RedBlackTree<String, usize> = RedBlackTree::new();\n\n tree0.delete(&s);\n\n\n\n // Case: Delete from 2-node and 1-node tree\n\n let mut tree2 = RedBlackTree::new();\n", "file_path": "red_black_tree/src/lib.rs", "rank": 54, "score": 20491.65780787997 }, { "content": " let new = self.nodes[old].right.unwrap();\n\n\n\n self.nodes[old].right = self.nodes[new].left;\n\n self.nodes[new].left = Some(old);\n\n self.nodes[new].color = self.nodes[old].color;\n\n self.nodes[old].color = Color::Red;\n\n self.nodes[new].size = self.nodes[old].size;\n\n self.update_size_for(old);\n\n\n\n new\n\n }\n\n\n\n fn rotate_right(&mut self, parent: NodeId) -> NodeId {\n\n let old = parent;\n\n let new = self.nodes[old].left.unwrap();\n\n\n\n self.nodes[old].left = self.nodes[new].right;\n\n self.nodes[new].right = Some(old);\n\n self.nodes[new].color = self.nodes[old].color;\n\n self.nodes[old].color = Color::Red;\n", "file_path": "red_black_tree/src/lib.rs", "rank": 55, "score": 20491.655575417582 }, { "content": " // New node becomes root\n\n bst.put(2, \"b\".to_string());\n\n check_key(&bst.root, 2);\n\n\n\n // New node becomes left node\n\n bst.put(1, \"a\".to_string());\n\n if let Some(node) = &bst.root {\n\n check_key(&node.get().left, 1);\n\n } else {\n\n panic!(\"BST must have root\");\n\n }\n\n\n\n // New node becomes right node\n\n bst.put(3, \"c\".to_string());\n\n if let Some(node) = &bst.root {\n\n check_key(&node.get().right, 3);\n\n } else {\n\n panic!(\"BST must have root\");\n\n }\n\n }\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 56, "score": 20491.145802708823 }, { "content": " }\n\n if *key == self.nodes[node_id].key {\n\n let right_id = self.nodes[node_id].right.unwrap();\n\n let x_id = self.rmin(right_id);\n\n self.nodes[node_id].key = self.nodes[x_id].key.clone();\n\n self.nodes[node_id].value = self.nodes[x_id].value.clone();\n\n self.nodes[node_id].right = self.rdelete_min(right_id);\n\n } else {\n\n let right_id = self.nodes[node_id].right.unwrap();\n\n self.nodes[node_id].right = self.rdelete(right_id, key);\n\n }\n\n }\n\n Some(self.balance(node_id))\n\n }\n\n\n\n\n\n // Red-black tree helper methods\n\n\n\n fn rotate_left(&mut self, parent: NodeId) -> NodeId {\n\n let old = parent;\n", "file_path": "red_black_tree/src/lib.rs", "rank": 57, "score": 20490.905100360083 }, { "content": "\n\n bst.delete(3);\n\n assert_eq!(bst.size(), 8);\n\n assert_eq!(bst.keys(), vec![1, 2, 4, 5, 6, 7, 8, 9]);\n\n if let Some(node) = &bst.root {\n\n check_key(&node.get().left, 4);\n\n }\n\n\n\n bst.delete(7);\n\n assert_eq!(bst.size(), 7);\n\n assert_eq!(bst.keys(), vec![1, 2, 4, 5, 6, 8, 9]);\n\n }\n\n\n\n #[test]\n\n fn iterator() {\n\n let mut bst = BST::new();\n\n populate_tree(&mut bst);\n\n let mut tree_iter = bst.iter();\n\n\n\n assert_eq!(tree_iter.next(), Some((1, \"A\".to_string())));\n\n assert_eq!(tree_iter.next(), Some((2, \"C\".to_string())));\n\n assert_eq!(tree_iter.next(), Some((3, \"E\".to_string())));\n\n }\n\n}\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 58, "score": 20490.7679677007 }, { "content": " };\n\n }\n\n\n\n fn is_red(&self, node: Option<NodeId>) -> bool {\n\n match node {\n\n Some(node_id) => self.nodes[node_id].color == Color::Red,\n\n None => false,\n\n }\n\n }\n\n\n\n fn move_red_left(&mut self, mut node_id: NodeId) -> NodeId {\n\n self.flip_colors(node_id);\n\n\n\n if let Some(right_id) = self.nodes[node_id].right {\n\n if self.is_red(self.nodes[right_id].left) {\n\n self.nodes[node_id].right = Some(self.rotate_right(right_id));\n\n node_id = self.rotate_left(node_id);\n\n self.flip_colors(node_id);\n\n }\n\n }\n", "file_path": "red_black_tree/src/lib.rs", "rank": 59, "score": 20490.7548774695 }, { "content": " }\n\n while !queue.is_empty() {\n\n let node = queue.pop_front().unwrap();\n\n keys.push(node.get().key);\n\n if let Some(n) = &node.get().left {\n\n queue.push_back(n.clone());\n\n };\n\n if let Some(n) = &node.get().right {\n\n queue.push_back(n.clone());\n\n };\n\n }\n\n keys\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n fn check_key(cell: &Option<Node<String>>, key: usize) {\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 60, "score": 20490.35108624009 }, { "content": " let new_node = BST::remove(&node.get().left, key);\n\n node.set_left(new_node);\n\n } else if key > node.get().key {\n\n let new_node = BST::remove(&node.get().right, key);\n\n node.set_right(new_node);\n\n } else {\n\n if node.get().right.is_none() {\n\n return node.get().left.as_ref().map(|n| n.clone());\n\n }\n\n if node.get().left.is_none() {\n\n return node.get().right.as_ref().map(|n| n.clone());\n\n }\n\n\n\n let temp = node.clone();\n\n // minimum of the right replaces node to be deleted\n\n let node = BST::minimum(&temp.get().right).unwrap();\n\n\n\n // new node takes left and right of the deleted\n\n let right = BST::remove_min(&temp.get().right);\n\n node.set_right(right);\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 61, "score": 20490.090871008262 }, { "content": "\n\n node_id\n\n }\n\n\n\n fn move_red_right(&mut self, mut node_id: NodeId) -> NodeId {\n\n self.flip_colors(node_id);\n\n\n\n if let Some(left_id) = self.nodes[node_id].left {\n\n if self.is_red(self.nodes[left_id].left) {\n\n node_id = self.rotate_right(node_id);\n\n self.flip_colors(node_id);\n\n }\n\n }\n\n\n\n node_id\n\n }\n\n\n\n fn balance(&mut self, mut node_id: NodeId) -> NodeId {\n\n if self.is_red(self.nodes[node_id].right) {\n\n node_id = self.rotate_left(node_id);\n", "file_path": "red_black_tree/src/lib.rs", "rank": 62, "score": 20489.613865419567 }, { "content": " assert_eq!(bst.is_empty(), true);\n\n assert_eq!(bst.size(), 0);\n\n\n\n populate_tree(&mut bst);\n\n\n\n assert_eq!(bst.is_empty(), false);\n\n assert_eq!(bst.size(), 9);\n\n }\n\n\n\n #[test]\n\n fn delete_min() {\n\n let mut bst = BST::new();\n\n\n\n // delete min an empty BST\n\n bst.delete_min();\n\n\n\n populate_tree(&mut bst);\n\n\n\n bst.delete_min();\n\n assert_eq!(bst.keys(), vec![2, 3, 4, 5, 6, 7, 8, 9]);\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 63, "score": 20489.549286170863 }, { "content": " fn tree_size() {\n\n let mut tree = RedBlackTree::new();\n\n\n\n assert_eq!(tree.is_empty(), true);\n\n assert_eq!(tree.size(), 0);\n\n\n\n populate_tree(&mut tree);\n\n\n\n assert_eq!(tree.is_empty(), false);\n\n assert_eq!(tree.size(), 6);\n\n }\n\n\n\n #[test]\n\n fn min() {\n\n let mut tree = RedBlackTree::new();\n\n\n\n assert_eq!(tree.min(), None);\n\n\n\n populate_tree(&mut tree);\n\n\n", "file_path": "red_black_tree/src/lib.rs", "rank": 64, "score": 20489.47124002077 }, { "content": "\n\n #[test]\n\n fn update() {\n\n let mut bst = BST::new();\n\n populate_tree(&mut bst);\n\n\n\n assert_eq!(bst.get(5), Some(\"H\".to_string()));\n\n bst.put(5, \"I\".to_string());\n\n assert_eq!(bst.get(5), Some(\"I\".to_string()));\n\n assert_eq!(bst.size(), 9);\n\n }\n\n\n\n #[test]\n\n fn get_value() {\n\n let mut bst = BST::new();\n\n\n\n // empty tree case\n\n assert_eq!(bst.contains(8), false);\n\n assert_eq!(bst.get(8), None);\n\n\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 65, "score": 20489.201422266055 }, { "content": "\n\n pub fn keys(&self) -> Keys<K, V> {\n\n Keys { inner: self.iter() }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n // E\n\n // / \\\n\n // C R\n\n // / / \\\n\n // A H S\n\n fn populate_tree(tree: &mut RedBlackTree<String, usize>) {\n\n tree.put(\"S\".to_string(), 0);\n\n tree.put(\"E\".to_string(), 12);\n\n tree.put(\"A\".to_string(), 8);\n\n tree.put(\"R\".to_string(), 3);\n", "file_path": "red_black_tree/src/lib.rs", "rank": 66, "score": 20489.0625725454 }, { "content": " bst.put(7, \"R\".to_string());\n\n bst.put(2, \"C\".to_string());\n\n bst.put(5, \"H\".to_string());\n\n bst.put(9, \"X\".to_string());\n\n bst.put(6, \"M\".to_string());\n\n bst.put(4, \"G\".to_string());\n\n }\n\n\n\n #[test]\n\n fn create_node() {\n\n let n = Node::new(1, \"a\".to_string());\n\n assert_eq!(n.get().left.is_none(), true);\n\n assert_eq!(n.get().right.is_none(), true);\n\n }\n\n\n\n #[test]\n\n fn build_tree() {\n\n let mut bst = BST::new();\n\n assert_eq!(bst.root.is_none(), true);\n\n\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 67, "score": 20488.614970186576 }, { "content": " bst.delete_min();\n\n assert_eq!(bst.keys(), vec![3, 4, 5, 6, 7, 8, 9]);\n\n bst.delete_min();\n\n assert_eq!(bst.keys(), vec![4, 5, 6, 7, 8, 9]);\n\n }\n\n\n\n #[test]\n\n fn delete_max() {\n\n let mut bst = BST::new();\n\n\n\n // delete max an empty BST\n\n bst.delete_max();\n\n\n\n populate_tree(&mut bst);\n\n\n\n bst.delete_max();\n\n assert_eq!(bst.keys(), vec![1, 2, 3, 4, 5, 6, 7, 8]);\n\n bst.delete_max();\n\n assert_eq!(bst.keys(), vec![1, 2, 3, 4, 5, 6, 7]);\n\n bst.delete_max();\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 68, "score": 20488.488758889136 }, { "content": " return;\n\n }\n\n\n\n if let Some(root_id) = self.root {\n\n let left_is_red = self.is_red(self.nodes[root_id].left);\n\n let right_is_red = self.is_red(self.nodes[root_id].right);\n\n if !left_is_red && !right_is_red {\n\n self.nodes[root_id].color = Color::Red;\n\n }\n\n\n\n self.root = self.rdelete(root_id, key);\n\n if let Some(root_id) = self.root {\n\n self.nodes[root_id].color = Color::Black;\n\n }\n\n }\n\n }\n\n\n\n fn rdelete(&mut self, mut node_id: NodeId, key: &K) -> Option<NodeId> {\n\n if *key < self.nodes[node_id].key {\n\n if !self.is_red(self.nodes[node_id].left) {\n", "file_path": "red_black_tree/src/lib.rs", "rank": 69, "score": 20488.447014283593 }, { "content": " assert_eq!(tree_iter.next(), Some((&\"A\".to_string(), &8)));\n\n assert_eq!(tree_iter.next(), Some((&\"C\".to_string(), &4)));\n\n assert_eq!(tree_iter.next(), Some((&\"E\".to_string(), &12)));\n\n assert_eq!(tree_iter.next(), Some((&\"H\".to_string(), &5)));\n\n assert_eq!(tree_iter.next(), Some((&\"R\".to_string(), &3)));\n\n assert_eq!(tree_iter.next(), Some((&\"S\".to_string(), &0)));\n\n assert_eq!(tree_iter.next(), None);\n\n }\n\n\n\n #[test]\n\n fn keys() {\n\n let mut tree = RedBlackTree::new();\n\n populate_tree(&mut tree);\n\n let keys: Vec<&String> = tree.keys().collect();\n\n\n\n assert_eq!(keys, vec![&\"A\".to_string(), &\"C\".to_string(),\n\n &\"E\".to_string(), &\"H\".to_string(), &\"R\".to_string(),\n\n &\"S\".to_string()]);\n\n }\n\n}\n", "file_path": "red_black_tree/src/lib.rs", "rank": 70, "score": 20488.370788384924 }, { "content": " // \\ -> /\n\n // S E\n\n tree.put(\"S\".to_string(), 0);\n\n\n\n // check left-rotate of right-leaning link\n\n let root_id = tree.root.unwrap();\n\n let root = &tree.nodes[root_id];\n\n let left_id = root.left.unwrap();\n\n let left = &tree.nodes[left_id];\n\n assert_eq!(left.key, \"E\".to_string());\n\n assert_eq!(left.color, Color::Red);\n\n\n\n // S\n\n // / E\n\n // E -> / \\\n\n // / A S\n\n // A\n\n tree.put(\"A\".to_string(), 8);\n\n\n\n // check right-rotate then color-flip\n", "file_path": "red_black_tree/src/lib.rs", "rank": 71, "score": 20488.264506850664 }, { "content": " #[test]\n\n fn max() {\n\n let mut bst = BST::new();\n\n\n\n assert_eq!(bst.max(), None);\n\n\n\n populate_tree(&mut bst);\n\n\n\n assert_eq!(bst.max(), Some(9));\n\n }\n\n\n\n #[test]\n\n fn delete() {\n\n let mut bst = BST::new();\n\n\n\n // delete any key of an empty BST\n\n bst.delete(8);\n\n\n\n populate_tree(&mut bst);\n\n assert_eq!(bst.size(), 9);\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 72, "score": 20488.030298663165 }, { "content": "\n\n pub fn min(&self) -> Option<&K> {\n\n if let Some(root_id) = self.root {\n\n let min_id = self.rmin(root_id);\n\n return Some(&self.nodes[min_id].key);\n\n }\n\n None\n\n }\n\n\n\n fn rmin(&self, node_id: NodeId) -> NodeId {\n\n match self.nodes[node_id].left {\n\n Some(left_id) => self.rmin(left_id),\n\n None => node_id,\n\n }\n\n }\n\n\n\n pub fn max(&self) -> Option<&K> {\n\n if let Some(root_id) = self.root {\n\n let max_id = self.rmax(root_id);\n\n return Some(&self.nodes[max_id].key);\n", "file_path": "red_black_tree/src/lib.rs", "rank": 73, "score": 20487.77615753508 }, { "content": "use merkle_tree::MerkleTree;\n\nuse sha2::{Sha256, Digest};\n\n\n\n#[test]\n", "file_path": "merkle_tree/tests/integration_test.rs", "rank": 74, "score": 20487.61349605328 }, { "content": " if self.nodes[node_id].left.is_none() {\n\n return None;\n\n }\n\n\n\n if !self.is_red(self.nodes[node_id].left) {\n\n let left_id = self.nodes[node_id].left.unwrap();\n\n if !self.is_red(self.nodes[left_id].left) {\n\n node_id = self.move_red_left(node_id);\n\n }\n\n }\n\n\n\n // Note: left can't be None, even with move_red_left operation\n\n let left_id = self.nodes[node_id].left.unwrap();\n\n self.nodes[node_id].left = self.rdelete_min(left_id);\n\n\n\n Some(self.balance(node_id))\n\n }\n\n\n\n pub fn delete(&mut self, key: &K) {\n\n if !self.contains(key) {\n", "file_path": "red_black_tree/src/lib.rs", "rank": 75, "score": 20487.532135818303 }, { "content": " let left_id = self.nodes[node_id].left.unwrap();\n\n if !self.is_red(self.nodes[left_id].left) {\n\n node_id = self.move_red_left(node_id);\n\n }\n\n }\n\n let left_id = self.nodes[node_id].left.unwrap();\n\n self.nodes[node_id].left = self.rdelete(left_id, key);\n\n } else {\n\n if self.is_red(self.nodes[node_id].left) {\n\n node_id = self.rotate_right(node_id);\n\n }\n\n if *key == self.nodes[node_id].key && self.nodes[node_id].right.is_none() {\n\n return None;\n\n }\n\n \n\n if !self.is_red(self.nodes[node_id].right) {\n\n let right_id = self.nodes[node_id].right.unwrap();\n\n if !self.is_red(self.nodes[right_id].left) {\n\n node_id = self.move_red_right(node_id);\n\n }\n", "file_path": "red_black_tree/src/lib.rs", "rank": 76, "score": 20487.404432774358 }, { "content": " if right_is_red && !left_is_red {\n\n node_id = self.rotate_left(node_id);\n\n }\n\n if self.is_red(self.nodes[node_id].left) {\n\n let left_id = self.nodes[node_id].left.unwrap();\n\n if self.is_red(self.nodes[left_id].left) {\n\n node_id = self.rotate_right(node_id);\n\n }\n\n }\n\n let left_is_red = self.is_red(self.nodes[node_id].left);\n\n let right_is_red = self.is_red(self.nodes[node_id].right);\n\n if left_is_red && right_is_red {\n\n self.flip_colors(node_id);\n\n }\n\n\n\n self.update_size_for(node_id);\n\n\n\n return Some(node_id);\n\n }\n\n\n", "file_path": "red_black_tree/src/lib.rs", "rank": 77, "score": 20487.337155783138 }, { "content": " assert_eq!(tree.min(), Some(&\"A\".to_string()));\n\n }\n\n\n\n #[test]\n\n fn max() {\n\n let mut tree = RedBlackTree::new();\n\n\n\n assert_eq!(tree.max(), None);\n\n\n\n populate_tree(&mut tree);\n\n\n\n assert_eq!(tree.max(), Some(&\"S\".to_string()));\n\n }\n\n\n\n #[test]\n\n fn iterator() {\n\n let mut tree = RedBlackTree::new();\n\n populate_tree(&mut tree);\n\n let mut tree_iter = tree.iter();\n\n\n", "file_path": "red_black_tree/src/lib.rs", "rank": 78, "score": 20487.182584313316 }, { "content": " }\n\n\n\n if self.is_red(self.nodes[node_id].left) {\n\n let left_id = self.nodes[node_id].left.unwrap();\n\n if self.is_red(self.nodes[left_id].left) {\n\n node_id = self.rotate_right(node_id);\n\n }\n\n }\n\n\n\n let left_is_red = self.is_red(self.nodes[node_id].left);\n\n let right_is_red = self.is_red(self.nodes[node_id].right);\n\n if left_is_red && right_is_red {\n\n self.flip_colors(node_id);\n\n }\n\n\n\n self.update_size_for(node_id);\n\n node_id\n\n }\n\n\n\n // Ordered symbol table methods\n", "file_path": "red_black_tree/src/lib.rs", "rank": 79, "score": 20487.140497129632 }, { "content": " populate_tree(&mut tree);\n\n\n\n assert_eq!(tree.contains(&s), true);\n\n assert_eq!(tree.get(&s), Some(&0));\n\n assert_eq!(tree.get(&\"H\".to_string()), Some(&5));\n\n assert_eq!(tree.contains(&\"Z\".to_string()), false);\n\n }\n\n\n\n #[test]\n\n fn put() {\n\n let mut tree = RedBlackTree::new();\n\n\n\n tree.put(\"E\".to_string(), 12);\n\n\n\n // check root is black\n\n let root_id = tree.root.unwrap();\n\n let root = &tree.nodes[root_id];\n\n assert_eq!(root.color, Color::Black);\n\n\n\n // E S\n", "file_path": "red_black_tree/src/lib.rs", "rank": 80, "score": 20486.47467994373 }, { "content": " if let Some(node) = cell {\n\n assert_eq!(node.get().key, key);\n\n } else {\n\n panic!(\"Node can't be None\");\n\n }\n\n }\n\n\n\n // 8(S)\n\n // / \\\n\n // 3(E) 9(X)\n\n // / \\\n\n // 1(A) 7(R)\n\n // \\ /\n\n // 2(C) 5(H)\n\n // / \\\n\n // 4(G) 6(M)\n\n fn populate_tree(bst: &mut BST<String>) {\n\n bst.put(8, \"S\".to_string());\n\n bst.put(3, \"E\".to_string());\n\n bst.put(1, \"A\".to_string());\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 81, "score": 20486.371020146948 }, { "content": " assert_eq!(bst.keys(), vec![1, 2, 3, 4, 5, 6]);\n\n }\n\n\n\n #[test]\n\n fn min() {\n\n let mut bst = BST::new();\n\n\n\n assert_eq!(bst.min(), None);\n\n\n\n populate_tree(&mut bst);\n\n\n\n assert_eq!(bst.min(), Some(1));\n\n bst.delete_min();\n\n assert_eq!(bst.min(), Some(2));\n\n bst.delete_min();\n\n assert_eq!(bst.min(), Some(3));\n\n bst.delete_min();\n\n assert_eq!(bst.min(), Some(4));\n\n }\n\n\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 82, "score": 20485.79884074194 }, { "content": " tree0.delete_min();\n\n\n\n // Case: Delete from 2-node and 1-node tree\n\n let mut tree2 = RedBlackTree::new();\n\n\n\n // S\n\n // /\n\n // E\n\n //\n\n let e = \"E\".to_string();\n\n let s = \"S\".to_string();\n\n tree2.put(s.clone(), 1);\n\n tree2.put(e.clone(), 2);\n\n tree2.delete_min();\n\n assert_eq!(tree2.get(&e), None);\n\n assert_eq!(tree2.min(), Some(&s));\n\n let root_id = tree2.root.unwrap();\n\n let root = &tree2.nodes[root_id];\n\n assert_eq!(root.color, Color::Black);\n\n tree2.delete_min();\n", "file_path": "red_black_tree/src/lib.rs", "rank": 83, "score": 20485.557065984623 }, { "content": "\n\n // S\n\n // /\n\n // E\n\n //\n\n tree2.put(\"S\".to_string(), 1);\n\n tree2.put(\"E\".to_string(), 2);\n\n tree2.delete(&e);\n\n assert_eq!(tree2.get(&e), None);\n\n let root_id = tree2.root.unwrap();\n\n let root = &tree2.nodes[root_id];\n\n assert_eq!(root.color, Color::Black);\n\n tree2.delete(&s);\n\n assert_eq!(tree2.root, None);\n\n\n\n // Case: 4-node tree\n\n let mut tree4 = RedBlackTree::new();\n\n\n\n // E R\n\n // / \\ / \\\n", "file_path": "red_black_tree/src/lib.rs", "rank": 84, "score": 20485.554693309892 }, { "content": " assert_eq!(tree2.root, None);\n\n\n\n // Case: balanced 7-node tree with black links\n\n let mut tree7 = RedBlackTree::new();\n\n\n\n // L\n\n // / \\\n\n // H T\n\n // / \\ / \\\n\n // D J P X\n\n //\n\n tree7.put(\"L\".to_string(), 1);\n\n tree7.put(\"H\".to_string(), 2);\n\n tree7.put(\"T\".to_string(), 3);\n\n tree7.put(\"P\".to_string(), 4);\n\n tree7.put(\"X\".to_string(), 5);\n\n tree7.put(\"D\".to_string(), 6);\n\n tree7.put(\"J\".to_string(), 7);\n\n tree7.delete_min();\n\n\n", "file_path": "red_black_tree/src/lib.rs", "rank": 85, "score": 20484.684600407378 }, { "content": " // T\n\n // // \\\n\n // L X\n\n // / \\\n\n // J P Legend: / - black link\n\n // // // - red link\n\n // H\n\n //\n\n let root_id = tree7.root.unwrap();\n\n let root = &tree7.nodes[root_id];\n\n assert_eq!(root.key, \"T\".to_string());\n\n let left_id = root.left.unwrap();\n\n let left = &tree7.nodes[left_id];\n\n assert_eq!(left.key, \"L\".to_string());\n\n assert_eq!(left.color, Color::Red);\n\n let ll_id = left.left.unwrap();\n\n let ll = &tree7.nodes[ll_id];\n\n assert_eq!(ll.key, \"J\".to_string());\n\n assert_eq!(ll.color, Color::Black);\n\n let lr_id = left.right.unwrap();\n", "file_path": "red_black_tree/src/lib.rs", "rank": 86, "score": 20484.35206711232 }, { "content": " // A S -> A S\n\n // //\n\n // R\n\n //\n\n tree4.put(\"S\".to_string(), 1);\n\n tree4.put(\"E\".to_string(), 2);\n\n tree4.put(\"A\".to_string(), 3);\n\n tree4.put(\"R\".to_string(), 4);\n\n tree4.delete(&e);\n\n assert_eq!(tree4.get(&e), None);\n\n let root_id = tree4.root.unwrap();\n\n let root = &tree4.nodes[root_id];\n\n assert_eq!(root.key, \"R\".to_string());\n\n let right_id = root.right.unwrap();\n\n let right = &tree4.nodes[right_id];\n\n assert_eq!(right.key, \"S\".to_string());\n\n assert_eq!(right.color, Color::Black);\n\n }\n\n\n\n #[test]\n", "file_path": "red_black_tree/src/lib.rs", "rank": 87, "score": 20482.661186975947 }, { "content": " populate_tree(&mut bst);\n\n\n\n assert_eq!(bst.contains(8), true);\n\n assert_eq!(bst.get(8), Some(\"S\".to_string()));\n\n assert_eq!(bst.get(2), Some(\"C\".to_string()));\n\n assert_eq!(bst.get(9), Some(\"X\".to_string()));\n\n assert_eq!(bst.get(5), Some(\"H\".to_string()));\n\n\n\n // key not in tree\n\n assert_eq!(bst.contains(10), false);\n\n assert_eq!(bst.get(10), None);\n\n\n\n // after delete\n\n bst.delete(8);\n\n assert_eq!(bst.contains(8), false);\n\n assert_eq!(bst.get(8), None);\n\n }\n\n\n\n #[test]\n\n fn inorder_traversal() {\n", "file_path": "binary_search_tree/src/lib.rs", "rank": 88, "score": 20481.305929287482 }, { "content": "# Pairing Heap\n\n\n\nBased on the Pairing Heap discussed in section 5.5 of the book _Purely Functional Data Structures_ with the following Standard ML implementation:\n\n\n\n```sml\n\nfunctor PairingHeap (Element : ORDERED) : HEAP =\n\nstruct\n\n structure Elem = Element\n\n\n\n datatype Heap = E | T of Elem.T * Heap list\n\n\n\n val empty = E\n\n fun isEmpty E = true | isEmpty _ = false\n\n\n\n fun merge (h, E) = h\n\n | merge (E, h) = h\n\n | merge (h1 as T (x, hs1), h2 as T (y, hs2)) =\n\n if Elem.leq (x, y) then T (x, h2 :: hs1) else T (y, h1 :: hs2)\n\n fun insert (x, h) = merge (T (x, []), h)\n\n\n\n fun mergePairs [] = E\n\n | mergePairs [h] = h\n\n | mergePairs (h1 :: h2 :: hs) = merge (merge (h1, h2), mergePairs hs)\n\n\n\n fun findMin E = raise EMPTY\n\n | findMin (T (x, hs)) = x\n\n fun deleteMin E = raise EMPTY\n\n | deleteMin (T (x, hs)) = mergePairs hs\n\nend\n\n```\n", "file_path": "pairing_heap/README.md", "rank": 89, "score": 17750.62195896776 }, { "content": "# Merkle Tree\n\n\n\nThis Rust implementation of a [Merkle Tree](https://en.wikipedia.org/wiki/Merkle_tree) has the following properties:\n\n\n\n* binary\n\n* unbalanced\n\n* uses SHA256 for now\n\n* accepts a vector of arbitrary-length data to build a tree\n\n* not (yet) secure\n", "file_path": "merkle_tree/README.md", "rank": 90, "score": 14474.998059893556 }, { "content": " pub fn insert(self, element: T) -> Tree<T> {\n\n let tree = self.ins(element);\n\n match tree {\n\n Tree::NonEmpty(node) => {\n\n Tree::new(\n\n Color::Black,\n\n node.element,\n\n node.left,\n\n node.right,\n\n )\n\n },\n\n Tree::Empty => unreachable!(),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use super::Color::*;\n", "file_path": "functional_rbt/src/lib.rs", "rank": 91, "score": 27.904965003826433 }, { "content": "impl<T: Ord> Tree<T> {\n\n pub fn new(color: Color, element: T, left: Tree<T>, right: Tree<T>) -> Tree<T> {\n\n Tree::NonEmpty(Box::new(Node {\n\n color,\n\n element,\n\n left,\n\n right,\n\n }))\n\n }\n\n\n\n fn memb(&self, element: &T, candidate: &T) -> bool {\n\n match self {\n\n Tree::Empty => *element == *candidate,\n\n Tree::NonEmpty(ref node) => {\n\n if *element < node.element {\n\n node.left.memb(element, candidate)\n\n } else {\n\n node.right.memb(element, &node.element)\n\n }\n\n },\n", "file_path": "functional_rbt/src/lib.rs", "rank": 92, "score": 25.872333938240818 }, { "content": " if let Tree::NonEmpty(rnode) = node.right {\n\n let new_l = Tree::new(Color::Black, node.element, node.left, rnode.left);\n\n let new_r = Tree::new(Color::Black, element, rnode.right, right);\n\n return Tree::new(Color::Red, rnode.element, new_l, new_r);\n\n }\n\n }\n\n } else {\n\n return Tree::new(color, element, left, right);\n\n }\n\n\n\n unreachable!();\n\n }\n\n\n\n fn ins(self, element: T) -> Tree<T> {\n\n match self {\n\n Tree::Empty => {\n\n Tree::new(Color::Red, element, Tree::Empty, Tree::Empty)\n\n },\n\n Tree::NonEmpty(node) => {\n\n if element < node.element {\n", "file_path": "functional_rbt/src/lib.rs", "rank": 93, "score": 25.593690567191846 }, { "content": " if let Tree::NonEmpty(rnode) = node.right {\n\n let new_l = Tree::new(Color::Black, element, left, node.left);\n\n let new_r = Tree::new(Color::Black, rnode.element, rnode.left, rnode.right);\n\n return Tree::new(Color::Red, node.element, new_l, new_r);\n\n }\n\n }\n\n } else {\n\n return Tree::new(color, element, left, right);\n\n }\n\n\n\n unreachable!();\n\n }\n\n\n\n // z\n\n // / \\\n\n // (x) d\n\n // / \\\n\n // a (y)\n\n // / \\\n\n // b c\n", "file_path": "functional_rbt/src/lib.rs", "rank": 94, "score": 23.245731082959324 }, { "content": "\n\n pub fn delete_min(self) -> Heap<T> {\n\n match self {\n\n Heap::Empty => Heap::Empty,\n\n Heap::NonEmpty(node) => {\n\n match node.left {\n\n Heap::Empty => node.right,\n\n Heap::NonEmpty(lnode) => {\n\n let h = Heap::new(node.element, lnode.right, node.right);\n\n if lnode.left.is_empty() {\n\n h\n\n } else {\n\n Heap::new(lnode.element, lnode.left.delete_min(), h)\n\n }\n\n },\n\n }\n\n },\n\n }\n\n }\n\n\n", "file_path": "splay_heap/src/lib.rs", "rank": 95, "score": 22.377891775300608 }, { "content": "#[derive(Debug)]\n\npub enum Heap<T> {\n\n Empty,\n\n NonEmpty(Box<Node<T>>),\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Node<T> {\n\n element: T,\n\n left: Heap<T>,\n\n right: Heap<T>,\n\n}\n\n\n\nimpl<T: Ord> Heap<T> {\n\n pub fn new(element: T, left: Heap<T>, right: Heap<T>) -> Heap<T> {\n\n Heap::NonEmpty(Box::new(Node {\n\n element,\n\n left,\n\n right,\n\n }))\n", "file_path": "splay_heap/src/lib.rs", "rank": 96, "score": 21.609086383090656 }, { "content": "#[derive(Debug)]\n\npub enum Heap<T> {\n\n Empty,\n\n NonEmpty(Box<Node<T>>),\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Node<T> {\n\n rank: usize,\n\n element: T,\n\n left: Heap<T>,\n\n right: Heap<T>,\n\n}\n\n\n\nimpl<T: Ord> Heap<T> {\n\n fn rank(&self) -> usize {\n\n match self {\n\n Heap::Empty => 0,\n\n Heap::NonEmpty(ref node) => node.rank,\n\n }\n", "file_path": "leftist_heap/src/lib.rs", "rank": 97, "score": 21.56338868192169 }, { "content": " //\n\n // x\n\n // / \\\n\n // a (z)\n\n // / \\\n\n // (y) d\n\n // / \\\n\n // b c\n\n //\n\n fn rbalance(color: Color, element: T, left: Tree<T>, right: Tree<T>) -> Tree<T> {\n\n if color == Color::Black && right.is_red() && right.left_is_red() {\n\n if let Tree::NonEmpty(node) = right {\n\n if let Tree::NonEmpty(lnode) = node.left {\n\n let new_l = Tree::new(Color::Black, element, left, lnode.left);\n\n let new_r = Tree::new(Color::Black, node.element, lnode.right, node.right);\n\n return Tree::new(Color::Red, lnode.element, new_l, new_r);\n\n }\n\n }\n\n } else if color == Color::Black && right.is_red() && right.right_is_red() {\n\n if let Tree::NonEmpty(node) = right {\n", "file_path": "functional_rbt/src/lib.rs", "rank": 98, "score": 21.37751485960366 }, { "content": " //\n\n // ⬇︎\n\n // z\n\n // / \\ (y)\n\n // (y) d -> / \\\n\n // / \\ x z\n\n // (x) c / \\ / \\\n\n // / \\ a b c d\n\n // a b\n\n fn lbalance(color: Color, element: T, left: Tree<T>, right: Tree<T>) -> Tree<T> {\n\n if color == Color::Black && left.is_red() && left.left_is_red() {\n\n if let Tree::NonEmpty(node) = left {\n\n if let Tree::NonEmpty(lnode) = node.left {\n\n let new_l = Tree::new(Color::Black, lnode.element, lnode.left, lnode.right);\n\n let new_r = Tree::new(Color::Black, element, node.right, right);\n\n return Tree::new(Color::Red, node.element, new_l, new_r);\n\n }\n\n }\n\n } else if color == Color::Black && left.is_red() && left.right_is_red() {\n\n if let Tree::NonEmpty(node) = left {\n", "file_path": "functional_rbt/src/lib.rs", "rank": 99, "score": 21.36157631270489 } ]
Rust
src/generate/generator.rs
nurmohammed840/virtue
b645e092861377a1b11b97c681de9e9acb5a0d22
use super::{GenerateMod, Impl, ImplFor, StreamBuilder}; use crate::parse::{GenericConstraints, Generics}; use crate::prelude::{Ident, TokenStream}; #[must_use] pub struct Generator { pub(crate) name: Ident, pub(crate) generics: Option<Generics>, pub(crate) generic_constraints: Option<GenericConstraints>, pub(crate) stream: StreamBuilder, } impl Generator { pub(crate) fn new( name: Ident, generics: Option<Generics>, generic_constraints: Option<GenericConstraints>, ) -> Self { Self { name, generics, generic_constraints, stream: StreamBuilder::new(), } } pub fn target_name(&self) -> Ident { self.name.clone() } pub fn r#impl(&mut self) -> Impl<Self> { Impl::with_parent_name(self) } pub fn generate_impl(&mut self) -> Impl<Self> { Impl::with_parent_name(self) } pub fn impl_for(&mut self, trait_name: impl Into<String>) -> ImplFor<Self> { ImplFor::new(self, trait_name) } pub fn impl_for_with_lifetimes<ITER, I, T>( &mut self, trait_name: T, lifetimes: ITER, ) -> ImplFor<Self> where ITER: IntoIterator<Item = I>, I: Into<String>, T: Into<String>, { ImplFor::new_with_lifetimes(self, trait_name, lifetimes) } pub fn generate_mod(&mut self, mod_name: impl Into<String>) -> GenerateMod<Self> { GenerateMod::new(self, mod_name) } pub fn export_to_file(&self, file_postfix: &str) -> bool { use std::io::Write; if let Ok(var) = std::env::var("CARGO_MANIFEST_DIR") { let mut path = std::path::PathBuf::from(var); loop { { let mut path = path.clone(); path.push("target"); if path.exists() { path.push(format!("{}_{}.rs", self.target_name(), file_postfix)); if let Ok(mut file) = std::fs::File::create(path) { let _ = file.write_all(self.stream.stream.to_string().as_bytes()); return true; } } } if let Some(parent) = path.parent() { path = parent.to_owned(); } else { break; } } } false } pub fn finish(mut self) -> crate::prelude::Result<TokenStream> { Ok(std::mem::take(&mut self.stream).stream) } } impl Drop for Generator { fn drop(&mut self) { if !self.stream.stream.is_empty() && !std::thread::panicking() { eprintln!("WARNING: Generator dropped but the stream is not empty. Please call `.finish()` on the generator"); } } } impl super::Parent for Generator { fn append(&mut self, builder: StreamBuilder) { self.stream.append(builder); } fn name(&self) -> &Ident { &self.name } fn generics(&self) -> Option<&Generics> { self.generics.as_ref() } fn generic_constraints(&self) -> Option<&GenericConstraints> { self.generic_constraints.as_ref() } }
use super::{GenerateMod, Impl, ImplFor, StreamBuilder}; use crate::parse::{GenericConstraints, Generics}; use crate::prelude::{Ident, TokenStream}; #[must_use] pub struct Generator { pub(crate) name: Ident, pub(crate) generics: Option<Generics>, pub(crate) generic_constraints: Option<GenericConstraints>, pub(crate) stream: StreamBuilder, } impl Generator { pub(crate) fn new( name: Ident, generics: Option<Generics>, generic_constraints: Option<GenericConstraints>, ) -> Self { Self { name, generics, generic_constraints, stream: StreamBuilder::new(), } } pub fn target_name(&self) -> Ident { self.name.clone() } pub fn r#impl(&mut self) -> Impl<Self> { Impl::with_parent_name(self) } pub fn generate_impl(&mut self) -> Impl<Self> { Impl::with_parent_name(self) } pub fn impl_for(&mut self, trait_name: impl Into<String>) -> ImplFor<Self> { ImplFor::new(self, trait_name) } pub fn impl_for_with_lifetimes<ITER, I, T>( &mut self, trait_name: T, lifetimes: ITER, ) -> ImplFor<Self> where ITER: IntoIterator<Item = I>, I: Into<String>, T: Into<String>, { ImplFor::new_with_lifetimes(self, trait_name, lifetimes) } pub fn generate_mod(&mut self, mod_name: impl Into<String>) -> GenerateMod<Self> { GenerateMod::new(self, mod_name) } pub fn export_to_file(&self, file_postfix: &str) -> bool { use std::io::Write; if let Ok(var) = std::env::var("CARGO_MANIFEST_DIR") { let mut path = std::path::PathBuf::from(var); loop { { let mut path = path.clone(); path.push("target"); if path.exists() { path.push(format!("{}_{}.rs", self.target_name(), file_postfix)); if let Ok(mut file) = std::fs::File::create(path) { let _ = file.write_all(self.stream.stream.to_string().as_bytes()); return true; } } }
} } false } pub fn finish(mut self) -> crate::prelude::Result<TokenStream> { Ok(std::mem::take(&mut self.stream).stream) } } impl Drop for Generator { fn drop(&mut self) { if !self.stream.stream.is_empty() && !std::thread::panicking() { eprintln!("WARNING: Generator dropped but the stream is not empty. Please call `.finish()` on the generator"); } } } impl super::Parent for Generator { fn append(&mut self, builder: StreamBuilder) { self.stream.append(builder); } fn name(&self) -> &Ident { &self.name } fn generics(&self) -> Option<&Generics> { self.generics.as_ref() } fn generic_constraints(&self) -> Option<&GenericConstraints> { self.generic_constraints.as_ref() } }
if let Some(parent) = path.parent() { path = parent.to_owned(); } else { break; }
if_condition
[]
Rust
src/smr/tests/proposal_test.rs
zeroqn/overlord
4bf43c4c4e94691e54bae586a07b82e39e13622a
use crate::smr::smr_types::{Lock, SMREvent, SMRTrigger, Step, TriggerType}; use crate::smr::tests::{gen_hash, trigger_test, InnerState, StateMachineTestCase}; use crate::{error::ConsensusError, types::Hash}; #[tokio::test(threaded_scheduler)] async fn test_proposal_trigger() { let mut index = 1; let mut test_cases: Vec<StateMachineTestCase> = Vec::new(); let hash = gen_hash(); test_cases.push(StateMachineTestCase::new( InnerState::new(0, Step::Propose, Hash::new(), None), SMRTrigger::new(hash.clone(), TriggerType::Proposal, None, 0), SMREvent::PrevoteVote { height: 0u64, round: 0u64, block_hash: hash, lock_round: None, }, None, None, )); let hash = Hash::new(); test_cases.push(StateMachineTestCase::new( InnerState::new(0, Step::Propose, Hash::new(), None), SMRTrigger::new(hash.clone(), TriggerType::Proposal, None, 0), SMREvent::PrevoteVote { height: 0u64, round: 0u64, block_hash: hash, lock_round: None, }, Some(ConsensusError::ProposalErr("Empty qc".to_string())), None, )); let hash = Hash::new(); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, Hash::new(), None), SMRTrigger::new(hash.clone(), TriggerType::Proposal, Some(0), 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: hash, lock_round: None, }, Some(ConsensusError::ProposalErr("Invalid lock".to_string())), None, )); let hash = gen_hash(); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, Hash::new(), None), SMRTrigger::new(hash.clone(), TriggerType::Proposal, Some(0), 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: hash, lock_round: None, }, None, None, )); let hash = gen_hash(); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, hash.clone(), None), SMRTrigger::new(hash.clone(), TriggerType::Proposal, Some(0), 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: hash, lock_round: None, }, Some(ConsensusError::SelfCheckErr("".to_string())), None, )); let hash = gen_hash(); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, hash.clone(), None), SMRTrigger::new(hash.clone(), TriggerType::Proposal, None, 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: hash, lock_round: None, }, Some(ConsensusError::SelfCheckErr("".to_string())), None, )); let hash = gen_hash(); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, hash.clone(), None), SMRTrigger::new(Hash::new(), TriggerType::Proposal, Some(0), 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: hash, lock_round: None, }, Some(ConsensusError::ProposalErr("Empty qc".to_string())), None, )); let hash = gen_hash(); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, hash.clone(), None), SMRTrigger::new(Hash::new(), TriggerType::Proposal, None, 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: hash, lock_round: None, }, Some(ConsensusError::ProposalErr("Empty qc".to_string())), None, )); let hash = Hash::new(); let lock_hash = gen_hash(); let lock = Lock::new(0, lock_hash); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, hash.clone(), Some(lock)), SMRTrigger::new(hash.clone(), TriggerType::Proposal, None, 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: hash, lock_round: None, }, Some(ConsensusError::ProposalErr("Empty qc".to_string())), None, )); let hash = Hash::new(); let lock_hash = gen_hash(); let lock = Lock::new(0, lock_hash); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, hash.clone(), Some(lock)), SMRTrigger::new(hash.clone(), TriggerType::Proposal, Some(0), 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: hash, lock_round: None, }, Some(ConsensusError::ProposalErr("Empty qc".to_string())), None, )); let hash = Hash::new(); let lock_hash = gen_hash(); let lock = Lock::new(0, lock_hash); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, hash.clone(), Some(lock)), SMRTrigger::new(hash.clone(), TriggerType::Proposal, None, 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: hash, lock_round: None, }, Some(ConsensusError::ProposalErr("Empty qc".to_string())), None, )); let hash = Hash::new(); let lock_hash = gen_hash(); let lock = Lock::new(0, lock_hash.clone()); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, hash.clone(), Some(lock)), SMRTrigger::new(lock_hash.clone(), TriggerType::Proposal, None, 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: hash, lock_round: Some(0), }, None, Some((0, lock_hash)), )); let hash = Hash::new(); let lock_hash = gen_hash(); let lock = Lock::new(0, lock_hash.clone()); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, lock_hash.clone(), Some(lock)), SMRTrigger::new(hash, TriggerType::Proposal, None, 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: lock_hash.clone(), lock_round: None, }, Some(ConsensusError::ProposalErr("Empty qc".to_string())), Some((0, lock_hash)), )); let hash = gen_hash(); let lock_hash = gen_hash(); let lock = Lock::new(0, lock_hash.clone()); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, lock_hash.clone(), Some(lock)), SMRTrigger::new(hash, TriggerType::Proposal, None, 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: lock_hash.clone(), lock_round: Some(0), }, None, Some((0, lock_hash)), )); let hash = Hash::new(); let lock_hash = gen_hash(); let lock = Lock::new(0, lock_hash.clone()); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, lock_hash.clone(), Some(lock)), SMRTrigger::new(hash, TriggerType::Proposal, Some(0), 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: lock_hash.clone(), lock_round: None, }, Some(ConsensusError::ProposalErr("Invalid lock".to_string())), Some((0, lock_hash)), )); let hash = gen_hash(); let lock_hash = gen_hash(); let lock = Lock::new(1, lock_hash.clone()); test_cases.push(StateMachineTestCase::new( InnerState::new(2, Step::Propose, lock_hash.clone(), Some(lock)), SMRTrigger::new(hash, TriggerType::Proposal, Some(0), 0), SMREvent::PrevoteVote { height: 0u64, round: 2u64, block_hash: lock_hash.clone(), lock_round: Some(1), }, None, Some((1, lock_hash)), )); let hash = gen_hash(); let lock_hash = gen_hash(); let lock = Lock::new(1, lock_hash.clone()); test_cases.push(StateMachineTestCase::new( InnerState::new(3, Step::Propose, lock_hash.clone(), Some(lock)), SMRTrigger::new(hash.clone(), TriggerType::Proposal, Some(2), 0), SMREvent::PrevoteVote { height: 0u64, round: 3u64, block_hash: hash, lock_round: None, }, Some(ConsensusError::SelfCheckErr( "Invalid proposal hash".to_string(), )), None, )); let lock_hash = gen_hash(); let lock = Lock::new(1, lock_hash.clone()); test_cases.push(StateMachineTestCase::new( InnerState::new(2, Step::Propose, lock_hash.clone(), Some(lock)), SMRTrigger::new(lock_hash.clone(), TriggerType::Proposal, Some(1), 0), SMREvent::PrevoteVote { height: 0u64, round: 2u64, block_hash: lock_hash.clone(), lock_round: Some(1), }, None, Some((1, lock_hash)), )); for case in test_cases.into_iter() { println!("Proposal test {}/19", index); index += 1; trigger_test( case.base, case.input, case.output, case.err, case.should_lock, ) .await; } println!("Proposal test success"); }
use crate::smr::smr_types::{Lock, SMREvent, SMRTrigger, Step, TriggerType}; use crate::smr::tests::{gen_hash, trigger_test, InnerState, StateMachineTestCase}; use crate::{error::ConsensusError, types::Hash}; #[tokio::test(threaded_scheduler)] async fn test_proposal_trigger() { let mut index = 1; let mut test_cases: Vec<StateMachineTestCase> = Vec::new(); let hash = gen_hash(); test_cases.push(StateMachineTestCase::new( InnerState::new(0, Step::Propose, Hash::new(), None), SMRTrigger::new(hash.clone(), TriggerType::Proposal, None, 0), SMREvent::PrevoteVote { height: 0u64, round: 0u64, block_hash: hash, lock_round: None, }, None, None, )); let hash = Hash::new(); test_cases.push(StateMachineTestCase::new( InnerState::new(0, Step::Propose, Hash::new(), None), SMRTrigger::new(hash.clone(), TriggerType::Proposal, None, 0), SMREvent::PrevoteVote { height: 0u64, round: 0u64, block_hash: hash, lock_round: None, }, Some(ConsensusError::ProposalErr("Empty qc".to_string())), None, )); let hash = Hash::new(); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, Hash::new(), None), SMRTrigger::new(hash.clone(), TriggerType::Proposal, Some(0), 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: hash, lock_round: None, }, Some(ConsensusError::ProposalErr("Invalid lock".to_string())), None, )); let hash = gen_hash(); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, Hash::new(), None), SMRTrigger::new(hash.clone(), TriggerType::Proposal, Some(0), 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: hash, lock_round: None, }, None, None, )); let hash = gen_hash(); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, hash.clone(), None), SMRTrigger::new(hash.clone(), TriggerType::Proposal, Some(0), 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: hash, lock_round: None, }, Some(ConsensusError::SelfCheckErr("".to_string())), None, )); let hash = gen_hash(); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, hash.clone(), None), SMRTrigger::new(hash.clone(), TriggerType::Proposal, None, 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: hash, lock_round: None, }, Some(ConsensusError::SelfCheckErr("".to_string())), None, )); let hash = gen_hash(); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, hash.clone(), None), SMRTrigger::new(Hash::new(), TriggerType::Proposal, Some(0), 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: hash, lock_round: None, }, Some(ConsensusError::ProposalErr("Empty qc".to_string())), None, )); let hash = gen_hash(); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, hash.clone(), None), SMRTrigger::new(Hash::new(), TriggerType::Proposal, None, 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: hash, lock_round: None, }, Some(ConsensusError::ProposalErr("Empty qc".to_string())), None, )); let hash = Hash::new(); let lock_hash = gen_hash(); let lock = Lock::new(0, lock_hash); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, hash.clone(), Some(lock)), SMRTrigger::new(hash.clone(), TriggerType::Proposal, None, 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: hash, lock_round: None, }, Some(ConsensusError::ProposalErr("Empty qc".to_string())), None, )); let hash = Hash::new(); let lock_hash = gen_hash(); let lock = Lock::new(0, lock_hash); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, hash.clone(), Some(lock)), SMRTrigger::new(hash.clone(), TriggerType::Proposal, Some(0), 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: hash, lock_round: None, }, Some(ConsensusError::ProposalErr("Empty qc".to_string())), None, )); let hash = Hash::new(); let lock_hash = gen_hash(); let lock = Lock::new(0, lock_hash); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, hash.clone(), Some(lock)), SMRTrigger::new(hash.clone(), TriggerType::Proposal, None, 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: hash, lock_round: None, }, Some(ConsensusError::ProposalErr("Empty qc".to_string())), None, )); let hash = Hash::new(); let lock_hash = gen_hash(); let lock = Lock::new(0, lock_hash.clone()); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, hash.clone(), Some(lock)), SMRTrigger::new(lock_hash.clone(), TriggerType::Proposal, None, 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: hash, lock_round: Some(0), }, None, Some((0, lock_hash)), )); let hash = Hash::new(); let lock_hash = gen_hash(); let lock = Lock::new(0, lock_hash.clone()); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, lock_hash.clone(), Some(lock)), SMRTrigger::new(hash, TriggerType::Proposal, None, 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: lock_hash.clone(), lock_round: None, }, Some(ConsensusError::ProposalErr("Empty qc".to_string())), Some((0, lock_hash)), )); let hash = gen_hash(); let lock_hash = gen_hash(); let lock = Lock::new(0, lock_hash.clone()); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, lock_hash.clone(), Some(lock)), SMRTrigger::new(hash, TriggerType::Proposal, None, 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: lock_hash.clone(), lock_round: Some(0), }, None, Some((0, lock_hash)), )); let hash = Hash::new(); let lock_hash = gen_hash(); let lock = Lock::new(0, lock_hash.clone()); test_cases.push(StateMachineTestCase::new( InnerState::new(1, Step::Propose, lock_hash.clone(), Some(lock)), SMRTrigger::new(hash, TriggerType::Proposal, Some(0), 0), SMREvent::PrevoteVote { height: 0u64, round: 1u64, block_hash: lock_hash.clone(), lock_round: None, }, Some(ConsensusError::ProposalErr("Invalid lock".to_string())), Some((0, lock_hash)), )); let hash = gen_hash(); let lock_hash = gen_hash(); let lock = Lock::new(1, lock_hash.clone()); test_cases.push(StateMachineTestCase::new( InnerState::new(2, Step::Propose, lock_hash.clone(), Some(lock)), SMRTrigger::new(hash, TriggerType::Proposal, Some(0), 0), SMREvent::Pre
ash = gen_hash(); let lock = Lock::new(1, lock_hash.clone()); test_cases.push(StateMachineTestCase::new( InnerState::new(2, Step::Propose, lock_hash.clone(), Some(lock)), SMRTrigger::new(lock_hash.clone(), TriggerType::Proposal, Some(1), 0), SMREvent::PrevoteVote { height: 0u64, round: 2u64, block_hash: lock_hash.clone(), lock_round: Some(1), }, None, Some((1, lock_hash)), )); for case in test_cases.into_iter() { println!("Proposal test {}/19", index); index += 1; trigger_test( case.base, case.input, case.output, case.err, case.should_lock, ) .await; } println!("Proposal test success"); }
voteVote { height: 0u64, round: 2u64, block_hash: lock_hash.clone(), lock_round: Some(1), }, None, Some((1, lock_hash)), )); let hash = gen_hash(); let lock_hash = gen_hash(); let lock = Lock::new(1, lock_hash.clone()); test_cases.push(StateMachineTestCase::new( InnerState::new(3, Step::Propose, lock_hash.clone(), Some(lock)), SMRTrigger::new(hash.clone(), TriggerType::Proposal, Some(2), 0), SMREvent::PrevoteVote { height: 0u64, round: 3u64, block_hash: hash, lock_round: None, }, Some(ConsensusError::SelfCheckErr( "Invalid proposal hash".to_string(), )), None, )); let lock_h
random
[ { "content": "fn gen_hash() -> Hash {\n\n Hash::from((0..16).map(|_| random::<u8>()).collect::<Vec<_>>())\n\n}\n", "file_path": "tests/test_utils.rs", "rank": 0, "score": 137237.66059652547 }, { "content": "fn gen_hash() -> Hash {\n\n Hash::from((0..16).map(|_| random::<u8>()).collect::<Vec<_>>())\n\n}\n\n\n\nasync fn trigger_test(\n\n base: InnerState,\n\n input: SMRTrigger,\n\n output: SMREvent,\n\n err: Option<ConsensusError>,\n\n should_lock: Option<(u64, Hash)>,\n\n) {\n\n let (trigger_tx, trigger_rx) = unbounded();\n\n // let (event_tx, mut event_rx) = unbounded();\n\n // let (state_tx, _state_rx) = unbounded();\n\n\n\n let (mut state_machine, mut event, _event) = StateMachine::new(trigger_rx);\n\n state_machine.set_status(base.round, base.step, base.proposal_hash, base.lock);\n\n trigger_tx.unbounded_send(input).unwrap();\n\n\n\n let res = state_machine.next().await;\n", "file_path": "src/smr/tests/mod.rs", "rank": 1, "score": 134843.1725958614 }, { "content": "fn hash(bytes: &Bytes) -> Bytes {\n\n let mut out = [0u8; 32];\n\n out.copy_from_slice(&HASHER_INST.digest(bytes));\n\n BytesMut::from(&out[..]).freeze()\n\n}\n\n\n", "file_path": "examples/salon.rs", "rank": 2, "score": 98285.75261216071 }, { "content": "pub fn hash(bytes: &Bytes) -> Bytes {\n\n let mut out = [0u8; 32];\n\n out.copy_from_slice(&HASHER_INST.digest(bytes));\n\n BytesMut::from(&out[..]).freeze()\n\n}\n\n\n", "file_path": "tests/integration_tests/utils.rs", "rank": 3, "score": 88589.09441692402 }, { "content": "fn synchronize_height(\n\n records: &Record,\n\n alive_nodes: Vec<Node>,\n\n alive_handlers: Vec<Arc<Participant>>,\n\n test_id: u64,\n\n) {\n\n let interval = records.interval;\n\n let height_record = Arc::<Mutex<HashMap<Bytes, u64>>>::clone(&records.height_record);\n\n let node_record = records.node_record.clone();\n\n\n\n tokio::spawn(async move {\n\n thread::sleep(Duration::from_millis(interval));\n\n let max_height = get_max_alive_height(&height_record, &alive_nodes);\n\n let height_record = height_record.lock().unwrap();\n\n height_record.iter().for_each(|(address, height)| {\n\n if *height < max_height {\n\n alive_handlers\n\n .iter()\n\n .filter(|node| node.adapter.address == address)\n\n .for_each(|node| {\n", "file_path": "tests/integration_tests/run.rs", "rank": 4, "score": 85550.07322882435 }, { "content": "pub fn get_max_alive_height(\n\n height_record: &Arc<Mutex<HashMap<Bytes, u64>>>,\n\n alives: &[Node],\n\n) -> u64 {\n\n let height_record = height_record.lock().unwrap();\n\n if let Some(max_height) = height_record\n\n .clone()\n\n .into_iter()\n\n .filter(|(address, _)| alives.iter().any(|node| node.address == address))\n\n .collect::<HashMap<Bytes, u64>>()\n\n .values()\n\n .max()\n\n {\n\n *max_height\n\n } else {\n\n 0\n\n }\n\n}\n\n\n", "file_path": "tests/integration_tests/utils.rs", "rank": 5, "score": 76345.01829372345 }, { "content": "pub fn get_index(nodes: &[Node], address: &Bytes) -> usize {\n\n let mut index = std::usize::MAX;\n\n nodes.iter().enumerate().for_each(|(i, node)| {\n\n if node.address == address {\n\n index = i;\n\n }\n\n });\n\n index\n\n}\n", "file_path": "tests/integration_tests/utils.rs", "rank": 6, "score": 64364.27871529063 }, { "content": "pub fn get_index_array(nodes: &[Node], alives: &[Node]) -> Vec<usize> {\n\n nodes\n\n .iter()\n\n .enumerate()\n\n .filter(|(_, node)| alives.contains(node))\n\n .map(|(i, _)| i)\n\n .collect()\n\n}\n\n\n", "file_path": "tests/integration_tests/utils.rs", "rank": 7, "score": 60368.48541034688 }, { "content": "pub fn get_random_proposer_index(seed: u64, weights: &[u64], weight_sum: u64) -> usize {\n\n let tmp = u64::max_value() / weight_sum;\n\n let mut rng = Pcg::seed_from_u64(seed);\n\n let mut acc = 0u64;\n\n let mut random_digit = rng.next_u64();\n\n while random_digit >= weight_sum * tmp {\n\n random_digit = rng.next_u64();\n\n }\n\n\n\n for (index, weight) in weights.iter().enumerate() {\n\n acc += *weight;\n\n if random_digit < acc * tmp {\n\n return index;\n\n }\n\n }\n\n 0\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "src/utils/rand_proposer.rs", "rank": 8, "score": 55546.978404068854 }, { "content": "#[test]\n\nfn test_block_codec() {\n\n for _ in 0..100 {\n\n let content = gen_random_bytes();\n\n let block = Block::from(content);\n\n\n\n let decode: Block = Codec::decode(Codec::encode(&block).unwrap()).unwrap();\n\n assert_eq!(decode, block);\n\n }\n\n}\n", "file_path": "tests/integration_tests/primitive.rs", "rank": 9, "score": 52058.546398923674 }, { "content": "fn run_alive_nodes(\n\n records: &Record,\n\n alive_nodes: Vec<Node>,\n\n) -> (Vec<Arc<Participant>>, Vec<Sender<OverlordMsg<Block>>>) {\n\n let records = records.as_internal();\n\n let interval = records.interval;\n\n let alive_num = alive_nodes.len();\n\n\n\n let channels: Vec<Channel> = (0..alive_num).map(|_| unbounded()).collect();\n\n let hearings: HashMap<Bytes, Receiver<OverlordMsg<Block>>> = alive_nodes\n\n .iter()\n\n .map(|node| node.address.clone())\n\n .zip(channels.iter().map(|(_, receiver)| receiver.clone()))\n\n .collect();\n\n\n\n let mut alive_handlers = Vec::new();\n\n for node in alive_nodes.iter() {\n\n let address = node.address.clone();\n\n let mut talk_to: HashMap<Bytes, Sender<OverlordMsg<Block>>> = alive_nodes\n\n .iter()\n", "file_path": "tests/integration_tests/run.rs", "rank": 10, "score": 52058.546398923674 }, { "content": "fn kill_alive_nodes(\n\n alive_handlers: Vec<Arc<Participant>>,\n\n senders: Vec<Sender<OverlordMsg<Block>>>,\n\n) {\n\n alive_handlers.iter().for_each(|node| {\n\n node.handler\n\n .send_msg(Context::new(), OverlordMsg::Stop)\n\n .unwrap()\n\n });\n\n senders\n\n .iter()\n\n .for_each(|sender| sender.send(OverlordMsg::Stop).unwrap());\n\n}\n", "file_path": "tests/integration_tests/run.rs", "rank": 11, "score": 52058.546398923674 }, { "content": "fn gen_random_bytes() -> Bytes {\n\n let vec: Vec<u8> = (0..10).map(|_| random::<u8>()).collect();\n\n Bytes::from(vec)\n\n}\n\n\n", "file_path": "examples/salon.rs", "rank": 12, "score": 51189.19669102145 }, { "content": "/// give the validators list and bitmap, returns the activated validators, the authority_list MUST\n\n/// be sorted\n\npub fn extract_voters(\n\n authority_list: &mut Vec<Node>,\n\n address_bitmap: &bytes::Bytes,\n\n) -> ConsensusResult<Vec<Address>> {\n\n authority_list.sort();\n\n let bitmap = BitVec::from_bytes(&address_bitmap);\n\n let voters: Vec<Address> = bitmap\n\n .iter()\n\n .zip(authority_list.iter())\n\n .filter(|pair| pair.0) //the bitmap must hit\n\n .map(|pair| pair.1.address.clone()) //get the corresponding address\n\n .collect::<Vec<_>>();\n\n Ok(voters)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n extern crate test;\n\n\n\n use bit_vec::BitVec;\n", "file_path": "src/utils/auth_manage.rs", "rank": 13, "score": 49920.652580198876 }, { "content": "fn mock_init_qc() -> AggregatedVote {\n\n let aggregated_signature = AggregatedSignature {\n\n signature: Signature::default(),\n\n address_bitmap: Bytes::default(),\n\n };\n\n\n\n AggregatedVote {\n\n signature: aggregated_signature,\n\n vote_type: VoteType::Precommit,\n\n height: 0u64,\n\n round: 0u64,\n\n block_hash: Hash::default(),\n\n leader: Address::default(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use std::time::Duration;\n\n\n", "file_path": "src/state/process.rs", "rank": 14, "score": 48748.55552216481 }, { "content": "#[derive(Clone, Debug, PartialEq, Eq)]\n\nstruct ProposalRoundCollector<T: Codec>(HashMap<u64, SignedProposal<T>>);\n\n\n\nimpl<T> ProposalRoundCollector<T>\n\nwhere\n\n T: Codec,\n\n{\n\n fn new() -> Self {\n\n ProposalRoundCollector(HashMap::new())\n\n }\n\n\n\n fn insert(&mut self, round: u64, proposal: SignedProposal<T>) -> ConsensusResult<()> {\n\n if let Some(sp) = self.0.get(&round) {\n\n if sp == &proposal {\n\n return Ok(());\n\n }\n\n return Err(ConsensusError::Other(\"_\".to_string()));\n\n }\n\n self.0.insert(round, proposal);\n\n Ok(())\n\n }\n", "file_path": "src/state/collection.rs", "rank": 15, "score": 48622.52173503536 }, { "content": "fn timer_config() -> Option<DurationConfig> {\n\n Some(DurationConfig::new(10, 10, 10, 3))\n\n}\n", "file_path": "examples/salon.rs", "rank": 16, "score": 48174.01211930377 }, { "content": "pub fn gen_random_bytes() -> Bytes {\n\n let vec: Vec<u8> = (0..10).map(|_| random::<u8>()).collect();\n\n Bytes::from(vec)\n\n}\n\n\n", "file_path": "tests/integration_tests/utils.rs", "rank": 17, "score": 45915.66861471359 }, { "content": "pub fn timer_config() -> Option<DurationConfig> {\n\n Some(DurationConfig::new(20, 10, 5, 10))\n\n}\n\n\n", "file_path": "tests/integration_tests/utils.rs", "rank": 18, "score": 43452.38651658394 }, { "content": "pub fn to_hex(address: &Bytes) -> String {\n\n hex::encode(address)\n\n}\n\n\n", "file_path": "tests/integration_tests/utils.rs", "rank": 19, "score": 43232.99169997537 }, { "content": "pub fn to_hex_strings(nodes: &[Node]) -> Vec<String> {\n\n nodes\n\n .iter()\n\n .map(|node| hex::encode(&node.address))\n\n .collect()\n\n}\n\n\n", "file_path": "tests/integration_tests/utils.rs", "rank": 20, "score": 40230.464584131165 }, { "content": "pub fn create_alive_nodes(nodes: Vec<Node>) -> Vec<Node> {\n\n let node_num = nodes.len();\n\n let thresh_num = node_num * 2 / 3 + 1;\n\n let rand_num = 0;\n\n // let rand_num = random::<usize>() % (node_num - thresh_num + 1);\n\n let mut alive_nodes = nodes;\n\n alive_nodes.shuffle(&mut thread_rng());\n\n while alive_nodes.len() > thresh_num + rand_num {\n\n alive_nodes.pop();\n\n }\n\n alive_nodes\n\n}\n\n\n", "file_path": "tests/integration_tests/utils.rs", "rank": 21, "score": 37618.10404028393 }, { "content": "/// deserialize Bytes with hex\n\npub fn deserialize<'de, D>(deserializer: D) -> Result<Bytes, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n deserializer.deserialize_str(StringVisit)\n\n}\n\n\n\nimpl<'de> de::Visitor<'de> for StringVisit {\n\n type Value = Bytes;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n formatter.write_str(\"byte array\")\n\n }\n\n\n\n #[inline]\n\n fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n let value = hex::decode(v).map_err(de::Error::custom)?;\n\n Ok(Bytes::from(value))\n\n }\n\n}\n", "file_path": "src/serde_hex.rs", "rank": 22, "score": 35493.847634035585 }, { "content": "#[derive(Clone, Debug, PartialEq, Eq)]\n\nstruct RoundCollector {\n\n qc: QuorumCertificate,\n\n prevote: Votes,\n\n precommit: Votes,\n\n}\n\n\n\nimpl RoundCollector {\n\n fn new() -> Self {\n\n RoundCollector {\n\n qc: QuorumCertificate::new(),\n\n prevote: Votes::new(),\n\n precommit: Votes::new(),\n\n }\n\n }\n\n\n\n fn insert_vote(&mut self, hash: Hash, vote: SignedVote, addr: Address) {\n\n if vote.is_prevote() {\n\n self.prevote.insert(hash, addr, vote);\n\n } else {\n\n self.precommit.insert(hash, addr, vote);\n", "file_path": "src/state/collection.rs", "rank": 23, "score": 33710.19452085813 }, { "content": "/// serialize Bytes with hex\n\npub fn serialize<S>(val: &Bytes, s: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n s.serialize_str(&hex::encode(val))\n\n}\n\n\n", "file_path": "src/serde_hex.rs", "rank": 24, "score": 33661.504947697584 }, { "content": "pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<Bytes>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n #[derive(Deserialize)]\n\n #[serde(field_identifier, rename_all = \"lowercase\")]\n\n enum Field {\n\n Inner,\n\n }\n\n\n\n struct VecTVisitor;\n\n\n\n impl<'de> de::Visitor<'de> for VecTVisitor {\n\n type Value = VecT;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"serde multi\")\n\n }\n\n\n\n fn visit_seq<V>(self, mut seq: V) -> Result<Self::Value, V::Error>\n", "file_path": "src/serde_multi_hex.rs", "rank": 25, "score": 33438.752511342274 }, { "content": "pub fn serialize<S>(val: &[Bytes], s: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n let inner = val\n\n .iter()\n\n .map(|t| TWrapper {\n\n inner: t.to_owned(),\n\n })\n\n .collect::<Vec<TWrapper>>();\n\n\n\n let vec_t = VecT { inner };\n\n\n\n let mut state = s.serialize_struct(\"VecT\", 1)?;\n\n state.serialize_field(\"inner\", &vec_t.inner)?;\n\n state.end()\n\n}\n\n\n", "file_path": "src/serde_multi_hex.rs", "rank": 26, "score": 32936.346926652805 }, { "content": "#[derive(Clone, Debug, PartialEq, Eq)]\n\nstruct VoteRoundCollector {\n\n general: HashMap<u64, RoundCollector>,\n\n qc_by_hash: HashMap<Hash, QuorumCertificate>,\n\n}\n\n\n\nimpl VoteRoundCollector {\n\n fn new() -> Self {\n\n VoteRoundCollector {\n\n general: HashMap::new(),\n\n qc_by_hash: HashMap::new(),\n\n }\n\n }\n\n\n\n fn insert_vote(&mut self, hash: Hash, vote: SignedVote, addr: Address) {\n\n self.general\n\n .entry(vote.get_round())\n\n .or_insert_with(RoundCollector::new)\n\n .insert_vote(hash, vote, addr);\n\n }\n\n\n", "file_path": "src/state/collection.rs", "rank": 27, "score": 32323.25779570454 }, { "content": "use crate::smr::smr_types::{FromWhere, Lock, SMREvent, SMRStatus, SMRTrigger, Step, TriggerType};\n\nuse crate::smr::tests::{gen_hash, trigger_test, InnerState, StateMachineTestCase};\n\nuse crate::{error::ConsensusError, types::Hash};\n\n\n\n#[tokio::test(threaded_scheduler)]\n\nasync fn test_new_height() {\n\n let mut index = 1;\n\n let mut test_cases: Vec<StateMachineTestCase> = Vec::new();\n\n\n\n // Test case 01:\n\n // In propose step, self proposal is empty and no lock, goto new height.\n\n // The output should be new round info.\n\n test_cases.push(StateMachineTestCase::new(\n\n InnerState::new(0, Step::Propose, Hash::new(), None),\n\n SMRTrigger::new(\n\n Hash::new(),\n\n TriggerType::NewHeight(SMRStatus::new(1)),\n\n None,\n\n 0,\n\n ),\n", "file_path": "src/smr/tests/new_height_test.rs", "rank": 28, "score": 32251.980239837758 }, { "content": " SMREvent::NewRoundInfo {\n\n height: 1u64,\n\n round: 0u64,\n\n lock_round: None,\n\n lock_proposal: None,\n\n new_interval: None,\n\n new_config: None,\n\n from_where: FromWhere::PrecommitQC(u64::max_value()),\n\n },\n\n None,\n\n None,\n\n ));\n\n\n\n // Test case 02:\n\n // In propose step, self proposal is not empty and with a lock, goto new height.\n\n // The output should be new round info.\n\n let hash = gen_hash();\n\n let lock = Lock {\n\n round: 0u64,\n\n hash: hash.clone(),\n", "file_path": "src/smr/tests/new_height_test.rs", "rank": 29, "score": 32247.38153772097 }, { "content": " None,\n\n ));\n\n\n\n // Test case 07:\n\n // In prevote step, self proposal is not empty and not lock, goto new height.\n\n // The output should be new round info.\n\n let hash = gen_hash();\n\n let lock = Lock::new(0u64, hash.clone());\n\n test_cases.push(StateMachineTestCase::new(\n\n InnerState::new(1, Step::Prevote, hash, Some(lock)),\n\n SMRTrigger::new(\n\n Hash::new(),\n\n TriggerType::NewHeight(SMRStatus::new(1)),\n\n None,\n\n 0,\n\n ),\n\n SMREvent::NewRoundInfo {\n\n height: 1u64,\n\n round: 0u64,\n\n lock_round: None,\n", "file_path": "src/smr/tests/new_height_test.rs", "rank": 30, "score": 32247.11729833996 }, { "content": " ));\n\n\n\n // Test case 03:\n\n // In propose step, self proposal is empty but with a lock, goto new height.\n\n // This is an incorrect situation, the process cannot pass self check.\n\n let hash = gen_hash();\n\n let lock = Lock::new(0u64, hash.clone());\n\n test_cases.push(StateMachineTestCase::new(\n\n InnerState::new(0, Step::Propose, Hash::new(), Some(lock)),\n\n SMRTrigger::new(\n\n Hash::new(),\n\n TriggerType::NewHeight(SMRStatus::new(1)),\n\n None,\n\n 0,\n\n ),\n\n SMREvent::NewRoundInfo {\n\n height: 1u64,\n\n round: 0u64,\n\n lock_round: None,\n\n lock_proposal: None,\n", "file_path": "src/smr/tests/new_height_test.rs", "rank": 31, "score": 32246.6332072965 }, { "content": " SMREvent::NewRoundInfo {\n\n height: 1u64,\n\n round: 0u64,\n\n lock_round: None,\n\n lock_proposal: None,\n\n new_interval: None,\n\n new_config: None,\n\n from_where: FromWhere::PrecommitQC(u64::max_value()),\n\n },\n\n Some(ConsensusError::SelfCheckErr(\"\".to_string())),\n\n None,\n\n ));\n\n\n\n // Test case 05:\n\n // In prevote step, self proposal is empty and not lock, goto new height.\n\n // The output should be new round info.\n\n let hash = Hash::new();\n\n test_cases.push(StateMachineTestCase::new(\n\n InnerState::new(0, Step::Prevote, hash, None),\n\n SMRTrigger::new(\n", "file_path": "src/smr/tests/new_height_test.rs", "rank": 32, "score": 32245.90924131128 }, { "content": " Hash::new(),\n\n TriggerType::NewHeight(SMRStatus::new(1)),\n\n None,\n\n 0,\n\n ),\n\n SMREvent::NewRoundInfo {\n\n height: 1u64,\n\n round: 0u64,\n\n lock_round: None,\n\n lock_proposal: None,\n\n new_interval: None,\n\n new_config: None,\n\n from_where: FromWhere::PrecommitQC(u64::max_value()),\n\n },\n\n None,\n\n None,\n\n ));\n\n\n\n // Test case 06:\n\n // In prevote step, self proposal is not empty and not lock, goto new height.\n", "file_path": "src/smr/tests/new_height_test.rs", "rank": 33, "score": 32245.623277906056 }, { "content": " // The output should be new round info.\n\n let hash = gen_hash();\n\n test_cases.push(StateMachineTestCase::new(\n\n InnerState::new(0, Step::Prevote, hash, None),\n\n SMRTrigger::new(\n\n Hash::new(),\n\n TriggerType::NewHeight(SMRStatus::new(1)),\n\n None,\n\n 0,\n\n ),\n\n SMREvent::NewRoundInfo {\n\n height: 1u64,\n\n round: 0u64,\n\n lock_round: None,\n\n lock_proposal: None,\n\n new_interval: None,\n\n new_config: None,\n\n from_where: FromWhere::PrecommitQC(u64::max_value()),\n\n },\n\n None,\n", "file_path": "src/smr/tests/new_height_test.rs", "rank": 34, "score": 32245.31525304985 }, { "content": " };\n\n test_cases.push(StateMachineTestCase::new(\n\n InnerState::new(1, Step::Propose, hash, Some(lock)),\n\n SMRTrigger::new(\n\n Hash::new(),\n\n TriggerType::NewHeight(SMRStatus::new(1)),\n\n None,\n\n 0,\n\n ),\n\n SMREvent::NewRoundInfo {\n\n height: 1u64,\n\n round: 0u64,\n\n lock_round: None,\n\n lock_proposal: None,\n\n new_interval: None,\n\n new_config: None,\n\n from_where: FromWhere::PrecommitQC(u64::max_value()),\n\n },\n\n Some(ConsensusError::SelfCheckErr(\"\".to_string())),\n\n None,\n", "file_path": "src/smr/tests/new_height_test.rs", "rank": 35, "score": 32244.66993689016 }, { "content": " new_interval: None,\n\n new_config: None,\n\n from_where: FromWhere::PrecommitQC(u64::max_value()),\n\n },\n\n Some(ConsensusError::SelfCheckErr(\"\".to_string())),\n\n None,\n\n ));\n\n\n\n // Test case 04:\n\n // In propose step, self proposal is not empty and not lock, goto new height.\n\n // This is an incorrect situation, the process cannot pass self check.\n\n let hash = gen_hash();\n\n test_cases.push(StateMachineTestCase::new(\n\n InnerState::new(0, Step::Propose, hash, None),\n\n SMRTrigger::new(\n\n Hash::new(),\n\n TriggerType::NewHeight(SMRStatus::new(1)),\n\n None,\n\n 0,\n\n ),\n", "file_path": "src/smr/tests/new_height_test.rs", "rank": 36, "score": 32240.797612684462 }, { "content": " lock_proposal: None,\n\n new_interval: None,\n\n new_config: None,\n\n from_where: FromWhere::PrecommitQC(u64::max_value()),\n\n },\n\n None,\n\n None,\n\n ));\n\n\n\n for case in test_cases.into_iter() {\n\n println!(\"New height test {}/16\", index);\n\n index += 1;\n\n trigger_test(\n\n case.base,\n\n case.input,\n\n case.output,\n\n case.err,\n\n case.should_lock,\n\n )\n\n .await;\n\n }\n\n println!(\"New height test success\");\n\n}\n", "file_path": "src/smr/tests/new_height_test.rs", "rank": 37, "score": 32239.24296224292 }, { "content": "type Pile<T> = RwLock<Option<T>>;\n\n\n\n/// An overlord consensus instance.\n\npub struct Overlord<T: Codec, F: Consensus<T>, C: Crypto, W: Wal> {\n\n sender: Pile<UnboundedSender<(Context, OverlordMsg<T>)>>,\n\n state_rx: Pile<UnboundedReceiver<(Context, OverlordMsg<T>)>>,\n\n address: Pile<Address>,\n\n consensus: Pile<Arc<F>>,\n\n crypto: Pile<Arc<C>>,\n\n wal: Pile<Arc<W>>,\n\n}\n\n\n\nimpl<T, F, C, W> Overlord<T, F, C, W>\n\nwhere\n\n T: Codec + Send + Sync + 'static,\n\n F: Consensus<T> + 'static,\n\n C: Crypto + Send + Sync + 'static,\n\n W: Wal + 'static,\n\n{\n\n /// Create a new overlord and return an overlord instance with an unbounded receiver.\n", "file_path": "src/overlord.rs", "rank": 38, "score": 29885.576860960275 }, { "content": "#[derive(Serialize, Deserialize, Clone)]\n\nstruct TupleHeightRecord(#[serde(with = \"overlord::serde_hex\")] Bytes, u64);\n\n\n", "file_path": "tests/integration_tests/wal.rs", "rank": 39, "score": 25017.83159465822 }, { "content": " self.height = height;\n\n self.round = INIT_ROUND;\n\n trace::start_step((Step::Propose).to_string(), self.round, height);\n\n self.goto_step(Step::Propose);\n\n self.block_hash = Hash::new();\n\n self.lock = None;\n\n }\n\n\n\n /// Keep the lock, if any, when go to the next round.\n\n fn goto_next_round(&mut self) {\n\n info!(\"Overlord: SMR goto next round {}\", self.round + 1);\n\n self.round += 1;\n\n self.goto_step(Step::Propose);\n\n }\n\n\n\n fn set_timer_after_wal(&mut self) -> ConsensusResult<()> {\n\n let (lock_round, lock_proposal) = if let Some(lock) = &self.lock {\n\n (Some(lock.round), Some(lock.hash.clone()))\n\n } else {\n\n (None, None)\n", "file_path": "src/smr/state_machine.rs", "rank": 40, "score": 29.627392115520912 }, { "content": "use crate::smr::smr_types::{FromWhere, Lock, SMREvent, SMRTrigger, Step, TriggerType};\n\nuse crate::smr::tests::{gen_hash, trigger_test, InnerState, StateMachineTestCase};\n\nuse crate::{error::ConsensusError, types::Hash};\n\n\n\n/// Test state machine handle precommitQC trigger.\n\n/// There are a total of *2 × 4 + 3 = 11* test cases.\n\n#[tokio::test(threaded_scheduler)]\n\nasync fn test_precommit_trigger() {\n\n let mut index = 1;\n\n let mut test_cases: Vec<StateMachineTestCase> = Vec::new();\n\n\n\n // Test case 02:\n\n // self proposal is not empty and not lock, precommit is nil.\n\n // The output should be new round info without lock.\n\n let hash = Hash::new();\n\n test_cases.push(StateMachineTestCase::new(\n\n InnerState::new(0, Step::Precommit, Hash::new(), None),\n\n SMRTrigger::new(hash.clone(), TriggerType::PrecommitQC, Some(0), 0),\n\n SMREvent::NewRoundInfo {\n\n height: 0u64,\n", "file_path": "src/smr/tests/precommit_test.rs", "rank": 41, "score": 26.50527298736181 }, { "content": " if self.height != height {\n\n return Ok(());\n\n }\n\n\n\n if self.step == Step::Commit {\n\n return Ok(());\n\n }\n\n\n\n info!(\n\n \"Overlord: SMR triggered by precommit QC hash {:?}, from {:?}, height {}, round {}\",\n\n hex::encode(precommit_hash.clone()),\n\n source,\n\n self.height,\n\n self.round\n\n );\n\n\n\n let (lock_round, lock_proposal) = self\n\n .lock\n\n .clone()\n\n .map_or_else(|| (None, None), |lock| (Some(lock.round), Some(lock.hash)));\n", "file_path": "src/smr/state_machine.rs", "rank": 42, "score": 25.919913901046094 }, { "content": " height: self.height,\n\n round: self.round + 1,\n\n lock_round,\n\n lock_proposal,\n\n new_interval: None,\n\n new_config: None,\n\n from_where: FromWhere::PrecommitQC(precommit_round),\n\n })?;\n\n\n\n self.goto_next_round();\n\n return Ok(());\n\n }\n\n\n\n self.check()?;\n\n self.throw_event(SMREvent::Commit(precommit_hash))?;\n\n self.goto_step(Step::Commit);\n\n Ok(())\n\n }\n\n\n\n fn throw_event(&mut self, event: SMREvent) -> ConsensusResult<()> {\n", "file_path": "src/smr/state_machine.rs", "rank": 43, "score": 25.630823404388646 }, { "content": " };\n\n\n\n let event = match self.step {\n\n Step::Propose => SMREvent::NewRoundInfo {\n\n height: self.height,\n\n round: self.round,\n\n lock_round,\n\n lock_proposal,\n\n new_interval: None,\n\n new_config: None,\n\n from_where: FromWhere::PrecommitQC(u64::max_value()),\n\n },\n\n Step::Prevote => SMREvent::PrevoteVote {\n\n height: self.height,\n\n round: self.round,\n\n block_hash: Hash::new(),\n\n lock_round,\n\n },\n\n Step::Precommit => SMREvent::PrecommitVote {\n\n height: self.height,\n", "file_path": "src/smr/state_machine.rs", "rank": 44, "score": 25.00028167495414 }, { "content": " // {\n\n // return Err(ConsensusError::SelfCheckErr(\"Lock\".to_string()));\n\n // }\n\n\n\n // // While self step lt precommit and round is 0, self lock must be none.\n\n // if self.step < Step::Precommit && self.round == 0 && self.lock.is_some() {\n\n // return Err(ConsensusError::SelfCheckErr(format!(\n\n // \"Invalid lock, height {}, round {}\",\n\n // self.height, self.round\n\n // )));\n\n // }\n\n\n\n // // While in precommit step, the lock and the proposal hash must be NOR.\n\n // if self.step == Step::Precommit &&\n\n // (self.block_hash.is_empty().bitxor(self.lock.is_none())) {\n\n // return Err(ConsensusError::SelfCheckErr(format!(\n\n // \"Invalid status in precommit, height {}, round {}\",\n\n // self.height, self.round\n\n // )));\n\n // }\n", "file_path": "src/smr/state_machine.rs", "rank": 45, "score": 24.87984752273005 }, { "content": " Ok(())\n\n }\n\n\n\n #[cfg(test)]\n\n pub fn set_status(&mut self, round: u64, step: Step, proposal_hash: Hash, lock: Option<Lock>) {\n\n self.round = round;\n\n self.goto_step(step);\n\n self.set_proposal(proposal_hash);\n\n self.lock = lock;\n\n }\n\n\n\n #[cfg(test)]\n\n pub fn get_lock(&mut self) -> Option<Lock> {\n\n self.lock.clone()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use bytes::Bytes;\n", "file_path": "src/smr/state_machine.rs", "rank": 46, "score": 24.641995276706098 }, { "content": " source: TriggerSource::State,\n\n hash: hash.clone(),\n\n round: lock_round,\n\n height: self.height,\n\n wal_info: None,\n\n })?;\n\n\n\n debug!(\"Overlord: state check the whole block\");\n\n self.check_block(ctx, hash, block).await;\n\n Ok(())\n\n }\n\n\n\n async fn handle_vote_event(\n\n &mut self,\n\n hash: Hash,\n\n vote_type: VoteType,\n\n lock_round: Option<u64>,\n\n ) -> ConsensusResult<()> {\n\n info!(\n\n \"Overlord: state receive {:?} vote event height {}, round {}, hash {:?}\",\n", "file_path": "src/state/process.rs", "rank": 47, "score": 24.209278942340966 }, { "content": " round: self.round,\n\n block_hash: Hash::new(),\n\n lock_round,\n\n },\n\n Step::Brake => SMREvent::Brake {\n\n height: self.height,\n\n round: self.round,\n\n lock_round,\n\n },\n\n _ => unreachable!(),\n\n };\n\n self.throw_timer_event(event)\n\n }\n\n\n\n /// Goto the given step.\n\n #[inline]\n\n fn goto_step(&mut self, step: Step) {\n\n debug!(\"Overlord: SMR goto step {:?}\", step);\n\n trace::start_step(step.clone().to_string(), self.round, self.height);\n\n self.step = step;\n", "file_path": "src/smr/state_machine.rs", "rank": 48, "score": 24.076369658677535 }, { "content": "use crate::smr::smr_types::{Lock, SMREvent, SMRTrigger, Step, TriggerType};\n\nuse crate::smr::tests::{gen_hash, trigger_test, InnerState, StateMachineTestCase};\n\nuse crate::{error::ConsensusError, types::Hash};\n\n\n\n/// Test state machine handle prevoteQC trigger.\n\n/// There are a total of *2 × 4 + 2 = 10* test cases.\n\n#[tokio::test(threaded_scheduler)]\n\nasync fn test_prevote_trigger() {\n\n let mut index = 1;\n\n let mut test_cases: Vec<StateMachineTestCase> = Vec::new();\n\n\n\n // Test case 01:\n\n // self proposal is not empty and not lock, prevote is not nil.\n\n // The output should be precommit vote to the prevote hash.\n\n let hash = gen_hash();\n\n test_cases.push(StateMachineTestCase::new(\n\n InnerState::new(0, Step::Prevote, Hash::new(), None),\n\n SMRTrigger::new(hash.clone(), TriggerType::PrevoteQC, Some(0), 0),\n\n SMREvent::PrecommitVote {\n\n height: 0u64,\n", "file_path": "src/smr/tests/prevote_test.rs", "rank": 50, "score": 23.657882097824597 }, { "content": " \"round\": self.round,\n\n \"step\": step.to_string(),\n\n \"error\": e.to_string(),\n\n })),\n\n );\n\n\n\n error!(\"Overlord: state save wal error {:?}\", e);\n\n ConsensusError::SaveWalErr {\n\n height: self.height,\n\n round: self.round,\n\n step: step.to_string(),\n\n }\n\n })?;\n\n Ok(())\n\n }\n\n\n\n async fn save_wal_with_lock_round(\n\n &mut self,\n\n step: Step,\n\n lock_round: Option<u64>,\n", "file_path": "src/state/process.rs", "rank": 51, "score": 23.596478431599817 }, { "content": " (None, Hash::new())\n\n };\n\n\n\n self.throw_event(SMREvent::PrevoteVote {\n\n height: self.height,\n\n round: self.round,\n\n block_hash: hash,\n\n lock_round: round,\n\n })?;\n\n self.goto_step(Step::Prevote);\n\n return Ok(());\n\n } else if proposal_hash.is_empty() {\n\n return Err(ConsensusError::ProposalErr(\"Empty proposal\".to_string()));\n\n }\n\n\n\n // update PoLC\n\n self.check()?;\n\n if let Some(lock_round) = lock_round {\n\n if let Some(lock) = self.lock.clone() {\n\n debug!(\"Overlord: SMR handle proposal with a lock\");\n", "file_path": "src/smr/state_machine.rs", "rank": 52, "score": 23.578586595794295 }, { "content": " new_config: None,\n\n from_where: FromWhere::PrecommitQC(0),\n\n };\n\n\n\n let prevote_event = SMREvent::PrevoteVote {\n\n height: 0u64,\n\n round: 0u64,\n\n block_hash: Hash::new(),\n\n lock_round: None,\n\n };\n\n\n\n let precommit_event = SMREvent::PrecommitVote {\n\n height: 0u64,\n\n round: 0u64,\n\n block_hash: Hash::new(),\n\n lock_round: None,\n\n };\n\n\n\n tokio::spawn(async move {\n\n loop {\n", "file_path": "src/timer.rs", "rank": 53, "score": 23.346702167841876 }, { "content": " let lock_round = wal_info.lock.clone().map(|lock| lock.lock_round);\n\n self.handle_brake(self.round, lock_round).await?;\n\n }\n\n\n\n self.state_machine.trigger(SMRTrigger {\n\n trigger_type: TriggerType::WalInfo,\n\n source: TriggerSource::State,\n\n hash: Hash::new(),\n\n round: None,\n\n height: self.height,\n\n wal_info: Some(wal_info.into_smr_base()),\n\n })?;\n\n Ok(())\n\n }\n\n\n\n async fn load_wal(&mut self) -> ConsensusResult<Option<WalInfo<T>>> {\n\n let tmp = self\n\n .wal\n\n .load()\n\n .await\n", "file_path": "src/state/process.rs", "rank": 54, "score": 23.30322883361439 }, { "content": " self.goto_step(Step::Precommit);\n\n return Ok(());\n\n }\n\n\n\n // A prevote QC from timer which means prevote timeout can not lead to unlock. Therefore,\n\n // only prevote QCs from state will update the PoLC. If the prevote QC is from timer, goto\n\n // precommit step directly.\n\n self.check()?;\n\n let vote_round = prevote_round;\n\n self.update_polc(prevote_hash, vote_round);\n\n\n\n if vote_round > self.round {\n\n let (lock_round, lock_proposal) = self\n\n .lock\n\n .clone()\n\n .map_or_else(|| (None, None), |lock| (Some(lock.round), Some(lock.hash)));\n\n\n\n self.round = vote_round;\n\n self.throw_event(SMREvent::NewRoundInfo {\n\n height: self.height,\n", "file_path": "src/smr/state_machine.rs", "rank": 55, "score": 23.006899844878262 }, { "content": " });\n\n }\n\n\n\n async fn save_wal(&mut self, step: Step, lock: Option<WalLock<T>>) -> ConsensusResult<()> {\n\n let wal_info = WalInfo {\n\n height: self.height,\n\n round: self.round,\n\n step: step.clone(),\n\n from: self.update_from_where.clone(),\n\n lock,\n\n };\n\n\n\n self.wal\n\n .save(Bytes::from(encode(&wal_info)))\n\n .await\n\n .map_err(|e| {\n\n trace::error(\n\n \"save_wal\".to_string(),\n\n Some(json!({\n\n \"height\": self.height,\n", "file_path": "src/state/process.rs", "rank": 57, "score": 22.693248766190674 }, { "content": "\n\n self.throw_event(SMREvent::PrevoteVote {\n\n height: self.height,\n\n round: self.round,\n\n block_hash: self.block_hash.clone(),\n\n lock_round: round,\n\n })?;\n\n self.goto_step(Step::Prevote);\n\n Ok(())\n\n }\n\n\n\n /// Handle a prevote quorum certificate trigger. Only if self step is prevote, the prevote QC is\n\n /// valid. \n\n /// The prevote round must be some. If the vote round is higher than self lock round, update\n\n /// PoLC. Fianlly throw precommit vote event.\n\n fn handle_prevote(\n\n &mut self,\n\n prevote_hash: Hash,\n\n prevote_round: Option<u64>,\n\n source: TriggerSource,\n", "file_path": "src/smr/state_machine.rs", "rank": 58, "score": 22.224509531626563 }, { "content": " SMREvent::PrevoteVote {\n\n height: 0u64,\n\n round: 0u64,\n\n block_hash: Hash::new(),\n\n lock_round: None,\n\n },\n\n gen_output(TriggerType::PrevoteQC, Some(0), 0),\n\n )\n\n .await;\n\n\n\n // Test precommit step timer.\n\n test_timer_trigger(\n\n SMREvent::PrecommitVote {\n\n height: 0u64,\n\n round: 0u64,\n\n block_hash: Hash::new(),\n\n lock_round: None,\n\n },\n\n gen_output(TriggerType::PrecommitQC, Some(0), 0),\n\n )\n", "file_path": "src/timer.rs", "rank": 59, "score": 22.128429631619664 }, { "content": " source: TriggerSource::State,\n\n hash: hash.clone(),\n\n round: lock_round,\n\n height: self.height,\n\n wal_info: None,\n\n })?;\n\n\n\n self.check_block(ctx, hash, block).await;\n\n self.vote_process(VoteType::Prevote).await?;\n\n Ok(())\n\n }\n\n\n\n /// This function only handle signed proposals which height and round are equal to current.\n\n /// Others will be ignored or stored in the proposal collector.\n\n async fn handle_signed_proposal(\n\n &mut self,\n\n ctx: Context,\n\n signed_proposal: SignedProposal<T>,\n\n ) -> ConsensusResult<()> {\n\n let height = signed_proposal.proposal.height;\n", "file_path": "src/state/process.rs", "rank": 60, "score": 21.9059531879617 }, { "content": " source: TriggerSource::State,\n\n hash: Hash::new(),\n\n round: Some(round + 1),\n\n height: self.height,\n\n wal_info: None,\n\n })?;\n\n }\n\n Ok(())\n\n }\n\n\n\n async fn check_block(&mut self, ctx: Context, hash: Hash, block: T) {\n\n let height = self.height;\n\n let round = self.round;\n\n let function = Arc::clone(&self.function);\n\n let resp_tx = self.resp_tx.clone();\n\n\n\n trace::custom(\n\n \"check_block\".to_string(),\n\n Some(json!({\n\n \"height\": height,\n", "file_path": "src/state/process.rs", "rank": 61, "score": 21.718302276105206 }, { "content": "\n\n #[tokio::test(threaded_scheduler)]\n\n async fn test_correctness() {\n\n // Test propose step timer.\n\n test_timer_trigger(\n\n SMREvent::NewRoundInfo {\n\n height: 0,\n\n round: 0,\n\n lock_round: None,\n\n lock_proposal: None,\n\n new_interval: None,\n\n new_config: None,\n\n from_where: FromWhere::PrecommitQC(0),\n\n },\n\n gen_output(TriggerType::Proposal, None, 0),\n\n )\n\n .await;\n\n\n\n // Test prevote step timer.\n\n test_timer_trigger(\n", "file_path": "src/timer.rs", "rank": 62, "score": 21.655620352656136 }, { "content": " round: 0u64,\n\n block_hash: hash.clone(),\n\n lock_round: Some(0),\n\n },\n\n None,\n\n Some((0, hash)),\n\n ));\n\n\n\n // Test case 02:\n\n // self proposal is not empty and not lock, prevote is nil.\n\n // The output should be precommit vote to the prevote hash.\n\n let hash = Hash::new();\n\n test_cases.push(StateMachineTestCase::new(\n\n InnerState::new(0, Step::Prevote, Hash::new(), None),\n\n SMRTrigger::new(hash.clone(), TriggerType::PrevoteQC, Some(0), 0),\n\n SMREvent::PrecommitVote {\n\n height: 0u64,\n\n round: 0u64,\n\n block_hash: hash,\n\n lock_round: None,\n", "file_path": "src/smr/tests/prevote_test.rs", "rank": 63, "score": 21.63705808988466 }, { "content": " round: self.round + 1,\n\n lock_round,\n\n lock_proposal,\n\n new_interval: None,\n\n new_config: None,\n\n from_where: FromWhere::PrevoteQC(vote_round),\n\n })?;\n\n self.goto_next_round();\n\n }\n\n\n\n // throw precommit vote event\n\n let round = if let Some(lock) = &self.lock {\n\n Some(lock.round)\n\n } else {\n\n None\n\n };\n\n self.throw_event(SMREvent::PrecommitVote {\n\n height: self.height,\n\n round: self.round,\n\n block_hash: self.block_hash.clone(),\n", "file_path": "src/smr/state_machine.rs", "rank": 65, "score": 21.371806179570434 }, { "content": "\n\n if height != self.height || round <= self.round {\n\n return Ok(());\n\n }\n\n\n\n info!(\"Overlord: SMR continue round {}\", round);\n\n\n\n self.round = round - 1;\n\n let (lock_round, lock_proposal) = self\n\n .lock\n\n .clone()\n\n .map_or_else(|| (None, None), |lock| (Some(lock.round), Some(lock.hash)));\n\n self.throw_event(SMREvent::NewRoundInfo {\n\n height: self.height,\n\n round: self.round + 1,\n\n lock_round,\n\n lock_proposal,\n\n new_interval: None,\n\n new_config: None,\n\n from_where: FromWhere::ChokeQC(round - 1),\n", "file_path": "src/smr/state_machine.rs", "rank": 66, "score": 21.356376026517232 }, { "content": " .await;\n\n }\n\n\n\n #[tokio::test(threaded_scheduler)]\n\n async fn test_order() {\n\n let (trigger_tx, mut trigger_rx) = unbounded();\n\n let (event_tx, event_rx) = unbounded();\n\n let mut timer = Timer::new(\n\n Event::new(event_rx),\n\n SMRHandler::new(trigger_tx),\n\n 3000,\n\n None,\n\n );\n\n\n\n let new_round_event = SMREvent::NewRoundInfo {\n\n height: 0,\n\n round: 0,\n\n lock_round: None,\n\n lock_proposal: None,\n\n new_interval: None,\n", "file_path": "src/timer.rs", "rank": 68, "score": 21.34127635894221 }, { "content": " new_interval: status.new_interval,\n\n new_config: status.new_config,\n\n from_where: FromWhere::PrecommitQC(u64::max_value()),\n\n })?;\n\n Ok(())\n\n }\n\n\n\n /// Handle a proposal trigger. Only if self step is propose, the proposal is valid.\n\n /// If proposal hash is empty, prevote to an empty hash. If the lock round is some, and the lock\n\n /// round is higher than self lock round, remove PoLC. Fianlly throw prevote vote event. It is\n\n /// impossible that the proposal hash is empty with the lock round is some.\n\n fn handle_proposal(\n\n &mut self,\n\n proposal_hash: Hash,\n\n lock_round: Option<u64>,\n\n source: TriggerSource,\n\n height: u64,\n\n ) -> ConsensusResult<()> {\n\n if self.height != height {\n\n return Ok(());\n", "file_path": "src/smr/state_machine.rs", "rank": 69, "score": 21.331262981239096 }, { "content": " Some((0, hash)),\n\n ));\n\n\n\n // Test case 08:\n\n // self proposal is not empty and without lock, precommit is nil.\n\n // This is an incorrect situation, the process can not pass self check.\n\n let hash = gen_hash();\n\n test_cases.push(StateMachineTestCase::new(\n\n InnerState::new(0, Step::Precommit, hash.clone(), None),\n\n SMRTrigger::new(Hash::new(), TriggerType::PrecommitQC, Some(0), 0),\n\n SMREvent::NewRoundInfo {\n\n height: 0u64,\n\n round: 1u64,\n\n lock_round: None,\n\n lock_proposal: None,\n\n new_interval: None,\n\n new_config: None,\n\n from_where: FromWhere::PrecommitQC(0),\n\n },\n\n None,\n", "file_path": "src/smr/tests/precommit_test.rs", "rank": 70, "score": 21.295362692804012 }, { "content": "pub struct StateMachine {\n\n height: u64,\n\n round: u64,\n\n step: Step,\n\n block_hash: Hash,\n\n lock: Option<Lock>,\n\n\n\n event: (UnboundedSender<SMREvent>, UnboundedSender<SMREvent>),\n\n trigger: UnboundedReceiver<SMRTrigger>,\n\n}\n\n\n\nimpl Stream for StateMachine {\n\n type Item = ConsensusResult<()>;\n\n\n\n fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {\n\n match Stream::poll_next(Pin::new(&mut self.trigger), cx) {\n\n Poll::Pending => Poll::Pending,\n\n\n\n Poll::Ready(msg) => {\n\n if msg.is_none() {\n", "file_path": "src/smr/state_machine.rs", "rank": 71, "score": 21.212349776439464 }, { "content": " }\n\n\n\n if self.step > Step::Propose {\n\n return Ok(());\n\n }\n\n\n\n info!(\n\n \"Overlord: SMR triggered by a proposal hash {:?}, from {:?}, height {}, round {}\",\n\n hex::encode(proposal_hash.clone()),\n\n source,\n\n self.height,\n\n self.round\n\n );\n\n\n\n // If the proposal trigger is from timer, goto prevote step directly.\n\n if source == TriggerSource::Timer {\n\n // This event is for timer to set a prevote timer.\n\n let (round, hash) = if let Some(lock) = &self.lock {\n\n (Some(lock.round), lock.hash.clone())\n\n } else {\n", "file_path": "src/smr/state_machine.rs", "rank": 73, "score": 21.152974419299884 }, { "content": " None,\n\n Some((0, hash)),\n\n ));\n\n\n\n // Test case 04:\n\n // self proposal is not empty and with a lock, precommit is nil.\n\n // The output should be new round info with a lock.\n\n let hash = gen_hash();\n\n let lock = Lock {\n\n round: 0,\n\n hash: hash.clone(),\n\n };\n\n test_cases.push(StateMachineTestCase::new(\n\n InnerState::new(0, Step::Precommit, hash.clone(), Some(lock)),\n\n SMRTrigger::new(Hash::new(), TriggerType::PrecommitQC, Some(0), 0),\n\n SMREvent::NewRoundInfo {\n\n height: 0u64,\n\n round: 1u64,\n\n lock_round: Some(0),\n\n lock_proposal: Some(hash.clone()),\n", "file_path": "src/smr/tests/precommit_test.rs", "rank": 74, "score": 21.109576463698268 }, { "content": " })?;\n\n self.goto_next_round();\n\n Ok(())\n\n }\n\n\n\n fn handle_wal(&mut self, info: SMRBase) -> ConsensusResult<()> {\n\n self.height = info.height;\n\n self.round = info.round;\n\n self.step = info.step;\n\n if let Some(polc) = &info.polc {\n\n self.set_proposal(polc.hash.clone());\n\n }\n\n self.lock = info.polc;\n\n self.set_timer_after_wal()\n\n }\n\n\n\n /// Handle a new height trigger. If new height is higher than current, goto new height and\n\n /// throw a new round info event.\n\n fn handle_new_height(\n\n &mut self,\n", "file_path": "src/smr/state_machine.rs", "rank": 75, "score": 21.092449248398847 }, { "content": " height: 0u64,\n\n round: 1u64,\n\n block_hash: hash.clone(),\n\n lock_round: Some(1),\n\n },\n\n None,\n\n Some((1, hash)),\n\n ));\n\n\n\n // Test case 05:\n\n // self proposal is not empty and no lock, prevote is nil.\n\n // The output should be precommit vote to the prevote hash which is nil.\n\n let hash = gen_hash();\n\n test_cases.push(StateMachineTestCase::new(\n\n InnerState::new(1, Step::Prevote, hash.clone(), None),\n\n SMRTrigger::new(Hash::new(), TriggerType::PrevoteQC, Some(1), 0),\n\n SMREvent::PrecommitVote {\n\n height: 0u64,\n\n round: 1u64,\n\n block_hash: Hash::new(),\n", "file_path": "src/smr/tests/prevote_test.rs", "rank": 78, "score": 21.044054253426832 }, { "content": " None,\n\n ));\n\n\n\n // Test case 08:\n\n // self proposal is not empty but with a lock, prevote is not nil.\n\n // The output should be prevote vote to the prevote hash and lock it.\n\n let hash = gen_hash();\n\n let lock_hash = gen_hash();\n\n let lock = Lock::new(0, lock_hash.clone());\n\n test_cases.push(StateMachineTestCase::new(\n\n InnerState::new(1, Step::Prevote, lock_hash.clone(), Some(lock)),\n\n SMRTrigger::new(hash.clone(), TriggerType::PrevoteQC, Some(1), 0),\n\n SMREvent::PrecommitVote {\n\n height: 0u64,\n\n round: 1u64,\n\n block_hash: hash.clone(),\n\n lock_round: Some(1),\n\n },\n\n None,\n\n Some((1, hash)),\n", "file_path": "src/smr/tests/prevote_test.rs", "rank": 80, "score": 20.969096120014623 }, { "content": " lock_round: None,\n\n },\n\n Some(ConsensusError::PrevoteErr(\"Empty qc\".to_string())),\n\n None,\n\n ));\n\n\n\n // Test case 06:\n\n // self proposal is not empty and no lock, prevote is not nil.\n\n // The output should be precommit vote to the prevote hash and lock on it.\n\n let hash = gen_hash();\n\n let vote_hash = gen_hash();\n\n test_cases.push(StateMachineTestCase::new(\n\n InnerState::new(1, Step::Prevote, hash.clone(), None),\n\n SMRTrigger::new(vote_hash.clone(), TriggerType::PrevoteQC, Some(1), 0),\n\n SMREvent::PrecommitVote {\n\n height: 0u64,\n\n round: 1u64,\n\n block_hash: vote_hash.clone(),\n\n lock_round: Some(1),\n\n },\n", "file_path": "src/smr/tests/prevote_test.rs", "rank": 83, "score": 20.933462000395856 }, { "content": " Some(WalLock {\n\n lock_round: polc.lock_round,\n\n lock_votes: polc.lock_votes,\n\n content: tmp,\n\n })\n\n } else {\n\n None\n\n };\n\n\n\n let height = random::<u64>();\n\n let round = random::<u64>();\n\n let step = Step::Precommit;\n\n let from = UpdateFrom::ChokeQC(AggregatedChoke::new());\n\n WalInfo {\n\n height,\n\n round,\n\n step,\n\n lock,\n\n from,\n\n }\n", "file_path": "src/codec.rs", "rank": 84, "score": 20.85787770255214 }, { "content": " None,\n\n ));\n\n\n\n // // Test case 10:\n\n // // the precommit round is not equal to self round.\n\n // // This is an incorrect situation, the process will return round diff err.\n\n // let hash = gen_hash();\n\n // let lock = Lock::new(0, hash.clone());\n\n // test_cases.push(StateMachineTestCase::new(\n\n // InnerState::new(1, Step::Precommit, hash.clone(), Some(lock)),\n\n // SMRTrigger::new(hash.clone(), TriggerType::PrecommitQC, Some(0), 0),\n\n // SMREvent::Commit(hash.clone()),\n\n // Some(ConsensusError::RoundDiff { local: 1, vote: 0 }),\n\n // Some((0, hash)),\n\n // ));\n\n\n\n for case in test_cases.into_iter() {\n\n println!(\"Precommit test {}/9\", index);\n\n index += 1;\n\n trigger_test(\n", "file_path": "src/smr/tests/precommit_test.rs", "rank": 87, "score": 20.694904511988188 }, { "content": "\n\n if source == TriggerSource::Timer {\n\n if prevote_round != self.round {\n\n return Ok(());\n\n }\n\n\n\n // This event is for timer to set a precommit timer.\n\n let round = if let Some(lock) = &self.lock {\n\n Some(lock.round)\n\n } else {\n\n self.block_hash = Hash::new();\n\n None\n\n };\n\n\n\n self.throw_event(SMREvent::PrecommitVote {\n\n height: self.height,\n\n round: self.round,\n\n block_hash: Hash::new(),\n\n lock_round: round,\n\n })?;\n", "file_path": "src/smr/state_machine.rs", "rank": 90, "score": 20.624511247241248 }, { "content": " None,\n\n Some((1, vote_hash)),\n\n ));\n\n\n\n // Test case 07:\n\n // self proposal is not empty but with a lock, prevote is nil.\n\n // The output should be prevote vote to the nil hash.\n\n let hash = Hash::new();\n\n let lock_hash = gen_hash();\n\n let lock = Lock::new(0, lock_hash.clone());\n\n test_cases.push(StateMachineTestCase::new(\n\n InnerState::new(1, Step::Prevote, lock_hash.clone(), Some(lock)),\n\n SMRTrigger::new(hash.clone(), TriggerType::PrevoteQC, Some(1), 0),\n\n SMREvent::PrecommitVote {\n\n height: 0u64,\n\n round: 1u64,\n\n block_hash: hash.clone(),\n\n lock_round: None,\n\n },\n\n Some(ConsensusError::PrevoteErr(\"Empty qc\".to_string())),\n", "file_path": "src/smr/tests/prevote_test.rs", "rank": 94, "score": 20.346354086986768 }, { "content": " new_interval: None,\n\n new_config: None,\n\n from_where: FromWhere::PrecommitQC(0),\n\n },\n\n Some(ConsensusError::PrecommitErr(\"Empty qc\".to_string())),\n\n Some((0, hash)),\n\n ));\n\n\n\n // Test case 05:\n\n // self proposal is not empty and with a lock, precommit is nil.\n\n // This is an incorrect situation, the process can not pass self check.\n\n let hash = gen_hash();\n\n let lock = Lock::new(0, hash.clone());\n\n test_cases.push(StateMachineTestCase::new(\n\n InnerState::new(0, Step::Precommit, Hash::new(), Some(lock)),\n\n SMRTrigger::new(Hash::new(), TriggerType::PrecommitQC, Some(0), 0),\n\n SMREvent::NewRoundInfo {\n\n height: 0u64,\n\n round: 1u64,\n\n lock_round: Some(0),\n", "file_path": "src/smr/tests/precommit_test.rs", "rank": 96, "score": 20.213971779311063 }, { "content": " // certificate form proposal collector and vote collector. Some necessary checks should be\n\n // done by doing this. These things consititute a Proposal. Then sign it and broadcast it to\n\n // other nodes.\n\n trace::start_step(\"become_leader\".to_string(), self.round, self.height);\n\n self.is_leader = true;\n\n let ctx = Context::new();\n\n let (block, hash, polc) = if lock_round.is_none() {\n\n let (new_block, new_hash) = self\n\n .function\n\n .get_block(ctx.clone(), self.height)\n\n .await\n\n .map_err(|err| ConsensusError::Other(format!(\"get block error {:?}\", err)))?;\n\n (new_block, new_hash, None)\n\n } else {\n\n let round = lock_round.clone().unwrap();\n\n let hash = lock_proposal.unwrap();\n\n let block = self.hash_with_block.get(&hash).ok_or_else(|| {\n\n ConsensusError::ProposalErr(format!(\"Lose whole block that hash is {:?}\", hash))\n\n })?;\n\n\n", "file_path": "src/state/process.rs", "rank": 97, "score": 20.0968048510761 }, { "content": " SMRTrigger {\n\n trigger_type: t_type,\n\n source: TriggerSource::State,\n\n hash: proposal_hash,\n\n round: lock_round,\n\n height,\n\n wal_info: None,\n\n }\n\n }\n\n}\n\n\n\nimpl Lock {\n\n fn new(round: u64, hash: Hash) -> Self {\n\n Lock { round, hash }\n\n }\n\n}\n\n\n", "file_path": "src/smr/tests/mod.rs", "rank": 98, "score": 20.08301539080805 }, { "content": " }\n\n\n\n /// Update the PoLC. Firstly set self proposal as the given hash. Secondly update the PoLC. If\n\n /// the hash is empty, remove it. Otherwise, set lock round and hash as the given round and\n\n /// hash.\n\n fn update_polc(&mut self, hash: Hash, round: u64) {\n\n debug!(\"Overlord: SMR update PoLC at round {}\", round);\n\n self.set_proposal(hash.clone());\n\n\n\n if hash.is_empty() {\n\n self.remove_polc();\n\n } else {\n\n self.lock = Some(Lock { round, hash });\n\n }\n\n }\n\n\n\n #[inline]\n\n fn remove_polc(&mut self) {\n\n self.lock = None;\n\n }\n", "file_path": "src/smr/state_machine.rs", "rank": 99, "score": 20.02034003256514 } ]
Rust
tracing-journald/tests/journal.rs
jswrenn/tracing
388fff8371fef1ec0f75bc495fe1ad4cfa045f7b
#![cfg(target_os = "linux")] use std::collections::HashMap; use std::process::Command; use std::time::Duration; use serde::Deserialize; use tracing::{debug, error, info, info_span, warn}; use tracing_journald::Subscriber; use tracing_subscriber::subscribe::CollectExt; use tracing_subscriber::Registry; fn journalctl_version() -> std::io::Result<String> { let output = Command::new("journalctl").arg("--version").output()?; Ok(String::from_utf8_lossy(&output.stdout).to_string()) } fn with_journald(f: impl FnOnce()) { with_journald_subscriber(Subscriber::new().unwrap().with_field_prefix(None), f) } fn with_journald_subscriber(subscriber: Subscriber, f: impl FnOnce()) { match journalctl_version() { Ok(_) => { let sub = Registry::default().with(subscriber); tracing::collect::with_default(sub, f); } Err(error) => eprintln!( "SKIPPING TEST: journalctl --version failed with error: {}", error ), } } #[derive(Debug, PartialEq, Deserialize)] #[serde(untagged)] enum Field { Text(String), Array(Vec<String>), Binary(Vec<u8>), } impl Field { fn as_array(&self) -> Option<&[String]> { match self { Field::Text(_) => None, Field::Binary(_) => None, Field::Array(v) => Some(v), } } fn as_text(&self) -> Option<&str> { match self { Field::Text(v) => Some(v.as_str()), Field::Binary(_) => None, Field::Array(_) => None, } } } impl PartialEq<&str> for Field { fn eq(&self, other: &&str) -> bool { match self { Field::Text(s) => s == other, Field::Binary(_) => false, Field::Array(_) => false, } } } impl PartialEq<[u8]> for Field { fn eq(&self, other: &[u8]) -> bool { match self { Field::Text(s) => s.as_bytes() == other, Field::Binary(data) => data == other, Field::Array(_) => false, } } } impl PartialEq<Vec<&str>> for Field { fn eq(&self, other: &Vec<&str>) -> bool { match self { Field::Text(_) => false, Field::Binary(_) => false, Field::Array(data) => data == other, } } } fn retry<T, E>(f: impl Fn() -> Result<T, E>) -> Result<T, E> { let attempts = 30; let interval = Duration::from_millis(100); for attempt in (0..attempts).rev() { match f() { Ok(result) => return Ok(result), Err(e) if attempt == 0 => return Err(e), Err(_) => std::thread::sleep(interval), } } unreachable!() } fn read_from_journal(test_name: &str) -> Vec<HashMap<String, Field>> { let stdout = String::from_utf8( Command::new("journalctl") .args(&["--user", "--output=json", "--all"]) .arg(format!("_PID={}", std::process::id())) .arg(format!("TEST_NAME={}", test_name)) .output() .unwrap() .stdout, ) .unwrap(); stdout .lines() .map(|l| serde_json::from_str(l).unwrap()) .collect() } fn retry_read_one_line_from_journal(testname: &str) -> HashMap<String, Field> { retry(|| { let mut messages = read_from_journal(testname); if messages.len() == 1 { Ok(messages.pop().unwrap()) } else { Err(format!( "one messages expected, got {} messages", messages.len() )) } }) .unwrap() } #[test] fn simple_message() { with_journald(|| { info!(test.name = "simple_message", "Hello World"); let message = retry_read_one_line_from_journal("simple_message"); assert_eq!(message["MESSAGE"], "Hello World"); assert_eq!(message["PRIORITY"], "5"); }); } #[test] fn multiline_message() { with_journald(|| { warn!(test.name = "multiline_message", "Hello\nMultiline\nWorld"); let message = retry_read_one_line_from_journal("multiline_message"); assert_eq!(message["MESSAGE"], "Hello\nMultiline\nWorld"); assert_eq!(message["PRIORITY"], "4"); }); } #[test] fn multiline_message_trailing_newline() { with_journald(|| { error!( test.name = "multiline_message_trailing_newline", "A trailing newline\n" ); let message = retry_read_one_line_from_journal("multiline_message_trailing_newline"); assert_eq!(message["MESSAGE"], "A trailing newline\n"); assert_eq!(message["PRIORITY"], "3"); }); } #[test] fn internal_null_byte() { with_journald(|| { debug!(test.name = "internal_null_byte", "An internal\x00byte"); let message = retry_read_one_line_from_journal("internal_null_byte"); assert_eq!(message["MESSAGE"], b"An internal\x00byte"[..]); assert_eq!(message["PRIORITY"], "6"); }); } #[test] fn large_message() { let large_string = "b".repeat(512_000); with_journald(|| { debug!(test.name = "large_message", "Message: {}", large_string); let message = retry_read_one_line_from_journal("large_message"); assert_eq!( message["MESSAGE"], format!("Message: {}", large_string).as_str() ); assert_eq!(message["PRIORITY"], "6"); }); } #[test] fn simple_metadata() { let sub = Subscriber::new() .unwrap() .with_field_prefix(None) .with_syslog_identifier("test_ident".to_string()); with_journald_subscriber(sub, || { info!(test.name = "simple_metadata", "Hello World"); let message = retry_read_one_line_from_journal("simple_metadata"); assert_eq!(message["MESSAGE"], "Hello World"); assert_eq!(message["PRIORITY"], "5"); assert_eq!(message["TARGET"], "journal"); assert_eq!(message["SYSLOG_IDENTIFIER"], "test_ident"); assert!(message["CODE_FILE"].as_text().is_some()); assert!(message["CODE_LINE"].as_text().is_some()); }); } #[test] fn span_metadata() { with_journald(|| { let s1 = info_span!("span1", span_field1 = "foo1"); let _g1 = s1.enter(); info!(test.name = "span_metadata", "Hello World"); let message = retry_read_one_line_from_journal("span_metadata"); assert_eq!(message["MESSAGE"], "Hello World"); assert_eq!(message["PRIORITY"], "5"); assert_eq!(message["TARGET"], "journal"); assert_eq!(message["SPAN_FIELD1"].as_text(), Some("foo1")); assert_eq!(message["SPAN_NAME"].as_text(), Some("span1")); assert!(message["CODE_FILE"].as_text().is_some()); assert!(message["CODE_LINE"].as_text().is_some()); assert!(message["SPAN_CODE_FILE"].as_text().is_some()); assert!(message["SPAN_CODE_LINE"].as_text().is_some()); }); } #[test] fn multiple_spans_metadata() { with_journald(|| { let s1 = info_span!("span1", span_field1 = "foo1"); let _g1 = s1.enter(); let s2 = info_span!("span2", span_field1 = "foo2"); let _g2 = s2.enter(); info!(test.name = "multiple_spans_metadata", "Hello World"); let message = retry_read_one_line_from_journal("multiple_spans_metadata"); assert_eq!(message["MESSAGE"], "Hello World"); assert_eq!(message["PRIORITY"], "5"); assert_eq!(message["TARGET"], "journal"); assert_eq!(message["SPAN_FIELD1"], vec!["foo1", "foo2"]); assert_eq!(message["SPAN_NAME"], vec!["span1", "span2"]); assert!(message["CODE_FILE"].as_text().is_some()); assert!(message["CODE_LINE"].as_text().is_some()); assert!(message.contains_key("SPAN_CODE_FILE")); assert_eq!(message["SPAN_CODE_LINE"].as_array().unwrap().len(), 2); }); } #[test] fn spans_field_collision() { with_journald(|| { let s1 = info_span!("span1", span_field = "foo1"); let _g1 = s1.enter(); let s2 = info_span!("span2", span_field = "foo2"); let _g2 = s2.enter(); info!( test.name = "spans_field_collision", span_field = "foo3", "Hello World" ); let message = retry_read_one_line_from_journal("spans_field_collision"); assert_eq!(message["MESSAGE"], "Hello World"); assert_eq!(message["SPAN_NAME"], vec!["span1", "span2"]); assert_eq!(message["SPAN_FIELD"], vec!["foo1", "foo2", "foo3"]); }); }
#![cfg(target_os = "linux")] use std::collections::HashMap; use std::process::Command; use std::time::Duration; use serde::Deserialize; use tracing::{debug, error, info, info_span, warn}; use tracing_journald::Subscriber; use tracing_subscriber::subscribe::CollectExt; use tracing_subscriber::Registry; fn journalctl_version() -> std::io::Result<String> { let output = Command::new("journalctl").arg("--version").output()?; Ok(String::from_utf8_lossy(&output.stdout).to_string()) } fn with_journald(f: impl FnOnce()) { with_journald_subscriber(Subscriber::new().unwrap().with_field_prefix(None), f) } fn with_journald_subscriber(subscriber: Subscriber, f: impl FnOnce()) { match journalctl_version() { Ok(_) => { let sub = Registry::default().with(subscriber); tracing::collect::with_default(sub, f); } Err(error) => eprintln!( "SKIPPING TEST: journalctl --version failed with error: {}", error ), } } #[derive(Debug, PartialEq, Deserialize)] #[serde(untagged)] enum Field { Text(String), Array(Vec<String>), Binary(Vec<u8>), } impl Field { fn as_array(&self) -> Option<&[String]> { match self { Field::Text(_) => None, Field::Binary(_) => None, Field::Array(v) => Some(v), } } fn as_text(&self) -> Option<&str> { match self { Field::Text(v) => Some(v.as_str()), Field::Binary(_) => None, Field::Array(_) => None,
) { with_journald(|| { error!( test.name = "multiline_message_trailing_newline", "A trailing newline\n" ); let message = retry_read_one_line_from_journal("multiline_message_trailing_newline"); assert_eq!(message["MESSAGE"], "A trailing newline\n"); assert_eq!(message["PRIORITY"], "3"); }); } #[test] fn internal_null_byte() { with_journald(|| { debug!(test.name = "internal_null_byte", "An internal\x00byte"); let message = retry_read_one_line_from_journal("internal_null_byte"); assert_eq!(message["MESSAGE"], b"An internal\x00byte"[..]); assert_eq!(message["PRIORITY"], "6"); }); } #[test] fn large_message() { let large_string = "b".repeat(512_000); with_journald(|| { debug!(test.name = "large_message", "Message: {}", large_string); let message = retry_read_one_line_from_journal("large_message"); assert_eq!( message["MESSAGE"], format!("Message: {}", large_string).as_str() ); assert_eq!(message["PRIORITY"], "6"); }); } #[test] fn simple_metadata() { let sub = Subscriber::new() .unwrap() .with_field_prefix(None) .with_syslog_identifier("test_ident".to_string()); with_journald_subscriber(sub, || { info!(test.name = "simple_metadata", "Hello World"); let message = retry_read_one_line_from_journal("simple_metadata"); assert_eq!(message["MESSAGE"], "Hello World"); assert_eq!(message["PRIORITY"], "5"); assert_eq!(message["TARGET"], "journal"); assert_eq!(message["SYSLOG_IDENTIFIER"], "test_ident"); assert!(message["CODE_FILE"].as_text().is_some()); assert!(message["CODE_LINE"].as_text().is_some()); }); } #[test] fn span_metadata() { with_journald(|| { let s1 = info_span!("span1", span_field1 = "foo1"); let _g1 = s1.enter(); info!(test.name = "span_metadata", "Hello World"); let message = retry_read_one_line_from_journal("span_metadata"); assert_eq!(message["MESSAGE"], "Hello World"); assert_eq!(message["PRIORITY"], "5"); assert_eq!(message["TARGET"], "journal"); assert_eq!(message["SPAN_FIELD1"].as_text(), Some("foo1")); assert_eq!(message["SPAN_NAME"].as_text(), Some("span1")); assert!(message["CODE_FILE"].as_text().is_some()); assert!(message["CODE_LINE"].as_text().is_some()); assert!(message["SPAN_CODE_FILE"].as_text().is_some()); assert!(message["SPAN_CODE_LINE"].as_text().is_some()); }); } #[test] fn multiple_spans_metadata() { with_journald(|| { let s1 = info_span!("span1", span_field1 = "foo1"); let _g1 = s1.enter(); let s2 = info_span!("span2", span_field1 = "foo2"); let _g2 = s2.enter(); info!(test.name = "multiple_spans_metadata", "Hello World"); let message = retry_read_one_line_from_journal("multiple_spans_metadata"); assert_eq!(message["MESSAGE"], "Hello World"); assert_eq!(message["PRIORITY"], "5"); assert_eq!(message["TARGET"], "journal"); assert_eq!(message["SPAN_FIELD1"], vec!["foo1", "foo2"]); assert_eq!(message["SPAN_NAME"], vec!["span1", "span2"]); assert!(message["CODE_FILE"].as_text().is_some()); assert!(message["CODE_LINE"].as_text().is_some()); assert!(message.contains_key("SPAN_CODE_FILE")); assert_eq!(message["SPAN_CODE_LINE"].as_array().unwrap().len(), 2); }); } #[test] fn spans_field_collision() { with_journald(|| { let s1 = info_span!("span1", span_field = "foo1"); let _g1 = s1.enter(); let s2 = info_span!("span2", span_field = "foo2"); let _g2 = s2.enter(); info!( test.name = "spans_field_collision", span_field = "foo3", "Hello World" ); let message = retry_read_one_line_from_journal("spans_field_collision"); assert_eq!(message["MESSAGE"], "Hello World"); assert_eq!(message["SPAN_NAME"], vec!["span1", "span2"]); assert_eq!(message["SPAN_FIELD"], vec!["foo1", "foo2", "foo3"]); }); }
} } } impl PartialEq<&str> for Field { fn eq(&self, other: &&str) -> bool { match self { Field::Text(s) => s == other, Field::Binary(_) => false, Field::Array(_) => false, } } } impl PartialEq<[u8]> for Field { fn eq(&self, other: &[u8]) -> bool { match self { Field::Text(s) => s.as_bytes() == other, Field::Binary(data) => data == other, Field::Array(_) => false, } } } impl PartialEq<Vec<&str>> for Field { fn eq(&self, other: &Vec<&str>) -> bool { match self { Field::Text(_) => false, Field::Binary(_) => false, Field::Array(data) => data == other, } } } fn retry<T, E>(f: impl Fn() -> Result<T, E>) -> Result<T, E> { let attempts = 30; let interval = Duration::from_millis(100); for attempt in (0..attempts).rev() { match f() { Ok(result) => return Ok(result), Err(e) if attempt == 0 => return Err(e), Err(_) => std::thread::sleep(interval), } } unreachable!() } fn read_from_journal(test_name: &str) -> Vec<HashMap<String, Field>> { let stdout = String::from_utf8( Command::new("journalctl") .args(&["--user", "--output=json", "--all"]) .arg(format!("_PID={}", std::process::id())) .arg(format!("TEST_NAME={}", test_name)) .output() .unwrap() .stdout, ) .unwrap(); stdout .lines() .map(|l| serde_json::from_str(l).unwrap()) .collect() } fn retry_read_one_line_from_journal(testname: &str) -> HashMap<String, Field> { retry(|| { let mut messages = read_from_journal(testname); if messages.len() == 1 { Ok(messages.pop().unwrap()) } else { Err(format!( "one messages expected, got {} messages", messages.len() )) } }) .unwrap() } #[test] fn simple_message() { with_journald(|| { info!(test.name = "simple_message", "Hello World"); let message = retry_read_one_line_from_journal("simple_message"); assert_eq!(message["MESSAGE"], "Hello World"); assert_eq!(message["PRIORITY"], "5"); }); } #[test] fn multiline_message() { with_journald(|| { warn!(test.name = "multiline_message", "Hello\nMultiline\nWorld"); let message = retry_read_one_line_from_journal("multiline_message"); assert_eq!(message["MESSAGE"], "Hello\nMultiline\nWorld"); assert_eq!(message["PRIORITY"], "4"); }); } #[test] fn multiline_message_trailing_newline(
random
[ { "content": "#[allow(clippy::manual_async_fn)]\n\n#[instrument(level = \"debug\")]\n\nfn instrumented_manual_async() -> impl Future<Output = ()> {\n\n async move {}\n\n}\n\n\n", "file_path": "tracing/test_static_max_level_features/tests/test.rs", "rank": 1, "score": 282858.3962827644 }, { "content": "#[instrument(ret, err)]\n\n#[deny(unused_braces)]\n\n#[allow(clippy::manual_async_fn)]\n\nfn repro_1831_2() -> impl Future<Output = Result<(), Infallible>> {\n\n async { Ok(()) }\n\n}\n\n\n", "file_path": "tracing-attributes/tests/async_fn.rs", "rank": 2, "score": 276361.6522170686 }, { "content": "/// Returns a [`FormatFields`] implementation that formats fields using the\n\n/// provided function or closure.\n\n///\n\npub fn debug_fn<F>(f: F) -> FieldFn<F>\n\nwhere\n\n F: Fn(&mut Writer<'_>, &Field, &dyn fmt::Debug) -> fmt::Result + Clone,\n\n{\n\n FieldFn(f)\n\n}\n\n\n\n/// A writer to which formatted representations of spans and events are written.\n\n///\n\n/// This type is provided as input to the [`FormatEvent::format_event`] and\n\n/// [`FormatFields::format_fields`] methods, which will write formatted\n\n/// representations of [`Event`]s and [fields] to the `Writer`.\n\n///\n\n/// This type implements the [`std::fmt::Write`] trait, allowing it to be used\n\n/// with any function that takes an instance of [`std::fmt::Write`].\n\n/// Additionally, it can be used with the standard library's [`std::write!`] and\n\n/// [`std::writeln!`] macros.\n\n///\n\n/// Additionally, a `Writer` may expose additional `tracing`-specific\n\n/// information to the formatter implementation.\n", "file_path": "tracing-subscriber/src/fmt/format/mod.rs", "rank": 3, "score": 276314.5809077788 }, { "content": "fn run_test<F: FnOnce() -> T, T>(span: NewSpan, fun: F) {\n\n let (collector, handle) = collector::mock()\n\n .new_span(span)\n\n .enter(span::mock())\n\n .exit(span::mock())\n\n .done()\n\n .run_with_handle();\n\n\n\n with_default(collector, fun);\n\n handle.assert_finished();\n\n}\n", "file_path": "tracing-attributes/tests/fields.rs", "rank": 4, "score": 261233.90834818096 }, { "content": "fn assert_collector(_s: impl Collect) {}\n\n\n", "file_path": "tracing-subscriber/src/subscribe/tests.rs", "rank": 5, "score": 255608.96258657443 }, { "content": "#[cfg(feature = \"tokio-test\")]\n\npub fn block_on_future<F>(future: F) -> F::Output\n\nwhere\n\n F: std::future::Future,\n\n{\n\n use tokio_test::task;\n\n\n\n let mut task = task::spawn(future);\n\n loop {\n\n if let Poll::Ready(v) = task.poll() {\n\n break v;\n\n }\n\n }\n\n}\n", "file_path": "tracing-mock/src/lib.rs", "rank": 7, "score": 248950.72178959794 }, { "content": "#[test]\n\nfn self_expr_field() {\n\n let span = span::mock().with_field(mock(\"my_field\").with_value(&\"hello world\").only());\n\n run_test(span, || {\n\n let has_field = HasField {\n\n my_field: \"hello world\",\n\n };\n\n has_field.self_expr_field();\n\n });\n\n}\n\n\n", "file_path": "tracing-attributes/tests/fields.rs", "rank": 8, "score": 238877.1890234232 }, { "content": "/// Constructs a [`FilterFn`], from a function or closure that returns `true` if\n\n/// a span or event should be enabled, based on its [`Metadata`].\n\n///\n\n/// The returned [`FilterFn`] can be used for both [per-subscriber filtering][plf]\n\n/// (using its [`Filter`] implementation) and [global filtering][global] (using\n\n/// its [`Subscribe`] implementation).\n\n///\n\n/// See the [documentation on filtering with subscribers][filtering] for details.\n\n///\n\n/// This is equivalent to calling [`FilterFn::new`].\n\n///\n\n/// [`Metadata`]: tracing_core::Metadata\n\n/// [`Filter`]: crate::subscribe::Filter\n\n/// [`Subscribe`]: crate::subscribe::Subscribe\n\n/// [plf]: crate::subscribe#per-subscriber-filtering\n\n/// [global]: crate::subscribe#global-filtering\n\n/// [filtering]: crate::subscribe#filtering-with-subscribers\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use tracing_subscriber::{\n\n/// subscribe::{Subscribe, CollectExt},\n\n/// filter,\n\n/// util::SubscriberInitExt,\n\n/// };\n\n///\n\n/// let my_filter = filter::filter_fn(|metadata| {\n\n/// // Only enable spans or events with the target \"interesting_things\"\n\n/// metadata.target() == \"interesting_things\"\n\n/// });\n\n///\n\n/// let my_subscriber = tracing_subscriber::fmt::subscriber();\n\n///\n\n/// tracing_subscriber::registry()\n\n/// .with(my_subscriber.with_filter(my_filter))\n\n/// .init();\n\n///\n\n/// // This event will not be enabled.\n\n/// tracing::warn!(\"something important but uninteresting happened!\");\n\n///\n\n/// // This event will be enabled.\n\n/// tracing::debug!(target: \"interesting_things\", \"an interesting minor detail...\");\n\n/// ```\n\npub fn filter_fn<F>(f: F) -> FilterFn<F>\n\nwhere\n\n F: Fn(&Metadata<'_>) -> bool,\n\n{\n\n FilterFn::new(f)\n\n}\n\n\n", "file_path": "tracing-subscriber/src/filter/filter_fn.rs", "rank": 9, "score": 238301.20178298966 }, { "content": "#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n#[test]\n\nfn warn() {\n\n warn!(foo = ?3, bar.baz = %2, quux = false);\n\n warn!(foo = 3, bar.baz = 2, quux = false);\n\n warn!(foo = 3, bar.baz = 3,);\n\n warn!(\"foo\");\n\n warn!(\"foo: {}\", 3);\n\n warn!(foo = ?3, bar.baz = %2, quux = false, \"hello world {:?}\", 42);\n\n warn!(foo = 3, bar.baz = 2, quux = false, \"hello world {:?}\", 42);\n\n warn!(foo = 3, bar.baz = 3, \"hello world {:?}\", 42,);\n\n warn!({ foo = 3, bar.baz = 80 }, \"quux\");\n\n warn!({ foo = 2, bar.baz = 79 }, \"quux {:?}\", true);\n\n warn!({ foo = 2, bar.baz = 79 }, \"quux {:?}, {quux}\", true, quux = false);\n\n warn!({ foo = 2, bar.baz = 78 }, \"quux\");\n\n warn!({ foo = ?2, bar.baz = %78 }, \"quux\");\n\n warn!(target: \"foo_events\", foo = 3, bar.baz = 2, quux = false);\n\n warn!(target: \"foo_events\", foo = 3, bar.baz = 3,);\n\n warn!(target: \"foo_events\", \"foo\");\n\n warn!(target: \"foo_events\", \"foo: {}\", 3);\n\n warn!(target: \"foo_events\", { foo = 3, bar.baz = 80 }, \"quux\");\n\n warn!(target: \"foo_events\", { foo = 2, bar.baz = 79 }, \"quux {:?}\", true);\n", "file_path": "tracing/tests/macros.rs", "rank": 11, "score": 231939.99562684487 }, { "content": "#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n#[test]\n\nfn info() {\n\n info!(foo = ?3, bar.baz = %2, quux = false);\n\n info!(foo = 3, bar.baz = 2, quux = false);\n\n info!(foo = 3, bar.baz = 3,);\n\n info!(\"foo\");\n\n info!(\"foo: {}\", 3);\n\n info!(foo = ?3, bar.baz = %2, quux = false, \"hello world {:?}\", 42);\n\n info!(foo = 3, bar.baz = 2, quux = false, \"hello world {:?}\", 42);\n\n info!(foo = 3, bar.baz = 3, \"hello world {:?}\", 42,);\n\n info!({ foo = 3, bar.baz = 80 }, \"quux\");\n\n info!({ foo = 2, bar.baz = 79 }, \"quux {:?}\", true);\n\n info!({ foo = 2, bar.baz = 79 }, \"quux {:?}, {quux}\", true, quux = false);\n\n info!({ foo = 2, bar.baz = 78 }, \"quux\");\n\n info!({ foo = ?2, bar.baz = %78 }, \"quux\");\n\n info!(target: \"foo_events\", foo = 3, bar.baz = 2, quux = false);\n\n info!(target: \"foo_events\", foo = 3, bar.baz = 3,);\n\n info!(target: \"foo_events\", \"foo\");\n\n info!(target: \"foo_events\", \"foo: {}\", 3);\n\n info!(target: \"foo_events\", { foo = 3, bar.baz = 80 }, \"quux\");\n\n info!(target: \"foo_events\", { foo = 2, bar.baz = 79 }, \"quux {:?}\", true);\n", "file_path": "tracing/tests/macros.rs", "rank": 12, "score": 231937.94316543895 }, { "content": "#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n#[test]\n\nfn error() {\n\n error!(foo = ?3, bar.baz = %2, quux = false);\n\n error!(foo = 3, bar.baz = 2, quux = false);\n\n error!(foo = 3, bar.baz = 3,);\n\n error!(\"foo\");\n\n error!(\"foo: {}\", 3);\n\n error!(foo = ?3, bar.baz = %2, quux = false, \"hello world {:?}\", 42);\n\n error!(foo = 3, bar.baz = 2, quux = false, \"hello world {:?}\", 42);\n\n error!(foo = 3, bar.baz = 3, \"hello world {:?}\", 42,);\n\n error!({ foo = 3, bar.baz = 80 }, \"quux\");\n\n error!({ foo = 2, bar.baz = 79 }, \"quux {:?}\", true);\n\n error!({ foo = 2, bar.baz = 79 }, \"quux {:?}, {quux}\", true, quux = false);\n\n error!({ foo = 2, bar.baz = 78, }, \"quux\");\n\n error!({ foo = ?2, bar.baz = %78 }, \"quux\");\n\n error!(target: \"foo_events\", foo = 3, bar.baz = 2, quux = false);\n\n error!(target: \"foo_events\", foo = 3, bar.baz = 3,);\n\n error!(target: \"foo_events\", \"foo\");\n\n error!(target: \"foo_events\", \"foo: {}\", 3);\n\n error!(target: \"foo_events\", { foo = 3, bar.baz = 80 }, \"quux\");\n\n error!(target: \"foo_events\", { foo = 2, bar.baz = 79 }, \"quux {:?}\", true);\n", "file_path": "tracing/tests/macros.rs", "rank": 13, "score": 231547.23753841672 }, { "content": "#[test]\n\n#[cfg_attr(not(flaky_tests), ignore)]\n\nfn field_filter_events() {\n\n let filter: EnvFilter = \"[{thing}]=debug\".parse().expect(\"filter should parse\");\n\n let (subscriber, finished) = collector::mock()\n\n .event(\n\n event::mock()\n\n .at_level(Level::INFO)\n\n .with_fields(field::mock(\"thing\")),\n\n )\n\n .event(\n\n event::mock()\n\n .at_level(Level::DEBUG)\n\n .with_fields(field::mock(\"thing\")),\n\n )\n\n .done()\n\n .run_with_handle();\n\n let subscriber = subscriber.with(filter);\n\n\n\n with_default(subscriber, || {\n\n tracing::trace!(disabled = true);\n\n tracing::info!(\"also disabled\");\n\n tracing::info!(thing = 1);\n\n tracing::debug!(thing = 2);\n\n tracing::trace!(thing = 3);\n\n });\n\n\n\n finished.assert_finished();\n\n}\n\n\n", "file_path": "tracing-subscriber/tests/field_filter.rs", "rank": 14, "score": 231326.903736749 }, { "content": "#[test]\n\n#[cfg_attr(not(flaky_tests), ignore)]\n\nfn field_filter_spans() {\n\n let filter: EnvFilter = \"[{enabled=true}]=debug\"\n\n .parse()\n\n .expect(\"filter should parse\");\n\n let (subscriber, finished) = collector::mock()\n\n .enter(span::mock().named(\"span1\"))\n\n .event(\n\n event::mock()\n\n .at_level(Level::INFO)\n\n .with_fields(field::mock(\"something\")),\n\n )\n\n .exit(span::mock().named(\"span1\"))\n\n .enter(span::mock().named(\"span2\"))\n\n .exit(span::mock().named(\"span2\"))\n\n .enter(span::mock().named(\"span3\"))\n\n .event(\n\n event::mock()\n\n .at_level(Level::DEBUG)\n\n .with_fields(field::mock(\"something\")),\n\n )\n", "file_path": "tracing-subscriber/tests/field_filter.rs", "rank": 15, "score": 231326.903736749 }, { "content": "#[test]\n\nfn skip() {\n\n struct UnDebug(pub u32);\n\n\n\n #[instrument(target = \"my_target\", level = \"debug\", skip(_arg2, _arg3))]\n\n fn my_fn(arg1: usize, _arg2: UnDebug, _arg3: UnDebug) {}\n\n\n\n let span = span::mock()\n\n .named(\"my_fn\")\n\n .at_level(Level::DEBUG)\n\n .with_target(\"my_target\");\n\n\n\n let span2 = span::mock()\n\n .named(\"my_fn\")\n\n .at_level(Level::DEBUG)\n\n .with_target(\"my_target\");\n\n let (collector, handle) = collector::mock()\n\n .new_span(\n\n span.clone()\n\n .with_field(field::mock(\"arg1\").with_value(&2usize).only()),\n\n )\n", "file_path": "tracing-attributes/tests/instrument.rs", "rank": 16, "score": 228130.2871541776 }, { "content": "#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n#[test]\n\nfn info_span() {\n\n info_span!(target: \"foo_events\", \"foo\", bar.baz = ?2, quux = %3, quuux = 4);\n\n info_span!(target: \"foo_events\", \"foo\", bar.baz = 2, quux = 3);\n\n info_span!(target: \"foo_events\", \"foo\", bar.baz = 2, quux = 4,);\n\n info_span!(target: \"foo_events\", \"foo\");\n\n info_span!(target: \"foo_events\", \"bar\",);\n\n info_span!(\"foo\", bar.baz = 2, quux = 3);\n\n info_span!(\"foo\", bar.baz = 2, quux = 4,);\n\n info_span!(\"foo\", bar.baz = ?2);\n\n info_span!(\"foo\", bar.baz = %2);\n\n info_span!(\"bar\");\n\n info_span!(\"bar\",);\n\n}\n\n\n", "file_path": "tracing/tests/macros.rs", "rank": 17, "score": 228116.9395571493 }, { "content": "/// Constructs a [`DynFilterFn`] from a function or closure that returns `true`\n\n/// if a span or event should be enabled within a particular [span context][`Context`].\n\n///\n\n/// This is equivalent to calling [`DynFilterFn::new`].\n\n///\n\n/// Unlike [`filter_fn`], this function takes a closure or function pointer\n\n/// taking the [`Metadata`] for a span or event *and* the current [`Context`].\n\n/// This means that a [`DynFilterFn`] can choose whether to enable spans or\n\n/// events based on information about the _current_ span (or its parents).\n\n///\n\n/// If this is *not* necessary, use [`filter_fn`] instead.\n\n///\n\n/// The returned [`DynFilterFn`] can be used for both [per-subscriber filtering][plf]\n\n/// (using its [`Filter`] implementation) and [global filtering][global] (using\n\n/// its [`Subscribe`] implementation).\n\n///\n\n/// See the [documentation on filtering with subscribers][filtering] for details.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use tracing_subscriber::{\n\n/// subscribe::{Subscribe, CollectExt},\n\n/// filter,\n\n/// util::SubscriberInitExt,\n\n/// };\n\n///\n\n/// // Only enable spans or events within a span named \"interesting_span\".\n\n/// let my_filter = filter::dynamic_filter_fn(|metadata, cx| {\n\n/// // If this *is* \"interesting_span\", make sure to enable it.\n\n/// if metadata.is_span() && metadata.name() == \"interesting_span\" {\n\n/// return true;\n\n/// }\n\n///\n\n/// // Otherwise, are we in an interesting span?\n\n/// if let Some(current_span) = cx.lookup_current() {\n\n/// return current_span.name() == \"interesting_span\";\n\n/// }\n\n///\n\n/// false\n\n/// });\n\n///\n\n/// let my_subscriber = tracing_subscriber::fmt::subscriber();\n\n///\n\n/// tracing_subscriber::registry()\n\n/// .with(my_subscriber.with_filter(my_filter))\n\n/// .init();\n\n///\n\n/// // This event will not be enabled.\n\n/// tracing::info!(\"something happened\");\n\n///\n\n/// tracing::info_span!(\"interesting_span\").in_scope(|| {\n\n/// // This event will be enabled.\n\n/// tracing::debug!(\"something else happened\");\n\n/// });\n\n/// ```\n\n///\n\n/// [`Filter`]: crate::subscribe::Filter\n\n/// [`Subscribe`]: crate::subscribe::Subscribe\n\n/// [plf]: crate::subscribe#per-subscriber-filtering\n\n/// [global]: crate::subscribe#global-filtering\n\n/// [filtering]: crate::subscribe#filtering-with-subscribers\n\n/// [`Context`]: crate::subscribe::Context\n\n/// [`Metadata`]: tracing_core::Metadata\n\npub fn dynamic_filter_fn<C, F>(f: F) -> DynFilterFn<C, F>\n\nwhere\n\n F: Fn(&Metadata<'_>, &Context<'_, C>) -> bool,\n\n{\n\n DynFilterFn::new(f)\n\n}\n\n\n\nimpl<F> FilterFn<F>\n\nwhere\n\n F: Fn(&Metadata<'_>) -> bool,\n\n{\n\n /// Constructs a [`FilterFn`] from a function or closure that returns `true`\n\n /// if a span or event should be enabled, based on its [`Metadata`].\n\n ///\n\n /// If determining whether a span or event should be enabled also requires\n\n /// information about the current span context, use [`DynFilterFn`] instead.\n\n ///\n\n /// See the [documentation on per-subscriber filtering][plf] for details on using\n\n /// [`Filter`]s.\n\n ///\n", "file_path": "tracing-subscriber/src/filter/filter_fn.rs", "rank": 18, "score": 226868.24461842712 }, { "content": "#[test]\n\nfn record_after_created() {\n\n let filter: EnvFilter = \"[{enabled=true}]=debug\"\n\n .parse()\n\n .expect(\"filter should parse\");\n\n let (subscriber, finished) = collector::mock()\n\n .enter(span::mock().named(\"span\"))\n\n .exit(span::mock().named(\"span\"))\n\n .record(\n\n span::mock().named(\"span\"),\n\n field::mock(\"enabled\").with_value(&true),\n\n )\n\n .enter(span::mock().named(\"span\"))\n\n .event(event::mock().at_level(Level::DEBUG))\n\n .exit(span::mock().named(\"span\"))\n\n .done()\n\n .run_with_handle();\n\n let subscriber = subscriber.with(filter);\n\n\n\n with_default(subscriber, || {\n\n let span = tracing::info_span!(\"span\", enabled = false);\n", "file_path": "tracing-subscriber/tests/field_filter.rs", "rank": 19, "score": 223541.86870266247 }, { "content": "#[test]\n\nfn same_num_fields_event() {\n\n let filter: EnvFilter = \"[{foo}]=trace,[{bar}]=trace\"\n\n .parse()\n\n .expect(\"filter should parse\");\n\n let (subscriber, finished) = collector::mock()\n\n .event(\n\n event::mock()\n\n .at_level(Level::TRACE)\n\n .with_fields(field::mock(\"foo\")),\n\n )\n\n .event(\n\n event::mock()\n\n .at_level(Level::TRACE)\n\n .with_fields(field::mock(\"bar\")),\n\n )\n\n .done()\n\n .run_with_handle();\n\n let subscriber = subscriber.with(filter);\n\n with_default(subscriber, || {\n\n tracing::trace!(foo = 1);\n\n tracing::trace!(bar = 3);\n\n });\n\n\n\n finished.assert_finished();\n\n}\n\n\n", "file_path": "tracing-subscriber/tests/same_len_filters.rs", "rank": 20, "score": 219199.58670142747 }, { "content": "#[instrument(ret)]\n\nfn ret_impl_type() -> impl Copy {\n\n 42\n\n}\n\n\n", "file_path": "tracing-attributes/tests/ret.rs", "rank": 21, "score": 216940.28453569007 }, { "content": "#[test]\n\nfn same_num_fields_and_name_len() {\n\n let filter: EnvFilter = \"[foo{bar=1}]=trace,[baz{boz=1}]=trace\"\n\n .parse()\n\n .expect(\"filter should parse\");\n\n let (subscriber, finished) = collector::mock()\n\n .new_span(\n\n span::mock()\n\n .named(\"foo\")\n\n .at_level(Level::TRACE)\n\n .with_field(field::mock(\"bar\")),\n\n )\n\n .new_span(\n\n span::mock()\n\n .named(\"baz\")\n\n .at_level(Level::TRACE)\n\n .with_field(field::mock(\"boz\")),\n\n )\n\n .done()\n\n .run_with_handle();\n\n let subscriber = subscriber.with(filter);\n\n with_default(subscriber, || {\n\n tracing::trace_span!(\"foo\", bar = 1);\n\n tracing::trace_span!(\"baz\", boz = 1);\n\n });\n\n\n\n finished.assert_finished();\n\n}\n", "file_path": "tracing-subscriber/tests/same_len_filters.rs", "rank": 22, "score": 215051.4581588558 }, { "content": "fn assert_send<F: Future + Send>(_f: F) {}\n", "file_path": "tracing/tests/future_send.rs", "rank": 23, "score": 214397.80448503132 }, { "content": "fn value_match_f64(v: f64) -> ValueMatch {\n\n if v.is_nan() {\n\n ValueMatch::NaN\n\n } else {\n\n ValueMatch::F64(v)\n\n }\n\n}\n\n\n\nimpl ValueMatch {\n\n /// Parse a `ValueMatch` that will match `fmt::Debug` fields using regular\n\n /// expressions.\n\n ///\n\n /// This returns an error if the string didn't contain a valid `bool`,\n\n /// `u64`, `i64`, or `f64` literal, and couldn't be parsed as a regular\n\n /// expression.\n\n fn parse_regex(s: &str) -> Result<Self, matchers::Error> {\n\n s.parse::<bool>()\n\n .map(ValueMatch::Bool)\n\n .or_else(|_| s.parse::<u64>().map(ValueMatch::U64))\n\n .or_else(|_| s.parse::<i64>().map(ValueMatch::I64))\n", "file_path": "tracing-subscriber/src/filter/env/field.rs", "rank": 24, "score": 211610.3676271639 }, { "content": "#[tracing::instrument]\n\nfn do_something(foo: &str) -> Result<&'static str, impl Error + Send + Sync + 'static> {\n\n // Results can be instrumented with a `SpanTrace` via the `InstrumentResult` trait\n\n do_another_thing(42, false).in_current_span()\n\n}\n\n\n", "file_path": "examples/examples/instrumented-error.rs", "rank": 27, "score": 204233.99472651747 }, { "content": "#[tracing::instrument]\n\nfn do_something(foo: &str) -> Result<&'static str, impl Error + Send + Sync + 'static> {\n\n do_another_thing(42, false)\n\n}\n\n\n", "file_path": "examples/examples/custom-error.rs", "rank": 28, "score": 204233.99472651747 }, { "content": "#[instrument(fields(foo = \"bar\", empty))]\n\nfn fn_empty_field() {}\n\n\n", "file_path": "tracing-attributes/tests/fields.rs", "rank": 29, "score": 201554.37244544874 }, { "content": "#[instrument]\n\n#[deny(unused_braces)]\n\nfn repro_1831() -> Pin<Box<dyn Future<Output = ()>>> {\n\n Box::pin(async move {})\n\n}\n\n\n\n// This replicates the pattern used to implement async trait methods on nightly using the\n\n// `type_alias_impl_trait` feature\n", "file_path": "tracing-attributes/tests/async_fn.rs", "rank": 30, "score": 200566.581493292 }, { "content": " /// Extension trait implemented by visitors to indicate that they write to an\n\n /// `io::Write` instance, and allow access to that writer.\n\n pub trait VisitWrite: VisitOutput<Result<(), io::Error>> {\n\n /// Returns the writer that this visitor writes to.\n\n fn writer(&mut self) -> &mut dyn io::Write;\n\n }\n\n}\n\n\n", "file_path": "tracing-subscriber/src/field/mod.rs", "rank": 31, "score": 199413.6600482589 }, { "content": "#[instrument(follows_from = causes, skip(causes))]\n\nfn with_follows_from_sync(causes: impl IntoIterator<Item = impl Into<Option<Id>>>) {}\n\n\n\n#[instrument(follows_from = causes, skip(causes))]\n\nasync fn with_follows_from_async(causes: impl IntoIterator<Item = impl Into<Option<Id>>>) {}\n\n\n", "file_path": "tracing-attributes/tests/follows_from.rs", "rank": 32, "score": 198928.18618579267 }, { "content": "fn test_tracer() -> (Tracer, TracerProvider, TestExporter, impl Collect) {\n\n let exporter = TestExporter::default();\n\n let provider = TracerProvider::builder()\n\n .with_simple_exporter(exporter.clone())\n\n .build();\n\n let tracer = provider.tracer(\"test\");\n\n let subscriber = tracing_subscriber::registry().with(subscriber().with_tracer(tracer.clone()));\n\n\n\n (tracer, provider, exporter, subscriber)\n\n}\n\n\n", "file_path": "tracing-opentelemetry/tests/trace_state_propagation.rs", "rank": 33, "score": 196905.6006418789 }, { "content": "#[test]\n\nfn fields() {\n\n let span = span::mock().with_field(\n\n mock(\"foo\")\n\n .with_value(&\"bar\")\n\n .and(mock(\"dsa\").with_value(&true))\n\n .and(mock(\"num\").with_value(&1))\n\n .only(),\n\n );\n\n run_test(span, || {\n\n fn_no_param();\n\n });\n\n}\n\n\n", "file_path": "tracing-attributes/tests/fields.rs", "rank": 34, "score": 196367.91676575004 }, { "content": "pub fn msg(message: impl fmt::Display) -> MockField {\n\n MockField {\n\n name: \"message\".to_string(),\n\n value: MockValue::Debug(message.to_string()),\n\n }\n\n}\n\n\n\nimpl MockField {\n\n /// Expect a field with the given name and value.\n\n pub fn with_value(self, value: &dyn Value) -> Self {\n\n Self {\n\n value: MockValue::from(value),\n\n ..self\n\n }\n\n }\n\n\n\n pub fn and(self, other: MockField) -> Expect {\n\n Expect {\n\n fields: HashMap::new(),\n\n only: false,\n", "file_path": "tracing-mock/src/field.rs", "rank": 35, "score": 195860.74216586724 }, { "content": "#[test]\n\nfn filter_fn() {\n\n let (all, all_handle) = subscriber::named(\"all_targets\")\n\n .event(event::msg(\"hello foo\"))\n\n .event(event::msg(\"hello bar\"))\n\n .done()\n\n .run_with_handle();\n\n\n\n let (foo, foo_handle) = subscriber::named(\"foo_target\")\n\n .event(event::msg(\"hello foo\"))\n\n .done()\n\n .run_with_handle();\n\n\n\n let (bar, bar_handle) = subscriber::named(\"bar_target\")\n\n .event(event::msg(\"hello bar\"))\n\n .done()\n\n .run_with_handle();\n\n\n\n let _subscriber = tracing_subscriber::registry()\n\n .with(all)\n\n .with(foo.with_filter(filter::filter_fn(|meta| meta.target().starts_with(\"foo\"))))\n", "file_path": "tracing-subscriber/tests/subscriber_filters/main.rs", "rank": 36, "score": 194993.20401394836 }, { "content": "#[instrument(fields(len = s.len()))]\n\nfn fn_expr_field(s: &str) {}\n\n\n", "file_path": "tracing-attributes/tests/fields.rs", "rank": 37, "score": 194416.37458420443 }, { "content": "#[test]\n\nfn test_warn() {\n\n let span = span::mock().named(\"ret_warn\");\n\n let (collector, handle) = collector::mock()\n\n .new_span(span.clone())\n\n .enter(span.clone())\n\n .event(\n\n event::mock()\n\n .with_fields(field::mock(\"return\").with_value(&tracing::field::debug(42)))\n\n .at_level(Level::WARN),\n\n )\n\n .exit(span.clone())\n\n .drop_span(span)\n\n .done()\n\n .run_with_handle();\n\n\n\n with_default(collector, ret_warn);\n\n handle.assert_finished();\n\n}\n\n\n", "file_path": "tracing-attributes/tests/ret.rs", "rank": 38, "score": 193245.99843607063 }, { "content": "#[test]\n\nfn parameters_with_fields() {\n\n let span = span::mock().with_field(\n\n mock(\"foo\")\n\n .with_value(&\"bar\")\n\n .and(mock(\"param\").with_value(&1u32))\n\n .only(),\n\n );\n\n run_test(span, || {\n\n fn_param(1);\n\n });\n\n}\n\n\n", "file_path": "tracing-attributes/tests/fields.rs", "rank": 39, "score": 193041.26898954323 }, { "content": "#[test]\n\nfn expr_field() {\n\n let span = span::mock().with_field(\n\n mock(\"s\")\n\n .with_value(&\"hello world\")\n\n .and(mock(\"len\").with_value(&\"hello world\".len()))\n\n .only(),\n\n );\n\n run_test(span, || {\n\n fn_expr_field(\"hello world\");\n\n });\n\n}\n\n\n", "file_path": "tracing-attributes/tests/fields.rs", "rank": 40, "score": 193041.26898954323 }, { "content": "#[test]\n\nfn empty_field() {\n\n let span = span::mock().with_field(mock(\"foo\").with_value(&\"bar\").only());\n\n run_test(span, || {\n\n fn_empty_field();\n\n });\n\n}\n\n\n", "file_path": "tracing-attributes/tests/fields.rs", "rank": 41, "score": 193041.26898954323 }, { "content": "#[instrument(fields(foo = \"bar\", dsa = true, num = 1))]\n\nfn fn_no_param() {}\n\n\n", "file_path": "tracing-attributes/tests/fields.rs", "rank": 42, "score": 192862.49988486792 }, { "content": "#[instrument(level = \"debug\")]\n\nfn instrumented_manual_box_pin() -> Pin<Box<dyn Future<Output = ()>>> {\n\n Box::pin(async move {})\n\n}\n\n\n", "file_path": "tracing/test_static_max_level_features/tests/test.rs", "rank": 43, "score": 192831.4125810726 }, { "content": "#[instrument(fields(%s, s.len = s.len()))]\n\nfn fn_clashy_expr_field(s: &str) {\n\n let _ = s;\n\n}\n\n\n", "file_path": "tracing-attributes/tests/fields.rs", "rank": 44, "score": 191697.11052026934 }, { "content": "#[instrument(fields(s.len = s.len(), s.is_empty = s.is_empty()))]\n\nfn fn_two_expr_fields(s: &str) {\n\n let _ = s;\n\n}\n\n\n", "file_path": "tracing-attributes/tests/fields.rs", "rank": 45, "score": 191697.0130054782 }, { "content": "#[test]\n\nfn subscriber_is_collector() {\n\n let s = NopSubscriber.with_collector(NopCollector);\n\n assert_collector(s)\n\n}\n\n\n", "file_path": "tracing-subscriber/src/subscribe/tests.rs", "rank": 46, "score": 191084.48364755185 }, { "content": "#[test]\n\nfn downcasts_to_subscriber() {\n\n let s = StringSubscriber(\"subscriber_1\")\n\n .and_then(StringSubscriber2(\"subscriber_2\"))\n\n .and_then(StringSubscriber3(\"subscriber_3\"))\n\n .with_collector(NopCollector);\n\n let subscriber =\n\n <dyn Collect>::downcast_ref::<StringSubscriber>(&s).expect(\"subscriber 1 should downcast\");\n\n assert_eq!(subscriber.0, \"subscriber_1\");\n\n let subscriber =\n\n <dyn Collect>::downcast_ref::<StringSubscriber2>(&s).expect(\"subscriber 2 should downcast\");\n\n assert_eq!(subscriber.0, \"subscriber_2\");\n\n let subscriber =\n\n <dyn Collect>::downcast_ref::<StringSubscriber3>(&s).expect(\"subscriber 3 should downcast\");\n\n assert_eq!(subscriber.0, \"subscriber_3\");\n\n}\n\n\n\n#[cfg(all(feature = \"registry\", feature = \"std\"))]\n\nmod registry_tests {\n\n use super::*;\n\n use crate::registry::LookupSpan;\n", "file_path": "tracing-subscriber/src/subscribe/tests.rs", "rank": 47, "score": 191084.48364755185 }, { "content": "#[test]\n\nfn test_impl_type() {\n\n let span = span::mock().named(\"ret_impl_type\");\n\n let (collector, handle) = collector::mock()\n\n .new_span(span.clone())\n\n .enter(span.clone())\n\n .event(\n\n event::mock()\n\n .with_fields(field::mock(\"return\").with_value(&tracing::field::debug(42)))\n\n .at_level(Level::INFO),\n\n )\n\n .exit(span.clone())\n\n .drop_span(span)\n\n .done()\n\n .run_with_handle();\n\n\n\n with_default(collector, ret_impl_type);\n\n handle.assert_finished();\n\n}\n\n\n", "file_path": "tracing-attributes/tests/ret.rs", "rank": 48, "score": 190006.8669630313 }, { "content": "#[test]\n\nfn clashy_expr_field() {\n\n let span = span::mock().with_field(\n\n // Overriding the `s` field should record `s` as a `Display` value,\n\n // rather than as a `Debug` value.\n\n mock(\"s\")\n\n .with_value(&tracing::field::display(\"hello world\"))\n\n .and(mock(\"s.len\").with_value(&\"hello world\".len()))\n\n .only(),\n\n );\n\n run_test(span, || {\n\n fn_clashy_expr_field(\"hello world\");\n\n });\n\n\n\n let span = span::mock().with_field(mock(\"s\").with_value(&\"s\").only());\n\n run_test(span, || {\n\n fn_clashy_expr_field2(\"hello world\");\n\n });\n\n}\n\n\n", "file_path": "tracing-attributes/tests/fields.rs", "rank": 49, "score": 189854.04120288804 }, { "content": "#[test]\n\nfn two_expr_fields() {\n\n let span = span::mock().with_field(\n\n mock(\"s\")\n\n .with_value(&\"hello world\")\n\n .and(mock(\"s.len\").with_value(&\"hello world\".len()))\n\n .and(mock(\"s.is_empty\").with_value(&false))\n\n .only(),\n\n );\n\n run_test(span, || {\n\n fn_two_expr_fields(\"hello world\");\n\n });\n\n}\n\n\n", "file_path": "tracing-attributes/tests/fields.rs", "rank": 50, "score": 189854.04120288807 }, { "content": "#[test]\n\nfn out_of_scope_fields() {\n\n // Reproduces tokio-rs/tracing#1296\n\n\n\n struct Thing {\n\n metrics: Arc<()>,\n\n }\n\n\n\n impl Thing {\n\n #[instrument(skip(self, _req), fields(app_id))]\n\n fn call(&mut self, _req: ()) -> Pin<Box<dyn Future<Output = Arc<()>> + Send + Sync>> {\n\n // ...\n\n let metrics = self.metrics.clone();\n\n // ...\n\n Box::pin(async move {\n\n // ...\n\n metrics // cannot find value `metrics` in this scope\n\n })\n\n }\n\n }\n\n\n", "file_path": "tracing-attributes/tests/async_fn.rs", "rank": 52, "score": 189671.32914972186 }, { "content": "#[derive(Debug)]\n\nenum ErrorKind {\n\n CollectorGone,\n\n Poisoned,\n\n}\n\n\n\n// ===== impl Collect =====\n\n\n\nimpl<S, C> crate::Subscribe<C> for Subscriber<S>\n\nwhere\n\n S: crate::Subscribe<C> + 'static,\n\n C: Collect,\n\n{\n\n #[inline]\n\n fn register_callsite(&self, metadata: &'static Metadata<'static>) -> Interest {\n\n try_lock!(self.inner.read(), else return Interest::sometimes()).register_callsite(metadata)\n\n }\n\n\n\n #[inline]\n\n fn enabled(&self, metadata: &Metadata<'_>, ctx: subscribe::Context<'_, C>) -> bool {\n\n try_lock!(self.inner.read(), else return false).enabled(metadata, ctx)\n", "file_path": "tracing-subscriber/src/reload.rs", "rank": 53, "score": 189289.69375168442 }, { "content": "#[test]\n\nfn and() {\n\n let (subscriber, handle) = subscriber::mock()\n\n .event(\n\n event::msg(\"a very interesting event\")\n\n .at_level(tracing::Level::INFO)\n\n .with_target(\"interesting_target\"),\n\n )\n\n .done()\n\n .run_with_handle();\n\n\n\n // Enables spans and events with targets starting with `interesting_target`:\n\n let target_filter = filter::filter_fn(|meta| meta.target().starts_with(\"interesting_target\"));\n\n\n\n // Enables spans and events with levels `INFO` and below:\n\n let level_filter = LevelFilter::INFO;\n\n\n\n // Combine the two filters together, returning a filter that only enables\n\n // spans and events that *both* filters will enable:\n\n let filter = target_filter.and(level_filter);\n\n\n", "file_path": "tracing-subscriber/tests/subscriber_filters/combinators.rs", "rank": 54, "score": 189186.24212634956 }, { "content": "#[test]\n\nfn two_subscribers_are_collector() {\n\n let s = NopSubscriber\n\n .and_then(NopSubscriber)\n\n .with_collector(NopCollector);\n\n assert_collector(s)\n\n}\n\n\n", "file_path": "tracing-subscriber/src/subscribe/tests.rs", "rank": 55, "score": 188250.5591550262 }, { "content": "#[test]\n\nfn three_subscribers_are_collector() {\n\n let s = NopSubscriber\n\n .and_then(NopSubscriber)\n\n .and_then(NopSubscriber)\n\n .with_collector(NopCollector);\n\n assert_collector(s)\n\n}\n\n\n", "file_path": "tracing-subscriber/src/subscribe/tests.rs", "rank": 56, "score": 188250.5591550262 }, { "content": "fn build_sampled_context() -> (Context, impl Collect, TestExporter, TracerProvider) {\n\n let (tracer, provider, exporter, subscriber) = test_tracer();\n\n let span = tracer.start(\"sampled\");\n\n let cx = Context::current_with_span(span);\n\n\n\n (cx, subscriber, exporter, provider)\n\n}\n\n\n", "file_path": "tracing-opentelemetry/tests/trace_state_propagation.rs", "rank": 57, "score": 188206.81594391467 }, { "content": "#[test]\n\nfn test() {\n\n let span = span::mock().named(\"ret\");\n\n let (collector, handle) = collector::mock()\n\n .new_span(span.clone())\n\n .enter(span.clone())\n\n .event(\n\n event::mock()\n\n .with_fields(field::mock(\"return\").with_value(&tracing::field::debug(42)))\n\n .at_level(Level::INFO),\n\n )\n\n .exit(span.clone())\n\n .drop_span(span)\n\n .done()\n\n .run_with_handle();\n\n\n\n with_default(collector, ret);\n\n handle.assert_finished();\n\n}\n\n\n", "file_path": "tracing-attributes/tests/ret.rs", "rank": 58, "score": 187791.43933376903 }, { "content": "#[test]\n\nfn test() {\n\n let span = span::mock().named(\"err\");\n\n let (collector, handle) = collector::mock()\n\n .new_span(span.clone())\n\n .enter(span.clone())\n\n .event(event::mock().at_level(Level::ERROR))\n\n .exit(span.clone())\n\n .drop_span(span)\n\n .done()\n\n .run_with_handle();\n\n with_default(collector, || err().ok());\n\n handle.assert_finished();\n\n}\n\n\n", "file_path": "tracing-attributes/tests/err.rs", "rank": 59, "score": 187791.43933376903 }, { "content": "#[test]\n\nfn manual_impl_future() {\n\n #[allow(clippy::manual_async_fn)]\n\n #[instrument]\n\n fn manual_impl_future() -> impl Future<Output = ()> {\n\n async {\n\n tracing::trace!(poll = true);\n\n }\n\n }\n\n\n\n let span = span::mock().named(\"manual_impl_future\");\n\n let poll_event = || event::mock().with_fields(field::mock(\"poll\").with_value(&true));\n\n\n\n let (collector, handle) = collector::mock()\n\n // await manual_impl_future\n\n .new_span(span.clone())\n\n .enter(span.clone())\n\n .event(poll_event())\n\n .exit(span.clone())\n\n .drop_span(span)\n\n .done()\n", "file_path": "tracing-attributes/tests/async_fn.rs", "rank": 60, "score": 186952.47612867458 }, { "content": "#[test]\n\nfn downcasts_to_collector() {\n\n let s = NopSubscriber\n\n .and_then(NopSubscriber)\n\n .and_then(NopSubscriber)\n\n .with_collector(StringCollector(\"collector\"));\n\n let collector =\n\n <dyn Collect>::downcast_ref::<StringCollector>(&s).expect(\"collector should downcast\");\n\n assert_eq!(collector.0, \"collector\");\n\n}\n\n\n", "file_path": "tracing-subscriber/src/subscribe/tests.rs", "rank": 61, "score": 186055.1471577166 }, { "content": "#[test]\n\nfn basic_subscriber_filters() {\n\n let (trace_subscriber, trace_handle) = subscriber::named(\"trace\")\n\n .event(event::mock().at_level(Level::TRACE))\n\n .event(event::mock().at_level(Level::DEBUG))\n\n .event(event::mock().at_level(Level::INFO))\n\n .done()\n\n .run_with_handle();\n\n\n\n let (debug_subscriber, debug_handle) = subscriber::named(\"debug\")\n\n .event(event::mock().at_level(Level::DEBUG))\n\n .event(event::mock().at_level(Level::INFO))\n\n .done()\n\n .run_with_handle();\n\n\n\n let (info_subscriber, info_handle) = subscriber::named(\"info\")\n\n .event(event::mock().at_level(Level::INFO))\n\n .done()\n\n .run_with_handle();\n\n\n\n let _subscriber = tracing_subscriber::registry()\n", "file_path": "tracing-subscriber/tests/subscriber_filters/main.rs", "rank": 62, "score": 185527.84218206035 }, { "content": "#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n#[test]\n\nfn warn_span() {\n\n warn_span!(target: \"foo_events\", \"foo\", bar.baz = ?2, quux = %3, quuux = 4);\n\n warn_span!(target: \"foo_events\", \"foo\", bar.baz = 2, quux = 3);\n\n warn_span!(target: \"foo_events\", \"foo\", bar.baz = 2, quux = 4,);\n\n warn_span!(target: \"foo_events\", \"foo\");\n\n warn_span!(target: \"foo_events\", \"bar\",);\n\n warn_span!(\"foo\", bar.baz = 2, quux = 3);\n\n warn_span!(\"foo\", bar.baz = 2, quux = 4,);\n\n warn_span!(\"foo\", bar.baz = ?2);\n\n warn_span!(\"foo\", bar.baz = %2);\n\n warn_span!(\"bar\");\n\n warn_span!(\"bar\",);\n\n}\n\n\n", "file_path": "tracing/tests/macros.rs", "rank": 63, "score": 185464.9982278204 }, { "content": "#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n#[test]\n\nfn warn_with_parent() {\n\n let p = span!(Level::TRACE, \"im_a_parent!\");\n\n warn!(parent: &p, foo = ?3, bar.baz = %2, quux = false);\n\n warn!(parent: &p, foo = 3, bar.baz = 2, quux = false);\n\n warn!(parent: &p, foo = 3, bar.baz = 3,);\n\n warn!(parent: &p, \"foo\");\n\n warn!(parent: &p, \"foo: {}\", 3);\n\n warn!(parent: &p, { foo = 3, bar.baz = 80 }, \"quux\");\n\n warn!(parent: &p, { foo = 2, bar.baz = 79 }, \"quux {:?}\", true);\n\n warn!(parent: &p, { foo = 2, bar.baz = 79 }, \"quux {:?}, {quux}\", true, quux = false);\n\n warn!(parent: &p, { foo = 2, bar.baz = 78 }, \"quux\");\n\n warn!(parent: &p, { foo = ?2, bar.baz = %78 }, \"quux\");\n\n warn!(target: \"foo_events\", parent: &p, foo = 3, bar.baz = 2, quux = false);\n\n warn!(target: \"foo_events\", parent: &p, foo = 3, bar.baz = 3,);\n\n warn!(target: \"foo_events\", parent: &p, \"foo\");\n\n warn!(target: \"foo_events\", parent: &p, \"foo: {}\", 3);\n\n warn!(target: \"foo_events\", parent: &p, { foo = 3, bar.baz = 80 }, \"quux\");\n\n warn!(target: \"foo_events\", parent: &p, { foo = 2, bar.baz = 79 }, \"quux {:?}\", true);\n\n warn!(target: \"foo_events\", parent: &p, { foo = 2, bar.baz = 79 }, \"quux {:?}, {quux}\", true, quux = false);\n\n warn!(target: \"foo_events\", parent: &p, { foo = 2, bar.baz = 78, }, \"quux\");\n\n}\n\n\n", "file_path": "tracing/tests/macros.rs", "rank": 64, "score": 185464.9982278204 }, { "content": "#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n#[test]\n\nfn warn_root() {\n\n warn!(parent: None, foo = ?3, bar.baz = %2, quux = false);\n\n warn!(parent: None, foo = 3, bar.baz = 2, quux = false);\n\n warn!(parent: None, foo = 3, bar.baz = 3,);\n\n warn!(parent: None, \"foo\");\n\n warn!(parent: None, \"foo: {}\", 3);\n\n warn!(parent: None, { foo = 3, bar.baz = 80 }, \"quux\");\n\n warn!(parent: None, { foo = 2, bar.baz = 79 }, \"quux {:?}\", true);\n\n warn!(parent: None, { foo = 2, bar.baz = 79 }, \"quux {:?}, {quux}\", true, quux = false);\n\n warn!(parent: None, { foo = 2, bar.baz = 78 }, \"quux\");\n\n warn!(parent: None, { foo = ?2, bar.baz = %78 }, \"quux\");\n\n warn!(target: \"foo_events\", parent: None, foo = 3, bar.baz = 2, quux = false);\n\n warn!(target: \"foo_events\", parent: None, foo = 3, bar.baz = 3,);\n\n warn!(target: \"foo_events\", parent: None, \"foo\");\n\n warn!(target: \"foo_events\", parent: None, \"foo: {}\", 3);\n\n warn!(target: \"foo_events\", parent: None, { foo = 3, bar.baz = 80 }, \"quux\");\n\n warn!(target: \"foo_events\", parent: None, { foo = 2, bar.baz = 79 }, \"quux {:?}\", true);\n\n warn!(target: \"foo_events\", parent: None, { foo = 2, bar.baz = 79 }, \"quux {:?}, {quux}\", true, quux = false);\n\n warn!(target: \"foo_events\", parent: None, { foo = 2, bar.baz = 78, }, \"quux\");\n\n}\n\n\n", "file_path": "tracing/tests/macros.rs", "rank": 65, "score": 185464.9982278204 }, { "content": "#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n#[test]\n\nfn info_with_parent() {\n\n let p = span!(Level::TRACE, \"im_a_parent!\");\n\n info!(parent: &p, foo = ?3, bar.baz = %2, quux = false);\n\n info!(parent: &p, foo = 3, bar.baz = 2, quux = false);\n\n info!(parent: &p, foo = 3, bar.baz = 3,);\n\n info!(parent: &p, \"foo\");\n\n info!(parent: &p, \"foo: {}\", 3);\n\n info!(parent: &p, { foo = 3, bar.baz = 80 }, \"quux\");\n\n info!(parent: &p, { foo = 2, bar.baz = 79 }, \"quux {:?}\", true);\n\n info!(parent: &p, { foo = 2, bar.baz = 79 }, \"quux {:?}, {quux}\", true, quux = false);\n\n info!(parent: &p, { foo = 2, bar.baz = 78 }, \"quux\");\n\n info!(parent: &p, { foo = ?2, bar.baz = %78 }, \"quux\");\n\n info!(target: \"foo_events\", parent: &p, foo = 3, bar.baz = 2, quux = false);\n\n info!(target: \"foo_events\", parent: &p, foo = 3, bar.baz = 3,);\n\n info!(target: \"foo_events\", parent: &p, \"foo\");\n\n info!(target: \"foo_events\", parent: &p, \"foo: {}\", 3);\n\n info!(target: \"foo_events\", parent: &p, { foo = 3, bar.baz = 80 }, \"quux\");\n\n info!(target: \"foo_events\", parent: &p, { foo = 2, bar.baz = 79 }, \"quux {:?}\", true);\n\n info!(target: \"foo_events\", parent: &p, { foo = 2, bar.baz = 79 }, \"quux {:?}, {quux}\", true, quux = false);\n\n info!(target: \"foo_events\", parent: &p, { foo = 2, bar.baz = 78, }, \"quux\");\n\n}\n\n\n", "file_path": "tracing/tests/macros.rs", "rank": 66, "score": 185462.99615778928 }, { "content": "#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n#[test]\n\nfn info_root() {\n\n info!(parent: None, foo = ?3, bar.baz = %2, quux = false);\n\n info!(parent: None, foo = 3, bar.baz = 2, quux = false);\n\n info!(parent: None, foo = 3, bar.baz = 3,);\n\n info!(parent: None, \"foo\");\n\n info!(parent: None, \"foo: {}\", 3);\n\n info!(parent: None, { foo = 3, bar.baz = 80 }, \"quux\");\n\n info!(parent: None, { foo = 2, bar.baz = 79 }, \"quux {:?}\", true);\n\n info!(parent: None, { foo = 2, bar.baz = 79 }, \"quux {:?}, {quux}\", true, quux = false);\n\n info!(parent: None, { foo = 2, bar.baz = 78 }, \"quux\");\n\n info!(parent: None, { foo = ?2, bar.baz = %78 }, \"quux\");\n\n info!(target: \"foo_events\", parent: None, foo = 3, bar.baz = 2, quux = false);\n\n info!(target: \"foo_events\", parent: None, foo = 3, bar.baz = 3,);\n\n info!(target: \"foo_events\", parent: None, \"foo\");\n\n info!(target: \"foo_events\", parent: None, \"foo: {}\", 3);\n\n info!(target: \"foo_events\", parent: None, { foo = 3, bar.baz = 80 }, \"quux\");\n\n info!(target: \"foo_events\", parent: None, { foo = 2, bar.baz = 79 }, \"quux {:?}\", true);\n\n info!(target: \"foo_events\", parent: None, { foo = 2, bar.baz = 79 }, \"quux {:?}, {quux}\", true, quux = false);\n\n info!(target: \"foo_events\", parent: None, { foo = 2, bar.baz = 78, }, \"quux\");\n\n}\n\n\n", "file_path": "tracing/tests/macros.rs", "rank": 67, "score": 185462.9961577893 }, { "content": "#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n#[test]\n\nfn error_span() {\n\n error_span!(target: \"foo_events\", \"foo\", bar.baz = ?2, quux = %3, quuux = 4);\n\n error_span!(target: \"foo_events\", \"foo\", bar.baz = 2, quux = 3);\n\n error_span!(target: \"foo_events\", \"foo\", bar.baz = 2, quux = 4,);\n\n error_span!(target: \"foo_events\", \"foo\");\n\n error_span!(target: \"foo_events\", \"bar\",);\n\n error_span!(\"foo\", bar.baz = 2, quux = 3);\n\n error_span!(\"foo\", bar.baz = 2, quux = 4,);\n\n error_span!(\"foo\", bar.baz = ?2);\n\n error_span!(\"foo\", bar.baz = %2);\n\n error_span!(\"bar\");\n\n error_span!(\"bar\",);\n\n}\n\n\n", "file_path": "tracing/tests/macros.rs", "rank": 68, "score": 185240.13978472163 }, { "content": "#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n#[test]\n\nfn error_with_parent() {\n\n let p = span!(Level::TRACE, \"im_a_parent!\");\n\n error!(parent: &p, foo = ?3, bar.baz = %2, quux = false);\n\n error!(parent: &p, foo = 3, bar.baz = 2, quux = false);\n\n error!(parent: &p, foo = 3, bar.baz = 3,);\n\n error!(parent: &p, \"foo\");\n\n error!(parent: &p, \"foo: {}\", 3);\n\n error!(parent: &p, { foo = 3, bar.baz = 80 }, \"quux\");\n\n error!(parent: &p, { foo = 2, bar.baz = 79 }, \"quux {:?}\", true);\n\n error!(parent: &p, { foo = 2, bar.baz = 79 }, \"quux {:?}, {quux}\", true, quux = false);\n\n error!(parent: &p, { foo = 2, bar.baz = 78 }, \"quux\");\n\n error!(parent: &p, { foo = ?2, bar.baz = %78 }, \"quux\");\n\n error!(target: \"foo_events\", parent: &p, foo = 3, bar.baz = 2, quux = false);\n\n error!(target: \"foo_events\", parent: &p, foo = 3, bar.baz = 3,);\n\n error!(target: \"foo_events\", parent: &p, \"foo\");\n\n error!(target: \"foo_events\", parent: &p, \"foo: {}\", 3);\n\n error!(target: \"foo_events\", parent: &p, { foo = 3, bar.baz = 80 }, \"quux\");\n\n error!(target: \"foo_events\", parent: &p, { foo = 2, bar.baz = 79 }, \"quux {:?}\", true);\n\n error!(target: \"foo_events\", parent: &p, { foo = 2, bar.baz = 79 }, \"quux {:?}, {quux}\", true, quux = false);\n\n error!(target: \"foo_events\", parent: &p, { foo = 2, bar.baz = 78, }, \"quux\");\n\n}\n\n\n", "file_path": "tracing/tests/macros.rs", "rank": 69, "score": 185240.1397847216 }, { "content": "#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n#[test]\n\nfn error_root() {\n\n error!(parent: None, foo = ?3, bar.baz = %2, quux = false);\n\n error!(parent: None, foo = 3, bar.baz = 2, quux = false);\n\n error!(parent: None, foo = 3, bar.baz = 3,);\n\n error!(parent: None, \"foo\");\n\n error!(parent: None, \"foo: {}\", 3);\n\n error!(parent: None, { foo = 3, bar.baz = 80 }, \"quux\");\n\n error!(parent: None, { foo = 2, bar.baz = 79 }, \"quux {:?}\", true);\n\n error!(parent: None, { foo = 2, bar.baz = 79 }, \"quux {:?}, {quux}\", true, quux = false);\n\n error!(parent: None, { foo = 2, bar.baz = 78 }, \"quux\");\n\n error!(parent: None, { foo = ?2, bar.baz = %78 }, \"quux\");\n\n error!(target: \"foo_events\", parent: None, foo = 3, bar.baz = 2, quux = false);\n\n error!(target: \"foo_events\", parent: None, foo = 3, bar.baz = 3,);\n\n error!(target: \"foo_events\", parent: None, \"foo\");\n\n error!(target: \"foo_events\", parent: None, \"foo: {}\", 3);\n\n error!(target: \"foo_events\", parent: None, { foo = 3, bar.baz = 80 }, \"quux\");\n\n error!(target: \"foo_events\", parent: None, { foo = 2, bar.baz = 79 }, \"quux {:?}\", true);\n\n error!(target: \"foo_events\", parent: None, { foo = 2, bar.baz = 79 }, \"quux {:?}, {quux}\", true, quux = false);\n\n error!(target: \"foo_events\", parent: None, { foo = 2, bar.baz = 78, }, \"quux\");\n\n}\n\n\n", "file_path": "tracing/tests/macros.rs", "rank": 70, "score": 185240.13978472163 }, { "content": "#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n#[test]\n\nfn moved_field() {\n\n let (collector, handle) = collector::mock()\n\n .event(\n\n event::mock().with_fields(\n\n field::mock(\"foo\")\n\n .with_value(&display(\"hello from my event\"))\n\n .only(),\n\n ),\n\n )\n\n .done()\n\n .run_with_handle();\n\n with_default(collector, || {\n\n let from = \"my event\";\n\n tracing::event!(Level::INFO, foo = display(format!(\"hello from {}\", from)))\n\n });\n\n\n\n handle.assert_finished();\n\n}\n\n\n", "file_path": "tracing/tests/event.rs", "rank": 71, "score": 185058.2648482504 }, { "content": "#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n#[test]\n\nfn moved_field() {\n\n let (collector, handle) = collector::mock()\n\n .new_span(\n\n span::mock().named(\"foo\").with_field(\n\n field::mock(\"bar\")\n\n .with_value(&display(\"hello from my span\"))\n\n .only(),\n\n ),\n\n )\n\n .enter(span::mock().named(\"foo\"))\n\n .exit(span::mock().named(\"foo\"))\n\n .drop_span(span::mock().named(\"foo\"))\n\n .done()\n\n .run_with_handle();\n\n with_default(collector, || {\n\n let from = \"my span\";\n\n let span = tracing::span!(\n\n Level::TRACE,\n\n \"foo\",\n\n bar = display(format!(\"hello from {}\", from))\n\n );\n\n span.in_scope(|| {});\n\n });\n\n\n\n handle.assert_finished();\n\n}\n\n\n", "file_path": "tracing/tests/span.rs", "rank": 72, "score": 185058.2648482504 }, { "content": "#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n#[test]\n\nfn field_shorthand_only() {\n\n #[derive(Debug)]\n\n struct Position {\n\n x: f32,\n\n y: f32,\n\n }\n\n let pos = Position {\n\n x: 3.234,\n\n y: -1.223,\n\n };\n\n\n\n trace!(?pos.x, ?pos.y);\n\n debug!(?pos.x, ?pos.y);\n\n info!(?pos.x, ?pos.y);\n\n warn!(?pos.x, ?pos.y);\n\n error!(?pos.x, ?pos.y);\n\n event!(Level::TRACE, ?pos.x, ?pos.y);\n\n}\n\n\n", "file_path": "tracing/tests/macros.rs", "rank": 73, "score": 185058.2648482504 }, { "content": "#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n#[test]\n\nfn borrowed_field() {\n\n let (collector, handle) = collector::mock()\n\n .new_span(\n\n span::mock().named(\"foo\").with_field(\n\n field::mock(\"bar\")\n\n .with_value(&display(\"hello from my span\"))\n\n .only(),\n\n ),\n\n )\n\n .enter(span::mock().named(\"foo\"))\n\n .exit(span::mock().named(\"foo\"))\n\n .drop_span(span::mock().named(\"foo\"))\n\n .done()\n\n .run_with_handle();\n\n\n\n with_default(collector, || {\n\n let from = \"my span\";\n\n let mut message = format!(\"hello from {}\", from);\n\n let span = tracing::span!(Level::TRACE, \"foo\", bar = display(&message));\n\n span.in_scope(|| {\n\n message.insert_str(10, \" inside\");\n\n });\n\n });\n\n\n\n handle.assert_finished();\n\n}\n\n\n\n#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n#[test]\n\n// If emitting log instrumentation, this gets moved anyway, breaking the test.\n", "file_path": "tracing/tests/span.rs", "rank": 74, "score": 185058.2648482504 }, { "content": "#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n#[test]\n\nfn borrowed_field() {\n\n let (collector, handle) = collector::mock()\n\n .event(\n\n event::mock().with_fields(\n\n field::mock(\"foo\")\n\n .with_value(&display(\"hello from my event\"))\n\n .only(),\n\n ),\n\n )\n\n .done()\n\n .run_with_handle();\n\n with_default(collector, || {\n\n let from = \"my event\";\n\n let mut message = format!(\"hello from {}\", from);\n\n tracing::event!(Level::INFO, foo = display(&message));\n\n message.push_str(\", which happened!\");\n\n });\n\n\n\n handle.assert_finished();\n\n}\n\n\n\n#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n#[test]\n\n// If emitting log instrumentation, this gets moved anyway, breaking the test.\n", "file_path": "tracing/tests/event.rs", "rank": 75, "score": 185058.2648482504 }, { "content": "#[test]\n\nfn fields() {\n\n #[instrument(target = \"my_target\", level = \"debug\")]\n\n fn my_fn(arg1: usize, arg2: bool) {}\n\n\n\n let span = span::mock()\n\n .named(\"my_fn\")\n\n .at_level(Level::DEBUG)\n\n .with_target(\"my_target\");\n\n\n\n let span2 = span::mock()\n\n .named(\"my_fn\")\n\n .at_level(Level::DEBUG)\n\n .with_target(\"my_target\");\n\n let (collector, handle) = collector::mock()\n\n .new_span(\n\n span.clone().with_field(\n\n field::mock(\"arg1\")\n\n .with_value(&2usize)\n\n .and(field::mock(\"arg2\").with_value(&false))\n\n .only(),\n", "file_path": "tracing-attributes/tests/instrument.rs", "rank": 76, "score": 185057.404592688 }, { "content": "fn bench_thrpt(c: &mut Criterion, name: &'static str, mut f: impl FnMut(&mut Group<'_>, &usize)) {\n\n const N_SPANS: &[usize] = &[1, 10, 50];\n\n\n\n let mut group = c.benchmark_group(name);\n\n for spans in N_SPANS {\n\n group.throughput(Throughput::Elements(*spans as u64));\n\n f(&mut group, spans);\n\n }\n\n group.finish();\n\n}\n\n\n", "file_path": "tracing-subscriber/benches/fmt.rs", "rank": 77, "score": 184896.4395362102 }, { "content": "#[test]\n\nfn async_fn_with_async_trait_and_fields_expressions() {\n\n use async_trait::async_trait;\n\n\n", "file_path": "tracing-attributes/tests/async_fn.rs", "rank": 78, "score": 184069.2113075002 }, { "content": "#[test]\n\nfn subscriber_filters() {\n\n let (unfiltered, unfiltered_handle) = unfiltered(\"unfiltered\");\n\n let (filtered, filtered_handle) = filtered(\"filtered\");\n\n\n\n let _subscriber = tracing_subscriber::registry()\n\n .with(unfiltered)\n\n .with(filtered.with_filter(filter()))\n\n .set_default();\n\n\n\n events();\n\n\n\n unfiltered_handle.assert_finished();\n\n filtered_handle.assert_finished();\n\n}\n\n\n", "file_path": "tracing-subscriber/tests/cached_subscriber_filters_dont_break_other_subscribers.rs", "rank": 79, "score": 183683.562551518 }, { "content": "#[test]\n\nfn subscriber_filters() {\n\n let (unfiltered, unfiltered_handle) = unfiltered(\"unfiltered\");\n\n let (filtered, filtered_handle) = filtered(\"filtered\");\n\n\n\n let _subscriber = tracing_subscriber::registry()\n\n .with(unfiltered)\n\n .with(filtered.with_filter(filter()))\n\n .set_default();\n\n\n\n events();\n\n\n\n unfiltered_handle.assert_finished();\n\n filtered_handle.assert_finished();\n\n}\n\n\n", "file_path": "tracing-subscriber/tests/unhinted_subscriber_filters_dont_break_other_subscribers.rs", "rank": 80, "score": 183683.562551518 }, { "content": "#[test]\n\nfn subscriber_filters() {\n\n let (unfiltered, unfiltered_handle) = unfiltered(\"unfiltered\");\n\n let (filtered, filtered_handle) = filtered(\"filtered\");\n\n\n\n let subscriber = tracing_subscriber::registry()\n\n .with(unfiltered)\n\n .with(filtered.with_filter(filter()));\n\n assert_eq!(subscriber.max_level_hint(), None);\n\n let _subscriber = subscriber.set_default();\n\n\n\n events();\n\n\n\n unfiltered_handle.assert_finished();\n\n filtered_handle.assert_finished();\n\n}\n\n\n", "file_path": "tracing-subscriber/tests/hinted_subscriber_filters_dont_break_other_subscribers.rs", "rank": 81, "score": 183683.562551518 }, { "content": "#[test]\n\nfn with_filters_unboxed() {\n\n let (trace_subscriber, trace_handle) = subscriber::named(\"trace\")\n\n .event(event::mock().at_level(Level::TRACE))\n\n .event(event::mock().at_level(Level::DEBUG))\n\n .event(event::mock().at_level(Level::INFO))\n\n .done()\n\n .run_with_handle();\n\n let trace_subscriber = trace_subscriber.with_filter(LevelFilter::TRACE);\n\n\n\n let (debug_subscriber, debug_handle) = subscriber::named(\"debug\")\n\n .event(event::mock().at_level(Level::DEBUG))\n\n .event(event::mock().at_level(Level::INFO))\n\n .done()\n\n .run_with_handle();\n\n let debug_subscriber = debug_subscriber.with_filter(LevelFilter::DEBUG);\n\n\n\n let (info_subscriber, info_handle) = subscriber::named(\"info\")\n\n .event(event::mock().at_level(Level::INFO))\n\n .done()\n\n .run_with_handle();\n", "file_path": "tracing-subscriber/tests/subscriber_filters/vec.rs", "rank": 82, "score": 183052.98152539926 }, { "content": "#[test]\n\nfn basic_trees() {\n\n let (with_target, with_target_handle) = subscriber::named(\"info_with_target\")\n\n .event(event::mock().at_level(Level::INFO).with_target(\"my_target\"))\n\n .done()\n\n .run_with_handle();\n\n\n\n let (info, info_handle) = subscriber::named(\"info\")\n\n .event(\n\n event::mock()\n\n .at_level(Level::INFO)\n\n .with_target(module_path!()),\n\n )\n\n .event(event::mock().at_level(Level::INFO).with_target(\"my_target\"))\n\n .done()\n\n .run_with_handle();\n\n\n\n let (all, all_handle) = subscriber::named(\"all\")\n\n .event(\n\n event::mock()\n\n .at_level(Level::INFO)\n", "file_path": "tracing-subscriber/tests/subscriber_filters/trees.rs", "rank": 83, "score": 183052.98152539926 }, { "content": "#[test]\n\nfn with_filters_boxed() {\n\n let (unfiltered_subscriber, unfiltered_handle) = subscriber::named(\"unfiltered\")\n\n .event(event::mock().at_level(Level::TRACE))\n\n .event(event::mock().at_level(Level::DEBUG))\n\n .event(event::mock().at_level(Level::INFO))\n\n .done()\n\n .run_with_handle();\n\n let unfiltered_subscriber = unfiltered_subscriber.boxed();\n\n\n\n let (debug_subscriber, debug_handle) = subscriber::named(\"debug\")\n\n .event(event::mock().at_level(Level::DEBUG))\n\n .event(event::mock().at_level(Level::INFO))\n\n .done()\n\n .run_with_handle();\n\n let debug_subscriber = debug_subscriber.with_filter(LevelFilter::DEBUG).boxed();\n\n\n\n let (target_subscriber, target_handle) = subscriber::named(\"target\")\n\n .event(event::mock().at_level(Level::INFO))\n\n .done()\n\n .run_with_handle();\n", "file_path": "tracing-subscriber/tests/subscriber_filters/vec.rs", "rank": 84, "score": 183052.98152539926 }, { "content": "#[test]\n\nfn box_works() {\n\n let (subscribe, handle) = subscribe();\n\n let subscribe = Box::new(subscribe.with_filter(filter()));\n\n\n\n let _guard = tracing_subscriber::registry().with(subscribe).set_default();\n\n\n\n for i in 0..2 {\n\n tracing::info!(i);\n\n }\n\n\n\n handle.assert_finished();\n\n}\n\n\n\n/// the same as `box_works` but with a type-erased `Box`.\n", "file_path": "tracing-subscriber/tests/subscriber_filters/boxed.rs", "rank": 85, "score": 183052.98152539926 }, { "content": "#[test]\n\n#[cfg_attr(not(feature = \"tracing-log\"), ignore)]\n\nfn log_events() {\n\n // Reproduces https://github.com/tokio-rs/tracing/issues/1563\n\n mod inner {\n\n pub(super) const MODULE_PATH: &str = module_path!();\n\n\n\n #[tracing::instrument]\n\n pub(super) fn logs() {\n\n log::debug!(\"inner\");\n\n }\n\n }\n\n\n\n let filter = Targets::new()\n\n .with_default(LevelFilter::DEBUG)\n\n .with_target(inner::MODULE_PATH, LevelFilter::WARN);\n\n\n\n let subscriber =\n\n tracing_subscriber::subscribe::Identity::new().with_filter(filter_fn(move |_meta| true));\n\n\n\n let _guard = tracing_subscriber::registry()\n\n .with(filter)\n\n .with(subscriber)\n\n .set_default();\n\n\n\n inner::logs();\n\n}\n\n\n", "file_path": "tracing-subscriber/tests/subscriber_filters/targets.rs", "rank": 86, "score": 183052.68739338557 }, { "content": "#[test]\n\nfn inner_subscriber_short_circuits() {\n\n // This test ensures that when a global filter short-circuits `Interest`\n\n // evaluation, we aren't left with a \"dirty\" per-subscriber filter state.\n\n\n\n let (subscriber, handle) = subscriber::mock()\n\n .event(event::msg(\"hello world\"))\n\n .done()\n\n .run_with_handle();\n\n\n\n let filter = Targets::new().with_target(\"magic_target\", LevelFilter::DEBUG);\n\n\n\n let _guard = tracing_subscriber::registry()\n\n // Note: we don't just use a `LevelFilter` for the global filter here,\n\n // because it will just return a max level filter, and the chain of\n\n // `register_callsite` calls that would trigger the bug never happens...\n\n .with(filter::filter_fn(|meta| meta.level() <= &Level::INFO))\n\n .with(subscriber.with_filter(filter))\n\n .set_default();\n\n\n\n tracing::debug!(\"skip me please!\");\n\n tracing::info!(target: \"magic_target\", \"hello world\");\n\n\n\n handle.assert_finished();\n\n}\n", "file_path": "tracing-subscriber/tests/subscriber_filters/targets.rs", "rank": 87, "score": 182909.50332714498 }, { "content": "#[test]\n\nfn out_of_order() {\n\n let (unfiltered1, unfiltered1_handle) = unfiltered(\"unfiltered_1\");\n\n let (unfiltered2, unfiltered2_handle) = unfiltered(\"unfiltered_2\");\n\n\n\n let (filtered1, filtered1_handle) = filtered(\"filtered_1\");\n\n let (filtered2, filtered2_handle) = filtered(\"filtered_2\");\n\n\n\n let _subscriber = tracing_subscriber::registry()\n\n .with(unfiltered1)\n\n .with(filtered1.with_filter(filter()))\n\n .with(unfiltered2)\n\n .with(filtered2.with_filter(filter()))\n\n .set_default();\n\n events();\n\n\n\n unfiltered1_handle.assert_finished();\n\n unfiltered2_handle.assert_finished();\n\n filtered1_handle.assert_finished();\n\n filtered2_handle.assert_finished();\n\n}\n\n\n", "file_path": "tracing-subscriber/tests/cached_subscriber_filters_dont_break_other_subscribers.rs", "rank": 88, "score": 182909.503327145 }, { "content": "#[test]\n\nfn out_of_order() {\n\n let (unfiltered1, unfiltered1_handle) = unfiltered(\"unfiltered_1\");\n\n let (unfiltered2, unfiltered2_handle) = unfiltered(\"unfiltered_2\");\n\n\n\n let (filtered1, filtered1_handle) = filtered(\"filtered_1\");\n\n let (filtered2, filtered2_handle) = filtered(\"filtered_2\");\n\n\n\n let subscriber = tracing_subscriber::registry()\n\n .with(unfiltered1)\n\n .with(filtered1.with_filter(filter()))\n\n .with(unfiltered2)\n\n .with(filtered2.with_filter(filter()));\n\n assert_eq!(subscriber.max_level_hint(), None);\n\n let _subscriber = subscriber.set_default();\n\n\n\n events();\n\n\n\n unfiltered1_handle.assert_finished();\n\n unfiltered2_handle.assert_finished();\n\n filtered1_handle.assert_finished();\n\n filtered2_handle.assert_finished();\n\n}\n\n\n", "file_path": "tracing-subscriber/tests/hinted_subscriber_filters_dont_break_other_subscribers.rs", "rank": 89, "score": 182909.50332714498 }, { "content": "#[test]\n\nfn basic_subscriber_filters_spans() {\n\n let (trace_subscriber, trace_handle) = subscriber::named(\"trace\")\n\n .new_span(span::mock().at_level(Level::TRACE))\n\n .new_span(span::mock().at_level(Level::DEBUG))\n\n .new_span(span::mock().at_level(Level::INFO))\n\n .done()\n\n .run_with_handle();\n\n\n\n let (debug_subscriber, debug_handle) = subscriber::named(\"debug\")\n\n .new_span(span::mock().at_level(Level::DEBUG))\n\n .new_span(span::mock().at_level(Level::INFO))\n\n .done()\n\n .run_with_handle();\n\n\n\n let (info_subscriber, info_handle) = subscriber::named(\"info\")\n\n .new_span(span::mock().at_level(Level::INFO))\n\n .done()\n\n .run_with_handle();\n\n\n\n let _subscriber = tracing_subscriber::registry()\n", "file_path": "tracing-subscriber/tests/subscriber_filters/main.rs", "rank": 90, "score": 182909.50332714498 }, { "content": "#[test]\n\nfn mi() {\n\n let (unfiltered1, unfiltered1_handle) = unfiltered(\"unfiltered_1\");\n\n let (unfiltered2, unfiltered2_handle) = unfiltered(\"unfiltered_2\");\n\n let (filtered1, filtered1_handle) = filtered(\"filtered_1\");\n\n let (filtered2, filtered2_handle) = filtered(\"filtered_2\");\n\n\n\n let layered1 = filtered1.with_filter(filter()).and_then(unfiltered1);\n\n let layered2 = unfiltered2.and_then(filtered2.with_filter(filter()));\n\n\n\n let subscriber = tracing_subscriber::registry().with(layered1).with(layered2);\n\n assert_eq!(subscriber.max_level_hint(), None);\n\n let _subscriber = subscriber.set_default();\n\n\n\n events();\n\n\n\n unfiltered1_handle.assert_finished();\n\n unfiltered2_handle.assert_finished();\n\n filtered1_handle.assert_finished();\n\n filtered2_handle.assert_finished();\n\n}\n\n\n", "file_path": "tracing-subscriber/tests/hinted_subscriber_filters_dont_break_other_subscribers.rs", "rank": 91, "score": 182909.503327145 }, { "content": "#[test]\n\nfn out_of_order() {\n\n let (unfiltered1, unfiltered1_handle) = unfiltered(\"unfiltered_1\");\n\n let (unfiltered2, unfiltered2_handle) = unfiltered(\"unfiltered_2\");\n\n\n\n let (filtered1, filtered1_handle) = filtered(\"filtered_1\");\n\n let (filtered2, filtered2_handle) = filtered(\"filtered_2\");\n\n\n\n let _subscriber = tracing_subscriber::registry()\n\n .with(unfiltered1)\n\n .with(filtered1.with_filter(filter()))\n\n .with(unfiltered2)\n\n .with(filtered2.with_filter(filter()))\n\n .set_default();\n\n events();\n\n\n\n unfiltered1_handle.assert_finished();\n\n unfiltered2_handle.assert_finished();\n\n filtered1_handle.assert_finished();\n\n filtered2_handle.assert_finished();\n\n}\n\n\n", "file_path": "tracing-subscriber/tests/unhinted_subscriber_filters_dont_break_other_subscribers.rs", "rank": 92, "score": 182909.50332714498 }, { "content": "fn events() {\n\n tracing::trace!(\"hello trace\");\n\n tracing::debug!(\"hello debug\");\n\n tracing::info!(\"hello info\");\n\n tracing::warn!(\"hello warn\");\n\n tracing::error!(\"hello error\");\n\n}\n\n\n", "file_path": "tracing-subscriber/tests/cached_subscriber_filters_dont_break_other_subscribers.rs", "rank": 93, "score": 182903.56155668234 }, { "content": "fn events() {\n\n tracing::trace!(\"hello trace\");\n\n tracing::debug!(\"hello debug\");\n\n tracing::info!(\"hello info\");\n\n tracing::warn!(\"hello warn\");\n\n tracing::error!(\"hello error\");\n\n}\n\n\n", "file_path": "tracing-subscriber/tests/hinted_subscriber_filters_dont_break_other_subscribers.rs", "rank": 94, "score": 182903.5615566823 }, { "content": "fn events() {\n\n tracing::trace!(\"hello trace\");\n\n tracing::debug!(\"hello debug\");\n\n tracing::info!(\"hello info\");\n\n tracing::warn!(\"hello warn\");\n\n tracing::error!(\"hello error\");\n\n}\n\n\n", "file_path": "tracing-subscriber/tests/unhinted_subscriber_filters_dont_break_other_subscribers.rs", "rank": 95, "score": 182903.5615566823 }, { "content": "#[derive(Debug)]\n\nenum ErrorKind {\n\n Parse(ParseError),\n\n Env(env::VarError),\n\n}\n\n\n\nimpl EnvFilter {\n\n /// `RUST_LOG` is the default environment variable used by\n\n /// [`EnvFilter::from_default_env`] and [`EnvFilter::try_from_default_env`].\n\n ///\n\n pub const DEFAULT_ENV: &'static str = \"RUST_LOG\";\n\n\n\n // === constructors, etc ===\n\n\n\n /// Returns a [builder] that can be used to configure a new [`EnvFilter`]\n\n /// instance.\n\n ///\n\n /// The [`Builder`] type is used to set additional configurations, such as\n\n /// [whether regular expressions are enabled](Builder::with_regex) or [the\n\n /// default directive](Builder::with_default_directive) before parsing an\n\n /// [`EnvFilter`] from a string or environment variable.\n", "file_path": "tracing-subscriber/src/filter/env/mod.rs", "rank": 96, "score": 182575.92239458603 }, { "content": "#[derive(Debug)]\n\nenum ParseErrorKind {\n\n #[cfg(feature = \"std\")]\n\n Field(Box<dyn std::error::Error + Send + Sync>),\n\n Level(level::ParseError),\n\n Other(Option<&'static str>),\n\n}\n\n\n\n// === impl DirectiveSet ===\n\n\n\nimpl<T> DirectiveSet<T> {\n\n #[cfg(feature = \"std\")]\n\n pub(crate) fn is_empty(&self) -> bool {\n\n self.directives.is_empty()\n\n }\n\n\n\n pub(crate) fn iter(&self) -> slice::Iter<'_, T> {\n\n self.directives.iter()\n\n }\n\n}\n\n\n", "file_path": "tracing-subscriber/src/filter/directive.rs", "rank": 97, "score": 182575.92239458603 }, { "content": "#[instrument(fields(foo = \"bar\"))]\n\nfn fn_param(param: u32) {}\n\n\n", "file_path": "tracing-attributes/tests/fields.rs", "rank": 98, "score": 182289.69597895155 }, { "content": "#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n#[test]\n\nfn warn_span_root() {\n\n warn_span!(target: \"foo_events\", parent: None, \"foo\", bar.baz = 2, quux = 3);\n\n warn_span!(target: \"foo_events\", parent: None, \"foo\", bar.baz = 2, quux = 4,);\n\n warn_span!(target: \"foo_events\", parent: None, \"foo\");\n\n warn_span!(target: \"foo_events\", parent: None, \"bar\",);\n\n warn_span!(parent: None, \"foo\", bar.baz = 2, quux = 3);\n\n warn_span!(parent: None, \"foo\", bar.baz = 2, quux = 4,);\n\n warn_span!(parent: None, \"foo\");\n\n warn_span!(parent: None, \"bar\",);\n\n}\n\n\n", "file_path": "tracing/tests/macros.rs", "rank": 99, "score": 181827.07465550565 } ]
Rust
src/bin/day24/main.rs
MattiasBuelens/advent-of-code-2019
01a88977bc7b6471bdc84aebd41e1e8d3920946d
use std::collections::{HashMap, HashSet}; fn main() { let grid: Grid = Grid::parse(include_str!("input")); println!("Answer to part 1: {}", part1(grid.clone())); println!("Answer to part 2: {}", part2(grid.clone())); } #[derive(Debug, Eq, PartialEq, Copy, Clone)] enum Tile { EMPTY, BUG, } impl Tile { fn parse(value: char) -> Self { match value { '.' => Tile::EMPTY, '#' => Tile::BUG, _ => panic!("invalid tile: {}", value), } } fn print(&self) -> char { match *self { Tile::EMPTY => '.', Tile::BUG => '#', } } } impl Default for Tile { fn default() -> Self { Tile::EMPTY } } const SIZE: usize = 5; #[derive(Debug, Default, Copy, Clone)] struct Grid { tiles: [[Tile; SIZE]; SIZE], } impl Grid { fn parse(input: &str) -> Self { let mut tiles: [[Tile; SIZE]; SIZE] = Default::default(); let mut y = 0; for line in input.trim().lines() { let mut x = 0; for value in line.chars() { tiles[y][x] = Tile::parse(value); x += 1; } y += 1; } Grid { tiles } } fn print(&self) { for row in &self.tiles { println!("{}", row.iter().map(|x| x.print()).collect::<String>()); } } fn get_neighbours(&self, x: usize, y: usize) -> Vec<Tile> { let mut neighbours: Vec<Tile> = Vec::new(); if y > 0 { neighbours.push(self.tiles[y - 1][x]); } if x > 0 { neighbours.push(self.tiles[y][x - 1]); } if x + 1 < SIZE { neighbours.push(self.tiles[y][x + 1]); } if y + 1 < SIZE { neighbours.push(self.tiles[y + 1][x]); } neighbours } fn step(&mut self) { let mut new_tiles: [[Tile; SIZE]; SIZE] = Default::default(); for y in 0..SIZE { for x in 0..SIZE { let neighbour_bugs = self .get_neighbours(x, y) .iter() .filter(|&x| x == &Tile::BUG) .count(); new_tiles[y][x] = match (self.tiles[y][x], neighbour_bugs) { (Tile::BUG, 1) => Tile::BUG, (Tile::BUG, _) => Tile::EMPTY, (Tile::EMPTY, 1) | (Tile::EMPTY, 2) => Tile::BUG, (Tile::EMPTY, _) => Tile::EMPTY, } } } self.tiles = new_tiles; } fn get_biodiversity_rating(&self) -> u32 { let mut rating = 0; let mut power = 1; for y in 0..SIZE { for x in 0..SIZE { if self.tiles[y][x] == Tile::BUG { rating |= power; } power <<= 1; } } rating } fn count_bugs(&self) -> usize { self.tiles .iter() .map(|row| row.iter().filter(|&x| x == &Tile::BUG).count()) .sum() } } fn part1(mut grid: Grid) -> u32 { let mut seen_ratings: HashSet<u32> = HashSet::new(); seen_ratings.insert(grid.get_biodiversity_rating()); loop { grid.step(); let rating = grid.get_biodiversity_rating(); if seen_ratings.contains(&rating) { break; } else { seen_ratings.insert(rating); } } grid.get_biodiversity_rating() } #[derive(Debug)] struct MultiGrid { grids: HashMap<isize, Grid>, } impl MultiGrid { fn new(grid: Grid) -> Self { let mut grids = HashMap::new(); grids.insert(0, grid); MultiGrid { grids } } fn print(&self) { let mut levels = self.grids.keys().collect::<Vec<_>>(); levels.sort(); for level in levels { println!("Depth {}", level); self.grids[level].print(); println!(); } } fn get_tile(&self, level: isize, x: usize, y: usize) -> Tile { assert!(x != 2 || y != 2); if let Some(grid) = self.grids.get(&level) { grid.tiles[y][x] } else { Tile::EMPTY } } fn get_neighbours(&self, level: isize, x: usize, y: usize) -> Vec<Tile> { let mut neighbours: Vec<Tile> = Vec::new(); match (x, y) { (_, 0) => { neighbours.push(self.get_tile(level - 1, 2, 1)); } (2, 3) => { for x in 0..SIZE { neighbours.push(self.get_tile(level + 1, x, SIZE - 1)); } } _ => { neighbours.push(self.get_tile(level, x, y - 1)); } } match (x, y) { (0, _) => { neighbours.push(self.get_tile(level - 1, 1, 2)); } (3, 2) => { for y in 0..SIZE { neighbours.push(self.get_tile(level + 1, SIZE - 1, y)); } } _ => { neighbours.push(self.get_tile(level, x - 1, y)); } } match (x, y) { (4, _) => { neighbours.push(self.get_tile(level - 1, 3, 2)); } (1, 2) => { for y in 0..SIZE { neighbours.push(self.get_tile(level + 1, 0, y)); } } _ => { neighbours.push(self.get_tile(level, x + 1, y)); } } match (x, y) { (_, 4) => { neighbours.push(self.get_tile(level - 1, 2, 3)); } (2, 1) => { for x in 0..SIZE { neighbours.push(self.get_tile(level + 1, x, 0)); } } _ => { neighbours.push(self.get_tile(level, x, y + 1)); } } neighbours } fn step_tile(&self, level: isize, x: usize, y: usize) -> Tile { let neighbour_bugs = self .get_neighbours(level, x, y) .iter() .filter(|&x| x == &Tile::BUG) .count(); match (self.get_tile(level, x, y), neighbour_bugs) { (Tile::BUG, 1) => Tile::BUG, (Tile::BUG, _) => Tile::EMPTY, (Tile::EMPTY, 1) | (Tile::EMPTY, 2) => Tile::BUG, (Tile::EMPTY, _) => Tile::EMPTY, } } fn step(&mut self) { let mut new_grids: HashMap<isize, Grid> = self.grids.clone(); let min_level = *self.grids.keys().min().unwrap(); let max_level = *self.grids.keys().max().unwrap(); for level in (min_level - 1)..=(max_level + 1) { for y in 0..SIZE { for x in 0..SIZE { if x == 2 && y == 2 { continue; } let new_tile = self.step_tile(level, x, y); if new_tile == Tile::BUG || self.grids.contains_key(&level) { let new_grid = new_grids.entry(level).or_insert(Default::default()); new_grid.tiles[y][x] = new_tile; } } } } self.grids = new_grids; } fn count_bugs(&self) -> usize { self.grids.values().map(|grid| grid.count_bugs()).sum() } } fn part2(grid: Grid) -> usize { let mut multi_grid = MultiGrid::new(grid); for _ in 0..200 { multi_grid.step(); } multi_grid.count_bugs() } #[cfg(test)] mod tests { use super::*; #[test] fn test_part1() { let grid = Grid::parse(include_str!("example")); assert_eq!(part1(grid), 2129920); } }
use std::collections::{HashMap, HashSet}; fn main() { let grid: Grid = Grid::parse(include_str!("input")); println!("Answer to part 1: {}", part1(grid.clone())); println!("Answer to part 2: {}", part2(grid.clone())); } #[derive(Debug, Eq, PartialEq, Copy, Clone)] enum Tile { EMPTY, BUG, } impl Tile { fn parse(value: char) -> Self { match value { '.' => Tile::EMPTY, '#' => Tile::BUG, _ => panic!("invalid tile: {}", value), } } fn print(&self) -> char { match *self { Tile::EMPTY => '.', Tile::BUG => '#', } } } impl Default for Tile { fn default() -> Self { Tile::EMPTY } } const SIZE: usize = 5; #[derive(Debug, Default, Copy, Clone)] struct Grid { tiles: [[Tile; SIZE]; SIZE], } impl Grid { fn parse(input: &str) -> Self { let mut tiles: [[Tile; SIZE]; SIZE] = Default::default(); let mut y = 0; for line in input.trim().lines() { let mut x = 0; for value in line.chars() { tiles[y][x] = Tile::parse(value); x += 1; } y += 1; } Grid { tiles } } fn print(&self) { for row in &self.tiles { println!("{}", row.iter().map(|x| x.print()).collect::<String>()); } } fn get_neighbours(&self, x: usize, y: usize) -> Vec<Tile> { let mut neighbours: Vec<Tile> = Vec::new(); if y > 0 { neighbours.push(self.tiles[y - 1][x]); } if x > 0 { neighbours.push(self.tiles[y][x - 1]); } if x + 1 < SIZE { neighbours.push(self.tiles[y][x + 1]); } if
_key(&level) { let new_grid = new_grids.entry(level).or_insert(Default::default()); new_grid.tiles[y][x] = new_tile; } } } } self.grids = new_grids; } fn count_bugs(&self) -> usize { self.grids.values().map(|grid| grid.count_bugs()).sum() } } fn part2(grid: Grid) -> usize { let mut multi_grid = MultiGrid::new(grid); for _ in 0..200 { multi_grid.step(); } multi_grid.count_bugs() } #[cfg(test)] mod tests { use super::*; #[test] fn test_part1() { let grid = Grid::parse(include_str!("example")); assert_eq!(part1(grid), 2129920); } }
y + 1 < SIZE { neighbours.push(self.tiles[y + 1][x]); } neighbours } fn step(&mut self) { let mut new_tiles: [[Tile; SIZE]; SIZE] = Default::default(); for y in 0..SIZE { for x in 0..SIZE { let neighbour_bugs = self .get_neighbours(x, y) .iter() .filter(|&x| x == &Tile::BUG) .count(); new_tiles[y][x] = match (self.tiles[y][x], neighbour_bugs) { (Tile::BUG, 1) => Tile::BUG, (Tile::BUG, _) => Tile::EMPTY, (Tile::EMPTY, 1) | (Tile::EMPTY, 2) => Tile::BUG, (Tile::EMPTY, _) => Tile::EMPTY, } } } self.tiles = new_tiles; } fn get_biodiversity_rating(&self) -> u32 { let mut rating = 0; let mut power = 1; for y in 0..SIZE { for x in 0..SIZE { if self.tiles[y][x] == Tile::BUG { rating |= power; } power <<= 1; } } rating } fn count_bugs(&self) -> usize { self.tiles .iter() .map(|row| row.iter().filter(|&x| x == &Tile::BUG).count()) .sum() } } fn part1(mut grid: Grid) -> u32 { let mut seen_ratings: HashSet<u32> = HashSet::new(); seen_ratings.insert(grid.get_biodiversity_rating()); loop { grid.step(); let rating = grid.get_biodiversity_rating(); if seen_ratings.contains(&rating) { break; } else { seen_ratings.insert(rating); } } grid.get_biodiversity_rating() } #[derive(Debug)] struct MultiGrid { grids: HashMap<isize, Grid>, } impl MultiGrid { fn new(grid: Grid) -> Self { let mut grids = HashMap::new(); grids.insert(0, grid); MultiGrid { grids } } fn print(&self) { let mut levels = self.grids.keys().collect::<Vec<_>>(); levels.sort(); for level in levels { println!("Depth {}", level); self.grids[level].print(); println!(); } } fn get_tile(&self, level: isize, x: usize, y: usize) -> Tile { assert!(x != 2 || y != 2); if let Some(grid) = self.grids.get(&level) { grid.tiles[y][x] } else { Tile::EMPTY } } fn get_neighbours(&self, level: isize, x: usize, y: usize) -> Vec<Tile> { let mut neighbours: Vec<Tile> = Vec::new(); match (x, y) { (_, 0) => { neighbours.push(self.get_tile(level - 1, 2, 1)); } (2, 3) => { for x in 0..SIZE { neighbours.push(self.get_tile(level + 1, x, SIZE - 1)); } } _ => { neighbours.push(self.get_tile(level, x, y - 1)); } } match (x, y) { (0, _) => { neighbours.push(self.get_tile(level - 1, 1, 2)); } (3, 2) => { for y in 0..SIZE { neighbours.push(self.get_tile(level + 1, SIZE - 1, y)); } } _ => { neighbours.push(self.get_tile(level, x - 1, y)); } } match (x, y) { (4, _) => { neighbours.push(self.get_tile(level - 1, 3, 2)); } (1, 2) => { for y in 0..SIZE { neighbours.push(self.get_tile(level + 1, 0, y)); } } _ => { neighbours.push(self.get_tile(level, x + 1, y)); } } match (x, y) { (_, 4) => { neighbours.push(self.get_tile(level - 1, 2, 3)); } (2, 1) => { for x in 0..SIZE { neighbours.push(self.get_tile(level + 1, x, 0)); } } _ => { neighbours.push(self.get_tile(level, x, y + 1)); } } neighbours } fn step_tile(&self, level: isize, x: usize, y: usize) -> Tile { let neighbour_bugs = self .get_neighbours(level, x, y) .iter() .filter(|&x| x == &Tile::BUG) .count(); match (self.get_tile(level, x, y), neighbour_bugs) { (Tile::BUG, 1) => Tile::BUG, (Tile::BUG, _) => Tile::EMPTY, (Tile::EMPTY, 1) | (Tile::EMPTY, 2) => Tile::BUG, (Tile::EMPTY, _) => Tile::EMPTY, } } fn step(&mut self) { let mut new_grids: HashMap<isize, Grid> = self.grids.clone(); let min_level = *self.grids.keys().min().unwrap(); let max_level = *self.grids.keys().max().unwrap(); for level in (min_level - 1)..=(max_level + 1) { for y in 0..SIZE { for x in 0..SIZE { if x == 2 && y == 2 { continue; } let new_tile = self.step_tile(level, x, y); if new_tile == Tile::BUG || self.grids.contains
random
[ { "content": "pub fn parse_list<T, E>(input: &str, separator: char) -> Vec<T>\n\nwhere\n\n T: FromStr<Err = E>,\n\n E: Debug,\n\n{\n\n return input\n\n .trim()\n\n .split(separator)\n\n .map(|x| x.parse().expect(\"invalid input\"))\n\n .collect();\n\n}\n", "file_path": "src/input.rs", "rank": 0, "score": 51923.025397178484 }, { "content": "pub fn gcd_64(mut a: i64, mut b: i64) -> i64 {\n\n while a != 0 {\n\n let old_a = a;\n\n a = b % a;\n\n b = old_a;\n\n }\n\n b.abs()\n\n}\n\n\n", "file_path": "src/math.rs", "rank": 1, "score": 47584.357789630056 }, { "content": "pub fn gcd(mut a: i32, mut b: i32) -> i32 {\n\n while a != 0 {\n\n let old_a = a;\n\n a = b % a;\n\n b = old_a;\n\n }\n\n b.abs()\n\n}\n\n\n", "file_path": "src/math.rs", "rank": 2, "score": 47584.357789630056 }, { "content": "#[derive(Debug)]\n\nenum OutputValue {\n\n Position(i64),\n\n Relative(i64),\n\n}\n\n\n\nimpl OutputValue {\n\n #[inline]\n\n fn parse(mode: i32, value: i64) -> OutputValue {\n\n match mode {\n\n 0 => OutputValue::Position(value),\n\n 2 => OutputValue::Relative(value),\n\n _ => panic!(\"unexpected parameter mode {}\", mode),\n\n }\n\n }\n\n\n\n #[inline]\n\n fn write(&self, program: &mut Vec<i64>, base: i64, value: i64) {\n\n let pos = match *self {\n\n OutputValue::Position(pos) => pos as usize,\n\n OutputValue::Relative(pos) => (base + pos) as usize,\n\n };\n\n if pos >= program.len() {\n\n program.resize(pos + 1, 0);\n\n }\n\n program[pos as usize] = value;\n\n }\n\n}\n\n\n", "file_path": "src/intcode.rs", "rank": 3, "score": 46589.33693341235 }, { "content": "#[derive(Debug)]\n\nenum InputValue {\n\n Position(i64),\n\n Immediate(i64),\n\n Relative(i64),\n\n}\n\n\n\nimpl InputValue {\n\n #[inline]\n\n fn parse(mode: i32, value: i64) -> InputValue {\n\n match mode {\n\n 0 => InputValue::Position(value),\n\n 1 => InputValue::Immediate(value),\n\n 2 => InputValue::Relative(value),\n\n _ => panic!(\"unexpected parameter mode {}\", mode),\n\n }\n\n }\n\n\n\n #[inline]\n\n fn read(&self, program: &Vec<i64>, base: i64) -> i64 {\n\n match *self {\n\n InputValue::Position(pos) => *program.get(pos as usize).unwrap_or(&0),\n\n InputValue::Immediate(value) => value,\n\n InputValue::Relative(pos) => *program.get((base + pos) as usize).unwrap_or(&0),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/intcode.rs", "rank": 4, "score": 46589.33693341235 }, { "content": "#[derive(Debug)]\n\nenum Instruction {\n\n Add(InputValue, InputValue, OutputValue),\n\n Multiply(InputValue, InputValue, OutputValue),\n\n Read(OutputValue),\n\n Write(InputValue),\n\n JumpIfTrue(InputValue, InputValue),\n\n JumpIfFalse(InputValue, InputValue),\n\n LessThan(InputValue, InputValue, OutputValue),\n\n Equals(InputValue, InputValue, OutputValue),\n\n RelativeBaseOffset(InputValue),\n\n Halt,\n\n}\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub enum StepResult {\n\n Ok,\n\n NeedInput,\n\n Output(i64),\n\n Halt,\n\n}\n", "file_path": "src/intcode.rs", "rank": 5, "score": 31350.39235036247 }, { "content": "pub fn make_chain(mut machines: VecDeque<Box<dyn Machine>>) -> Box<dyn Machine> {\n\n let head = machines.pop_front().expect(\"expected at least one machine\");\n\n if machines.is_empty() {\n\n head\n\n } else {\n\n Box::new(Chain::new(head, make_chain(machines)))\n\n }\n\n}\n", "file_path": "src/intcode.rs", "rank": 6, "score": 30896.845022519603 }, { "content": "struct Chain<M1: Machine, M2: Machine> {\n\n head: M1,\n\n tail: M2,\n\n}\n\n\n\nimpl<M1: Machine, M2: Machine> Chain<M1, M2> {\n\n fn new(head: M1, tail: M2) -> Chain<M1, M2> {\n\n Chain { head, tail }\n\n }\n\n}\n\n\n\nimpl<M1: Machine, M2: Machine> Machine for Chain<M1, M2> {\n\n fn add_input(&mut self, value: i64) {\n\n self.head.add_input(value);\n\n }\n\n\n\n fn step(&mut self) -> StepResult {\n\n let head_result = self.head.step();\n\n if let StepResult::Output(value) = head_result {\n\n // forward outputs from head to tail\n", "file_path": "src/intcode.rs", "rank": 7, "score": 24358.05601818603 }, { "content": "pub fn lcm_64(a: i64, b: i64) -> i64 {\n\n (a * b).abs() / gcd_64(a, b)\n\n}\n", "file_path": "src/math.rs", "rank": 8, "score": 20320.280209213517 }, { "content": "pub fn lcm(a: i32, b: i32) -> i32 {\n\n (a * b).abs() / gcd(a, b)\n\n}\n\n\n", "file_path": "src/math.rs", "rank": 9, "score": 20320.280209213517 }, { "content": " StepResult::Halt => return None,\n\n StepResult::Ok => {}\n\n };\n\n }\n\n }\n\n\n\n fn add_line(&mut self, line: &str) {\n\n for byte in line.bytes() {\n\n self.add_input(byte as i64);\n\n }\n\n self.add_input('\\n' as u8 as i64);\n\n }\n\n\n\n fn read_line(&mut self) -> String {\n\n let mut output = String::new();\n\n loop {\n\n match self.step() {\n\n StepResult::Ok => {}\n\n StepResult::Output(value) => match value as u8 as char {\n\n '\\n' => break,\n", "file_path": "src/intcode.rs", "rank": 10, "score": 7.578775010471179 }, { "content": "}\n\n\n\nimpl<T: DerefMut<Target = dyn Machine>> Machine for T {\n\n fn add_input(&mut self, value: i64) {\n\n self.deref_mut().add_input(value)\n\n }\n\n\n\n fn step(&mut self) -> StepResult {\n\n self.deref_mut().step()\n\n }\n\n}\n\n\n\npub struct ProgramMachine {\n\n program: Vec<i64>,\n\n pc: usize,\n\n base: i64,\n\n input: VecDeque<i64>,\n\n}\n\n\n\nimpl ProgramMachine {\n", "file_path": "src/intcode.rs", "rank": 11, "score": 5.226598089012269 }, { "content": " _ => output.push(value as u8 as char),\n\n },\n\n StepResult::NeedInput => break,\n\n StepResult::Halt => panic!(\"unexpected halt\"),\n\n }\n\n }\n\n output\n\n }\n\n\n\n fn read_string(&mut self) -> String {\n\n let mut output = String::new();\n\n loop {\n\n match self.step() {\n\n StepResult::Ok => {}\n\n StepResult::Output(value) => output.push(value as u8 as char),\n\n StepResult::NeedInput | StepResult::Halt => break,\n\n }\n\n }\n\n output\n\n }\n", "file_path": "src/intcode.rs", "rank": 12, "score": 4.694095904587357 }, { "content": "use std::fmt::Debug;\n\nuse std::str::FromStr;\n\n\n", "file_path": "src/input.rs", "rank": 13, "score": 4.686202197731376 }, { "content": " }\n\n}\n\n\n\nimpl Default for Vector3D {\n\n fn default() -> Self {\n\n Vector3D::zero()\n\n }\n\n}\n\n\n\nimpl Add for Vector3D {\n\n type Output = Self;\n\n\n\n fn add(self: Vector3D, other: Vector3D) -> Vector3D {\n\n Vector3D {\n\n x: self.x + other.x,\n\n y: self.y + other.y,\n\n z: self.z + other.z,\n\n }\n\n }\n\n}\n", "file_path": "src/vector3d.rs", "rank": 14, "score": 4.473348170207187 }, { "content": "}\n\n\n\nimpl Default for Vector2D {\n\n fn default() -> Self {\n\n Vector2D::zero()\n\n }\n\n}\n\n\n\nimpl Add for Vector2D {\n\n type Output = Self;\n\n\n\n fn add(self: Vector2D, other: Vector2D) -> Vector2D {\n\n Vector2D {\n\n x: self.x + other.x,\n\n y: self.y + other.y,\n\n }\n\n }\n\n}\n\n\n\nimpl Sub for Vector2D {\n", "file_path": "src/vector2d.rs", "rank": 15, "score": 4.469663239817699 }, { "content": "\n\nimpl AddAssign for Vector3D {\n\n fn add_assign(&mut self, other: Self) {\n\n self.x.add_assign(other.x);\n\n self.y.add_assign(other.y);\n\n self.z.add_assign(other.z);\n\n }\n\n}\n\n\n\nimpl SubAssign for Vector3D {\n\n fn sub_assign(&mut self, other: Self) {\n\n self.x.sub_assign(other.x);\n\n self.y.sub_assign(other.y);\n\n self.z.sub_assign(other.z);\n\n }\n\n}\n", "file_path": "src/vector3d.rs", "rank": 16, "score": 4.443920206486673 }, { "content": "use std::ops::{Add, AddAssign, Neg, Sub, SubAssign};\n\n\n\n#[derive(Debug, Eq, PartialEq, Copy, Clone, Hash)]\n\npub struct Vector2D {\n\n pub x: i32,\n\n pub y: i32,\n\n}\n\n\n\nimpl Vector2D {\n\n pub fn new(x: i32, y: i32) -> Vector2D {\n\n Vector2D { x, y }\n\n }\n\n\n\n pub fn zero() -> Vector2D {\n\n Vector2D::new(0, 0)\n\n }\n\n\n\n pub fn manhattan_distance(&self) -> i32 {\n\n self.x.abs() + self.y.abs()\n\n }\n", "file_path": "src/vector2d.rs", "rank": 17, "score": 4.3727374156568946 }, { "content": "use std::ops::{Add, AddAssign, Neg, Sub, SubAssign};\n\n\n\n#[derive(Debug, Eq, PartialEq, Copy, Clone, Hash)]\n\npub struct Vector3D {\n\n pub x: i32,\n\n pub y: i32,\n\n pub z: i32,\n\n}\n\n\n\nimpl Vector3D {\n\n pub fn new(x: i32, y: i32, z: i32) -> Vector3D {\n\n Vector3D { x, y, z }\n\n }\n\n\n\n pub fn zero() -> Vector3D {\n\n Vector3D::new(0, 0, 0)\n\n }\n\n\n\n pub fn manhattan_distance(&self) -> i32 {\n\n self.x.abs() + self.y.abs() + self.z.abs()\n", "file_path": "src/vector3d.rs", "rank": 18, "score": 4.155412386064709 }, { "content": " type Output = Self;\n\n\n\n fn sub(self: Vector2D, other: Vector2D) -> Vector2D {\n\n Vector2D {\n\n x: self.x - other.x,\n\n y: self.y - other.y,\n\n }\n\n }\n\n}\n\n\n\nimpl Neg for Vector2D {\n\n type Output = Self;\n\n\n\n fn neg(self) -> Vector2D {\n\n Vector2D::new(-self.x, -self.y)\n\n }\n\n}\n\n\n\nimpl AddAssign for Vector2D {\n\n fn add_assign(&mut self, other: Self) {\n", "file_path": "src/vector2d.rs", "rank": 19, "score": 4.145963229988185 }, { "content": " self.x.add_assign(other.x);\n\n self.y.add_assign(other.y);\n\n }\n\n}\n\n\n\nimpl SubAssign for Vector2D {\n\n fn sub_assign(&mut self, other: Self) {\n\n self.x.sub_assign(other.x);\n\n self.y.sub_assign(other.y);\n\n }\n\n}\n", "file_path": "src/vector2d.rs", "rank": 20, "score": 3.9268876220265945 }, { "content": "use std::collections::VecDeque;\n\nuse std::ops::DerefMut;\n\n\n\n#[derive(Debug)]\n", "file_path": "src/intcode.rs", "rank": 21, "score": 3.6272438181582367 }, { "content": "\n\nimpl Instruction {\n\n fn parse(program: &Vec<i64>, pc: usize) -> Instruction {\n\n let opcode = program[pc] as i32;\n\n let mode1 = (opcode / 100) % 10;\n\n let mode2 = (opcode / 1000) % 10;\n\n let mode3 = (opcode / 10000) % 10;\n\n match opcode % 100 {\n\n 1 => Instruction::Add(\n\n InputValue::parse(mode1, program[pc + 1]),\n\n InputValue::parse(mode2, program[pc + 2]),\n\n OutputValue::parse(mode3, program[pc + 3]),\n\n ),\n\n 2 => Instruction::Multiply(\n\n InputValue::parse(mode1, program[pc + 1]),\n\n InputValue::parse(mode2, program[pc + 2]),\n\n OutputValue::parse(mode3, program[pc + 3]),\n\n ),\n\n 3 => Instruction::Read(OutputValue::parse(mode1, program[pc + 1])),\n\n 4 => Instruction::Write(InputValue::parse(mode1, program[pc + 1])),\n", "file_path": "src/intcode.rs", "rank": 22, "score": 3.50869977628441 }, { "content": "\n\nimpl Sub for Vector3D {\n\n type Output = Self;\n\n\n\n fn sub(self: Vector3D, other: Vector3D) -> Vector3D {\n\n Vector3D {\n\n x: self.x - other.x,\n\n y: self.y - other.y,\n\n z: self.z - other.z,\n\n }\n\n }\n\n}\n\n\n\nimpl Neg for Vector3D {\n\n type Output = Self;\n\n\n\n fn neg(self) -> Vector3D {\n\n Vector3D::new(-self.x, -self.y, -self.z)\n\n }\n\n}\n", "file_path": "src/vector3d.rs", "rank": 23, "score": 3.1001510027700943 }, { "content": " let program = &mut self.program;\n\n let base = &mut self.base;\n\n let instr = Instruction::parse(&program, self.pc);\n\n match &instr {\n\n Instruction::Add(left, right, result) => {\n\n result.write(\n\n program,\n\n *base,\n\n left.read(program, *base) + right.read(program, *base),\n\n );\n\n }\n\n Instruction::Multiply(left, right, result) => {\n\n result.write(\n\n program,\n\n *base,\n\n left.read(program, *base) * right.read(program, *base),\n\n );\n\n }\n\n Instruction::Read(result) => match self.input.pop_front() {\n\n Some(value) => {\n", "file_path": "src/intcode.rs", "rank": 24, "score": 2.9145702549753425 }, { "content": " result.write(program, *base, value);\n\n }\n\n None => return StepResult::NeedInput,\n\n },\n\n Instruction::Write(value) => {\n\n self.pc += instr.length();\n\n return StepResult::Output(value.read(program, *base));\n\n }\n\n Instruction::JumpIfTrue(test, jump) => {\n\n if test.read(program, *base) != 0 {\n\n self.pc = jump.read(program, *base) as usize;\n\n return StepResult::Ok;\n\n }\n\n }\n\n Instruction::JumpIfFalse(test, jump) => {\n\n if test.read(program, *base) == 0 {\n\n self.pc = jump.read(program, *base) as usize;\n\n return StepResult::Ok;\n\n }\n\n }\n", "file_path": "src/intcode.rs", "rank": 25, "score": 2.48577969041816 }, { "content": " 5 => Instruction::JumpIfTrue(\n\n InputValue::parse(mode1, program[pc + 1]),\n\n InputValue::parse(mode2, program[pc + 2]),\n\n ),\n\n 6 => Instruction::JumpIfFalse(\n\n InputValue::parse(mode1, program[pc + 1]),\n\n InputValue::parse(mode2, program[pc + 2]),\n\n ),\n\n 7 => Instruction::LessThan(\n\n InputValue::parse(mode1, program[pc + 1]),\n\n InputValue::parse(mode2, program[pc + 2]),\n\n OutputValue::parse(mode3, program[pc + 3]),\n\n ),\n\n 8 => Instruction::Equals(\n\n InputValue::parse(mode1, program[pc + 1]),\n\n InputValue::parse(mode2, program[pc + 2]),\n\n OutputValue::parse(mode3, program[pc + 3]),\n\n ),\n\n 9 => Instruction::RelativeBaseOffset(InputValue::parse(mode1, program[pc + 1])),\n\n 99 => Instruction::Halt,\n", "file_path": "src/intcode.rs", "rank": 26, "score": 2.0701303345445052 }, { "content": " pub fn new(program: Vec<i64>, input: Vec<i64>) -> ProgramMachine {\n\n ProgramMachine {\n\n program,\n\n pc: 0,\n\n base: 0,\n\n input: VecDeque::from(input),\n\n }\n\n }\n\n\n\n pub fn program(&self) -> &Vec<i64> {\n\n &self.program\n\n }\n\n}\n\n\n\nimpl Machine for ProgramMachine {\n\n fn add_input(&mut self, input: i64) {\n\n self.input.push_back(input);\n\n }\n\n\n\n fn step(&mut self) -> StepResult {\n", "file_path": "src/intcode.rs", "rank": 27, "score": 2.060697857522465 }, { "content": " self.tail.add_input(value);\n\n }\n\n let tail_result = self.tail.step();\n\n match (head_result, tail_result) {\n\n (_, StepResult::Output(value)) => {\n\n // output from tail\n\n StepResult::Output(value)\n\n }\n\n (_, StepResult::Halt) => {\n\n // if tail has halted, then head must have halted as well\n\n // since it can no longer output values\n\n StepResult::Halt\n\n }\n\n (StepResult::Halt, _) => {\n\n // if head has halted but tail hasn't yet,\n\n // continue running tail\n\n StepResult::Ok\n\n }\n\n (StepResult::Output(_), _) => {\n\n // output from head was already forwarded internally\n\n StepResult::Ok\n\n }\n\n (head_result, _) => head_result,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/intcode.rs", "rank": 28, "score": 2.058072224066719 }, { "content": " _ => panic!(\"unexpected opcode {} at index {}\", opcode, pc),\n\n }\n\n }\n\n\n\n #[inline]\n\n fn length(&self) -> usize {\n\n match self {\n\n Instruction::Add(_, _, _)\n\n | Instruction::Multiply(_, _, _)\n\n | Instruction::LessThan(_, _, _)\n\n | Instruction::Equals(_, _, _) => 4,\n\n Instruction::JumpIfTrue(_, _) | Instruction::JumpIfFalse(_, _) => 3,\n\n Instruction::Read(_) | Instruction::Write(_) | Instruction::RelativeBaseOffset(_) => 2,\n\n Instruction::Halt => 1,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/intcode.rs", "rank": 29, "score": 1.9485983416373087 } ]
Rust
src/decode.rs
12101111/6502
b333d946539f04fee8680f5b1b73de40c1102b3c
use super::address::AM::{self, *}; #[derive(Debug, Clone, Copy)] #[allow(non_camel_case_types)] pub enum OP { LDA, LDX, LDY, STA, STX, STY, TAX, TAY, TXA, TYA, TSX, TXS, PHA, PHP, PLA, PLP, ADC, SBC, INX, INY, DEX, DEY, INC, DEC, AND, ORA, EOR, BIT, CMP, CPX, CPY, ASL, LSR, ROL, ROR, CLC, CLD, CLI, CLV, SEC, SED, SEI, JMP, JSR, RTS, RTI, BRK, BEQ, BNE, BCS, BCC, BVS, BVC, BMI, BPL, NOP, ALR, ANC, ARR, AXS, LAX, SAX, DCP, ISC, RLA, RRA, SLO, SRE, SHY, SHX, STP, XXA, AHX, TAS, LAS, } use self::OP::*; #[rustfmt::skip] pub static DECODE: [(OP, AM, u8); 256] = [ /*0x00*/(BRK, IMP, 7),(ORA, ZIX, 6),(STP, NON, 2),(SLO, ZIX, 8),(NOP, ZPG, 3),(ORA, ZPG, 3),(ASL, ZPG, 5),(SLO, ZPG, 5), /*0x08*/(PHP, IMP, 3),(ORA, IMM, 2),(ASL, ACC, 2),(ANC, IMM, 2),(NOP, ABS, 4),(ORA, ABS, 4),(ASL, ABS, 6),(SLO, ABS, 6), /*0x10*/(BPL, REL, 2),(ORA, ZIY, 5),(STP, NON, 2),(SLO, ZIY, 8),(NOP, ZPX, 4),(ORA, ZPX, 4),(ASL, ZPX, 6),(SLO, ZPX, 6), /*0x18*/(CLC, IMP, 2),(ORA, aby, 4),(NOP, IMP, 2),(SLO, ABY, 7),(NOP, abx, 4),(ORA, abx, 4),(ASL, ABX, 7),(SLO, ABX, 7), /*0x20*/(JSR, ABS, 6),(AND, ZIX, 6),(STP, NON, 2),(RLA, ZIX, 8),(BIT, ZPG, 3),(AND, ZPG, 3),(ROL, ZPG, 5),(RLA, ZPG, 5), /*0x28*/(PLP, IMP, 4),(AND, IMM, 2),(ROL, ACC, 2),(ANC, IMM, 2),(BIT, ABS, 4),(AND, ABS, 4),(ROL, ABS, 6),(RLA, ABS, 6), /*0x30*/(BMI, REL, 2),(AND, ZIY, 5),(STP, NON, 2),(RLA, ZIY, 8),(NOP, ZPX, 4),(AND, ZPX, 4),(ROL, ZPX, 6),(RLA, ZPX, 6), /*0x38*/(SEC, IMP, 2),(AND, aby, 4),(NOP, IMP, 2),(RLA, ABY, 7),(NOP, abx, 4),(AND, abx, 4),(ROL, ABX, 7),(RLA, ABX, 7), /*0x40*/(RTI, IMP, 6),(EOR, ZIX, 6),(STP, NON, 2),(SRE, ZIX, 8),(NOP, ZPG, 3),(EOR, ZPG, 3),(LSR, ZPG, 5),(SRE, ZPG, 5), /*0x48*/(PHA, IMP, 3),(EOR, IMM, 2),(LSR, ACC, 2),(ALR, IMM, 2),(JMP, ABS, 3),(EOR, ABS, 4),(LSR, ABS, 6),(SRE, ABS, 6), /*0x50*/(BVC, REL, 2),(EOR, ZIY, 5),(STP, NON, 2),(SRE, ZIY, 8),(NOP, ZPX, 4),(EOR, ZPX, 4),(LSR, ZPX, 6),(SRE, ZPX, 6), /*0x58*/(CLI, IMP, 2),(EOR, aby, 4),(NOP, IMP, 2),(SRE, ABY, 7),(NOP, abx, 4),(EOR, abx, 4),(LSR, ABX, 7),(SRE, ABX, 7), /*0x60*/(RTS, IMP, 6),(ADC, ZIX, 6),(STP, NON, 2),(RRA, ZIX, 8),(NOP, ZPG, 3),(ADC, ZPG, 3),(ROR, ZPG, 5),(RRA, ZPG, 5), /*0x68*/(PLA, IMP, 4),(ADC, IMM, 2),(ROR, ACC, 2),(ARR, IMM, 2),(JMP, IND, 5),(ADC, ABS, 4),(ROR, ABS, 6),(RRA, ABS, 6), /*0x70*/(BVS, REL, 2),(ADC, ziy, 5),(STP, NON, 2),(RRA, ZIY, 8),(NOP, ZPX, 4),(ADC, ZPX, 4),(ROR, ZPX, 6),(RRA, ZPX, 6), /*0x78*/(SEI, IMP, 2),(ADC, aby, 4),(NOP, IMP, 2),(RRA, ABY, 7),(NOP, abx, 4),(ADC, abx, 4),(ROR, ABX, 7),(RRA, ABX, 7), /*0x80*/(NOP, IMM, 2),(STA, ZIX, 6),(NOP, IMM, 2),(SAX, ZIX, 6),(STY, ZPG, 3),(STA, ZPG, 3),(STX, ZPG, 3),(SAX, ZPG, 3), /*0x88*/(DEY, IMP, 2),(NOP, IMM, 2),(TXA, IMP, 2),(XXA, IMM, 2),(STY, ABS, 4),(STA, ABS, 4),(STX, ABS, 4),(SAX, ABS, 4), /*0x90*/(BCC, REL, 2),(STA, ZIY, 6),(STP, NON, 2),(AHX, ZIY, 6),(STY, ZPX, 4),(STA, ZPX, 4),(STX, ZPY, 4),(SAX, ZPY, 4), /*0x98*/(TYA, IMP, 2),(STA, ABY, 5),(TXS, IMP, 2),(TAS, ABY, 5),(SHY, ABX, 5),(STA, ABX, 5),(SHX, ABY, 5),(AHX, ABY, 5), /*0xA0*/(LDY, IMM, 2),(LDA, ZIX, 6),(LDX, IMM, 2),(LAX, ZIX, 6),(LDY, ZPG, 3),(LDA, ZPG, 3),(LDX, ZPG, 3),(LAX, ZPG, 3), /*0xA8*/(TAY, IMP, 2),(LDA, IMM, 2),(TAX, IMP, 2),(LAX, IMM, 2),(LDY, ABS, 4),(LDA, ABS, 4),(LDX, ABS, 4),(LAX, ABS, 4), /*0xB0*/(BCS, REL, 2),(LDA, ziy, 5),(STP, NON, 2),(LAX, ziy, 5),(LDY, ZPX, 4),(LDA, ZPX, 4),(LDX, ZPY, 4),(LAX, ZPY, 4), /*0xB8*/(CLV, IMP, 2),(LDA, aby, 4),(TSX, IMP, 2),(LAS, ABY, 4),(LDY, abx, 4),(LDA, abx, 4),(LDX, aby, 4),(LAX, aby, 4), /*0xC0*/(CPY, IMM, 2),(CMP, ZIX, 6),(NOP, IMM, 2),(DCP, ZIX, 8),(CPY, ZPG, 3),(CMP, ZPG, 3),(DEC, ZPG, 5),(DCP, ZPG, 5), /*0xC8*/(INY, IMP, 2),(CMP, IMM, 2),(DEX, IMP, 2),(AXS, IMM, 2),(CPY, ABS, 4),(CMP, ABS, 4),(DEC, ABS, 6),(DCP, ABS, 6), /*0xD0*/(BNE, REL, 2),(CMP, ziy, 5),(STP, NON, 2),(DCP, ZIY, 8),(NOP, ZPX, 4),(CMP, ZPX, 4),(DEC, ZPX, 6),(DCP, ZPX, 6), /*0xD8*/(CLD, IMP, 2),(CMP, aby, 4),(NOP, IMP, 2),(DCP, ABY, 7),(NOP, abx, 4),(CMP, abx, 4),(DEC, ABX, 7),(DCP, ABX, 7), /*0xE0*/(CPX, IMM, 2),(SBC, ZIX, 6),(NOP, IMM, 3),(ISC, ZIX, 8),(CPX, ZPG, 3),(SBC, ZPG, 3),(INC, ZPG, 5),(ISC, ZPG, 5), /*0xE8*/(INX, IMP, 2),(SBC, IMM, 2),(NOP, IMP, 2),(SBC, IMM, 2),(CPX, ABS, 4),(SBC, ABS, 4),(INC, ABS, 6),(ISC, ABS, 6), /*0xF0*/(BEQ, REL, 2),(SBC, ZIY, 5),(STP, NON, 2),(ISC, ZIY, 8),(NOP, ZPX, 4),(SBC, ZPX, 4),(INC, ZPX, 6),(ISC, ZPX, 6), /*0xF8*/(SED, IMP, 2),(SBC, aby, 4),(NOP, IMP, 2),(ISC, ABY, 7),(NOP, abx, 4),(SBC, abx, 4),(INC, ABX, 7),(ISC, ABX, 7), ];
use super::address::AM::{self, *}; #[derive(Debug, Clone, Copy)] #[allow(non_camel_case_types)] pub enum OP { LDA, LDX, LDY, STA, STX, STY, TAX, TAY, TXA, TYA, TSX, TXS, PHA, PHP, PLA, PLP, ADC, SBC, INX, INY, DEX, DEY, INC, DEC, AND, ORA, EOR, BIT, CMP, CPX, CPY, ASL, LSR, ROL, ROR, CLC, CLD, CLI, CLV, SEC, SED, SEI, JMP, JSR, RTS, RTI, BRK, BEQ, BNE, BCS, BCC, BVS, BVC, BMI, BPL, NOP, ALR, ANC, ARR, AXS, LAX, SAX, DCP, ISC, RLA, RRA, SLO, SRE, SHY, SHX, STP, XXA, AHX, TAS, LAS, } use self::OP::*; #[rustfmt::skip] pub static DECODE: [(OP, AM, u8); 256] = [ /*0x00*/(BRK, IMP, 7),(ORA, ZIX, 6),(STP, NON, 2),(SLO, ZIX, 8),(NOP, ZPG, 3),(ORA, ZPG, 3),(ASL, ZPG, 5),(SLO, ZPG, 5), /*0x08*/(PHP, IMP, 3),(ORA, IMM, 2),(ASL, ACC, 2),(ANC, IMM, 2),(NOP, ABS, 4),(ORA, ABS, 4),(ASL, ABS, 6),(SLO, ABS, 6), /*0x10*/(BPL, REL, 2),(ORA, ZIY, 5),(STP, NON, 2),(SLO, ZIY, 8),(NOP, ZPX, 4),(ORA, ZPX, 4),(ASL, ZPX, 6),(SLO, ZPX, 6), /*0x18*/(CLC, IMP, 2),(ORA, aby, 4),(NOP, IMP, 2),(SLO, ABY, 7),(NOP, abx, 4),(ORA, abx, 4),(ASL, ABX, 7),(SLO, ABX, 7), /*0x20*/(JSR, ABS, 6),(AND, ZIX, 6),(STP, NON, 2),(RLA, ZIX,
),(STA, ZPX, 4),(STX, ZPY, 4),(SAX, ZPY, 4), /*0x98*/(TYA, IMP, 2),(STA, ABY, 5),(TXS, IMP, 2),(TAS, ABY, 5),(SHY, ABX, 5),(STA, ABX, 5),(SHX, ABY, 5),(AHX, ABY, 5), /*0xA0*/(LDY, IMM, 2),(LDA, ZIX, 6),(LDX, IMM, 2),(LAX, ZIX, 6),(LDY, ZPG, 3),(LDA, ZPG, 3),(LDX, ZPG, 3),(LAX, ZPG, 3), /*0xA8*/(TAY, IMP, 2),(LDA, IMM, 2),(TAX, IMP, 2),(LAX, IMM, 2),(LDY, ABS, 4),(LDA, ABS, 4),(LDX, ABS, 4),(LAX, ABS, 4), /*0xB0*/(BCS, REL, 2),(LDA, ziy, 5),(STP, NON, 2),(LAX, ziy, 5),(LDY, ZPX, 4),(LDA, ZPX, 4),(LDX, ZPY, 4),(LAX, ZPY, 4), /*0xB8*/(CLV, IMP, 2),(LDA, aby, 4),(TSX, IMP, 2),(LAS, ABY, 4),(LDY, abx, 4),(LDA, abx, 4),(LDX, aby, 4),(LAX, aby, 4), /*0xC0*/(CPY, IMM, 2),(CMP, ZIX, 6),(NOP, IMM, 2),(DCP, ZIX, 8),(CPY, ZPG, 3),(CMP, ZPG, 3),(DEC, ZPG, 5),(DCP, ZPG, 5), /*0xC8*/(INY, IMP, 2),(CMP, IMM, 2),(DEX, IMP, 2),(AXS, IMM, 2),(CPY, ABS, 4),(CMP, ABS, 4),(DEC, ABS, 6),(DCP, ABS, 6), /*0xD0*/(BNE, REL, 2),(CMP, ziy, 5),(STP, NON, 2),(DCP, ZIY, 8),(NOP, ZPX, 4),(CMP, ZPX, 4),(DEC, ZPX, 6),(DCP, ZPX, 6), /*0xD8*/(CLD, IMP, 2),(CMP, aby, 4),(NOP, IMP, 2),(DCP, ABY, 7),(NOP, abx, 4),(CMP, abx, 4),(DEC, ABX, 7),(DCP, ABX, 7), /*0xE0*/(CPX, IMM, 2),(SBC, ZIX, 6),(NOP, IMM, 3),(ISC, ZIX, 8),(CPX, ZPG, 3),(SBC, ZPG, 3),(INC, ZPG, 5),(ISC, ZPG, 5), /*0xE8*/(INX, IMP, 2),(SBC, IMM, 2),(NOP, IMP, 2),(SBC, IMM, 2),(CPX, ABS, 4),(SBC, ABS, 4),(INC, ABS, 6),(ISC, ABS, 6), /*0xF0*/(BEQ, REL, 2),(SBC, ZIY, 5),(STP, NON, 2),(ISC, ZIY, 8),(NOP, ZPX, 4),(SBC, ZPX, 4),(INC, ZPX, 6),(ISC, ZPX, 6), /*0xF8*/(SED, IMP, 2),(SBC, aby, 4),(NOP, IMP, 2),(ISC, ABY, 7),(NOP, abx, 4),(SBC, abx, 4),(INC, ABX, 7),(ISC, ABX, 7), ];
8),(BIT, ZPG, 3),(AND, ZPG, 3),(ROL, ZPG, 5),(RLA, ZPG, 5), /*0x28*/(PLP, IMP, 4),(AND, IMM, 2),(ROL, ACC, 2),(ANC, IMM, 2),(BIT, ABS, 4),(AND, ABS, 4),(ROL, ABS, 6),(RLA, ABS, 6), /*0x30*/(BMI, REL, 2),(AND, ZIY, 5),(STP, NON, 2),(RLA, ZIY, 8),(NOP, ZPX, 4),(AND, ZPX, 4),(ROL, ZPX, 6),(RLA, ZPX, 6), /*0x38*/(SEC, IMP, 2),(AND, aby, 4),(NOP, IMP, 2),(RLA, ABY, 7),(NOP, abx, 4),(AND, abx, 4),(ROL, ABX, 7),(RLA, ABX, 7), /*0x40*/(RTI, IMP, 6),(EOR, ZIX, 6),(STP, NON, 2),(SRE, ZIX, 8),(NOP, ZPG, 3),(EOR, ZPG, 3),(LSR, ZPG, 5),(SRE, ZPG, 5), /*0x48*/(PHA, IMP, 3),(EOR, IMM, 2),(LSR, ACC, 2),(ALR, IMM, 2),(JMP, ABS, 3),(EOR, ABS, 4),(LSR, ABS, 6),(SRE, ABS, 6), /*0x50*/(BVC, REL, 2),(EOR, ZIY, 5),(STP, NON, 2),(SRE, ZIY, 8),(NOP, ZPX, 4),(EOR, ZPX, 4),(LSR, ZPX, 6),(SRE, ZPX, 6), /*0x58*/(CLI, IMP, 2),(EOR, aby, 4),(NOP, IMP, 2),(SRE, ABY, 7),(NOP, abx, 4),(EOR, abx, 4),(LSR, ABX, 7),(SRE, ABX, 7), /*0x60*/(RTS, IMP, 6),(ADC, ZIX, 6),(STP, NON, 2),(RRA, ZIX, 8),(NOP, ZPG, 3),(ADC, ZPG, 3),(ROR, ZPG, 5),(RRA, ZPG, 5), /*0x68*/(PLA, IMP, 4),(ADC, IMM, 2),(ROR, ACC, 2),(ARR, IMM, 2),(JMP, IND, 5),(ADC, ABS, 4),(ROR, ABS, 6),(RRA, ABS, 6), /*0x70*/(BVS, REL, 2),(ADC, ziy, 5),(STP, NON, 2),(RRA, ZIY, 8),(NOP, ZPX, 4),(ADC, ZPX, 4),(ROR, ZPX, 6),(RRA, ZPX, 6), /*0x78*/(SEI, IMP, 2),(ADC, aby, 4),(NOP, IMP, 2),(RRA, ABY, 7),(NOP, abx, 4),(ADC, abx, 4),(ROR, ABX, 7),(RRA, ABX, 7), /*0x80*/(NOP, IMM, 2),(STA, ZIX, 6),(NOP, IMM, 2),(SAX, ZIX, 6),(STY, ZPG, 3),(STA, ZPG, 3),(STX, ZPG, 3),(SAX, ZPG, 3), /*0x88*/(DEY, IMP, 2),(NOP, IMM, 2),(TXA, IMP, 2),(XXA, IMM, 2),(STY, ABS, 4),(STA, ABS, 4),(STX, ABS, 4),(SAX, ABS, 4), /*0x90*/(BCC, REL, 2),(STA, ZIY, 6),(STP, NON, 2),(AHX, ZIY, 6),(STY, ZPX, 4
random
[ { "content": "pub trait Memory {\n\n fn reset(&mut self);\n\n fn loadb(&mut self, addr: u16) -> u8;\n\n fn try_loadb(&self, addr: u16) -> Option<u8>;\n\n fn storeb(&mut self, addr: u16, val: u8);\n\n fn add_cycles(&mut self, val: usize);\n\n fn get_cycles(&self) -> usize;\n\n}\n\n\n\npub struct CPU<T: Memory> {\n\n pub regs: Regs,\n\n pub mem: T,\n\n}\n\n\n\nimpl<T: Memory> CPU<T> {\n\n pub fn new(mut mem: T) -> CPU<T> {\n\n CPU {\n\n regs: Regs::new(u16::from_le_bytes([\n\n mem.loadb(RESET_VECTOR),\n\n mem.loadb(RESET_VECTOR + 1),\n", "file_path": "src/lib.rs", "rank": 0, "score": 21467.063620864046 }, { "content": "\n\npub(crate) fn ins_size(am: &AM) -> u16 {\n\n match *am {\n\n IMP => 0,\n\n IMM => 1,\n\n ACC => 0,\n\n ABS => 2,\n\n ZPG => 1,\n\n ABX => 2,\n\n abx => 2,\n\n ABY => 2,\n\n aby => 2,\n\n REL => 1,\n\n ZPX => 1,\n\n ZPY => 1,\n\n ZIX => 1,\n\n ZIY => 1,\n\n ziy => 1,\n\n IND => 2,\n\n NON => 0,\n\n }\n\n}\n", "file_path": "src/address.rs", "rank": 11, "score": 27.403049012112625 }, { "content": " info!(\"{}\", self);\n\n }\n\n let code = self.loadb_pc();\n\n let (op, am, ticks) = DECODE[code as usize].clone();\n\n self.regs.pc += 1;\n\n match op {\n\n LDA => ap!(|val| self.regs.set_a(val), self.load(&am)),\n\n LDX => ap!(|val| self.regs.set_x(val), self.load(&am)),\n\n LDY => ap!(|val| self.regs.set_y(val), self.load(&am)),\n\n STA => self.store(&am, self.regs.a),\n\n STX => self.store(&am, self.regs.x),\n\n STY => self.store(&am, self.regs.y),\n\n TAX => self.regs.set_x(self.regs.a),\n\n TAY => self.regs.set_y(self.regs.a),\n\n TXA => self.regs.set_a(self.regs.x),\n\n TYA => self.regs.set_a(self.regs.y),\n\n TSX => self.regs.set_x(self.regs.s),\n\n TXS => self.regs.s = self.regs.x,\n\n PHA => self.pushb(self.regs.a),\n\n PHP => self.pushb((self.regs.p | P::S | P::B).bits()),\n", "file_path": "src/execute.rs", "rank": 12, "score": 19.55457145966541 }, { "content": "// AddressMode\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\n#[allow(non_camel_case_types)]\n\npub enum AM {\n\n ///隐含寻址Implied,单字节指令\n\n IMP,\n\n ///Immediate 立即寻址,双字节指令 #v\n\n IMM,\n\n ///Accumulator 累加器A寻址,单字节指令 A\n\n ACC,\n\n ///Absolute 绝对寻址,三字节指令 a\n\n ABS,\n\n ///ZeroPage 零页寻址,双字节指令 d\n\n ZPG,\n\n ///AbsoluteX 绝对X变址,三字节指令 a,x\n\n ABX,\n\n ///AbsoluteX 绝对X变址,三字节指令 a,x 跨页时周期加1\n\n abx,\n\n ///AbsoluteY 绝对Y变址,三字节指令 a,y\n\n ABY,\n", "file_path": "src/address.rs", "rank": 13, "score": 17.077091140972655 }, { "content": "bitflags! {\n\n/// NVsbdIZC\n\n/// Note:\n\n/// Two interrupts (/IRQ and /NMI) and two instructions (PHP and BRK)\n\n/// push the flags to the stack.\n\n/// In the byte pushed, bit 5 is always set to 1,\n\n/// and bit 4 is 1 if from an instruction (PHP or BRK)\n\n/// or 0 if from an interrupt line being pulled low (/IRQ or /NMI).\n\n///\n\n/// Instruction\tBits 5 and 4\tSide effects after pushing\n\n/// PHP\t 11\t None\n\n/// BRK\t 11\t I is set to 1\n\n/// /IRQ\t 10\t I is set to 1\n\n/// /NMI\t 10\t I is set to 1\n\n/// Two instructions (PLP and RTI) pull a byte from the stack\n\n/// and set all the flags. They ignore bits 5 and 4.\n\n pub struct P:u8 {\n\n /// 7: N:Negative 1 if result's top bit is 1\n\n const N = 1 << 7;\n\n /// 6: V:Overflow 1 if result is overflow\n", "file_path": "src/reg.rs", "rank": 14, "score": 11.707932697192305 }, { "content": " match *am {\n\n IMP => 0, //for NOP\n\n IMM => self.loadb_pc(),\n\n ACC => self.regs.a,\n\n _ => {\n\n let addr = self.get_addr(am);\n\n self.loadb(addr)\n\n }\n\n }\n\n }\n\n pub fn store(&mut self, am: &AM, val: u8) {\n\n match *am {\n\n ACC => self.regs.a = val,\n\n _ => {\n\n let addr = self.get_addr(am);\n\n self.storeb(addr, val);\n\n }\n\n }\n\n }\n\n pub fn branch(&mut self, flag: bool) -> u16 {\n", "file_path": "src/utils.rs", "rank": 15, "score": 11.311241301614777 }, { "content": " let carry = self.regs.p.contains(P::C);\n\n self.regs.p.set(P::C, (val & 0x1) != 0);\n\n val = val >> 1;\n\n if carry {\n\n val |= 0x80;\n\n }\n\n self.regs.set_flag_zn(val);\n\n self.store(&am, val);\n\n }\n\n CLC => self.regs.p.set(P::C, false),\n\n CLD => self.regs.p.set(P::D, false),\n\n CLI => self.regs.p.set(P::I, false),\n\n CLV => self.regs.p.set(P::V, false),\n\n SEC => self.regs.p.set(P::C, true),\n\n SED => self.regs.p.set(P::D, true),\n\n SEI => self.regs.p.set(P::I, true),\n\n NOP => {\n\n let _ = self.load(&am);\n\n }\n\n ALR => {\n", "file_path": "src/execute.rs", "rank": 16, "score": 11.148020608849635 }, { "content": " self.store(&am, val);\n\n let a = self.regs.a;\n\n self.regs.set_a(a | val)\n\n }\n\n SRE => {\n\n let val = self.load(&am);\n\n self.regs.p.set(P::C, (val & 0x1) != 0);\n\n let val = val >> 1;\n\n self.regs.set_flag_zn(val);\n\n self.store(&am, val);\n\n let a = self.regs.a;\n\n self.regs.set_a(a ^ val)\n\n }\n\n SHY => unimplemented!(),\n\n SHX => unimplemented!(),\n\n STP => error!(\"STP\"),\n\n XXA => unimplemented!(),\n\n AHX => unimplemented!(),\n\n TAS => unimplemented!(),\n\n LAS => unimplemented!(),\n", "file_path": "src/execute.rs", "rank": 17, "score": 10.006316019257717 }, { "content": " _ => { /*JUMP*/ }\n\n };\n\n self.regs.pc = match op {\n\n JMP => self.get_addr(&am),\n\n JSR => {\n\n self.pushw(self.regs.pc - 1 + ins_size(&am));\n\n self.get_addr(&am)\n\n }\n\n RTS => self.popw() + 1,\n\n RTI => {\n\n let flags = self.popb();\n\n self.regs.set_flags(flags);\n\n self.popw()\n\n }\n\n BRK => {\n\n self.pushw(self.regs.pc + 1);\n\n self.pushb((self.regs.p | P::S | P::B).bits()); //See Note in reg.rs\n\n self.regs.p.set(P::I, true);\n\n self.loadw(BRK_VECTOR)\n\n }\n", "file_path": "src/execute.rs", "rank": 18, "score": 9.365240347444198 }, { "content": "use crate::address::ins_size;\n\nuse crate::decode::{DECODE, OP::*};\n\nuse crate::reg::P;\n\nuse crate::BRK_VECTOR;\n\nuse crate::{Memory, CPU};\n\n//passby borrowck\n\nmacro_rules! ap {\n\n ($f:expr,$i:expr) => {{\n\n let val = $i;\n\n $f(val)\n\n }};\n\n ($f:expr,$i:expr,$j:expr) => {{\n\n let val = $i;\n\n $f(val, $j)\n\n }};\n\n}\n\nimpl<T: Memory> CPU<T> {\n\n pub fn execute(&mut self) {\n\n #[cfg(feature = \"disasm\")]\n\n {\n", "file_path": "src/execute.rs", "rank": 19, "score": 9.306299547037014 }, { "content": " BEQ => self.branch(self.regs.p.contains(P::Z)),\n\n BNE => self.branch(!self.regs.p.contains(P::Z)),\n\n BCS => self.branch(self.regs.p.contains(P::C)),\n\n BCC => self.branch(!self.regs.p.contains(P::C)),\n\n BVS => self.branch(self.regs.p.contains(P::V)),\n\n BVC => self.branch(!self.regs.p.contains(P::V)),\n\n BMI => self.branch(self.regs.p.contains(P::N)),\n\n BPL => self.branch(!self.regs.p.contains(P::N)),\n\n _ => self.regs.pc + ins_size(&am),\n\n };\n\n self.mem.add_cycles(ticks as usize);\n\n }\n\n}\n\n\n\n#[cfg(feature = \"disasm\")]\n\nimpl<T: Memory> core::fmt::Display for CPU<T> {\n\n fn fmt(&self, f: &mut core::fmt::Formatter) -> Result<(), core::fmt::Error> {\n\n let code = self.try_loadb(self.regs.pc).unwrap();\n\n let (op, am, _) = DECODE[code as usize].clone();\n\n write!(f, \"{:04X} {:02X} \", self.regs.pc, code)?;\n", "file_path": "src/execute.rs", "rank": 20, "score": 8.958675174289128 }, { "content": " ///AbsoluteY 绝对Y变址,三字节指令 a,y 跨页时周期加1\n\n aby,\n\n ///Relative相对寻址,双字节指令 *+d\n\n REL,\n\n ///ZeroPageX零页X变址,双字节指令 d,x\n\n ZPX,\n\n ///ZeroPageY零页Y变址,双字节指令 d,y\n\n ZPY,\n\n ///ZeroPageIndexIndirectX零页间接X变址,双字节指令 (d,x)\n\n ZIX,\n\n ///ZeroPageIndexIndirectY零页间接Y变址,双字节指令 (d),y\n\n ZIY,\n\n ///ZeroPageIndexIndirectY零页间接Y变址,双字节指令 (d),y\n\n ziy,\n\n //AbsoluteIndexIndirect相对寻址,双字节指令 (a) 跨页时周期加1\n\n IND,\n\n // 未知\n\n NON,\n\n}\n\nuse self::AM::*;\n", "file_path": "src/address.rs", "rank": 21, "score": 8.236081248406414 }, { "content": " result -= 1;\n\n }\n\n self.regs.p.set(P::C, (result & 0x100) == 0);\n\n let result = result as u8;\n\n let flag = (a ^ result) & 0x80 != 0x0 && (a ^ val) & 0x80 == 0x80;\n\n self.regs.p.set(P::V, flag);\n\n self.regs.set_a(result);\n\n }\n\n INX => self.regs.set_x(self.regs.x.wrapping_add(1)),\n\n INY => self.regs.set_y(self.regs.y.wrapping_add(1)),\n\n DEX => self.regs.set_x(self.regs.x.wrapping_sub(1)),\n\n DEY => self.regs.set_y(self.regs.y.wrapping_sub(1)),\n\n INC => {\n\n let val = self.load(&am).wrapping_add(1);\n\n self.regs.set_flag_zn(val);\n\n self.store(&am, val);\n\n }\n\n DEC => {\n\n let val = self.load(&am).wrapping_sub(1);\n\n self.regs.set_flag_zn(val);\n", "file_path": "src/execute.rs", "rank": 22, "score": 8.204637163490995 }, { "content": " self.store(&am, val);\n\n }\n\n AND => ap!(|i| self.regs.set_a(i), self.regs.a & self.load(&am)),\n\n ORA => ap!(|i| self.regs.set_a(i), self.regs.a | self.load(&am)),\n\n EOR => ap!(|i| self.regs.set_a(i), self.regs.a ^ self.load(&am)),\n\n BIT => {\n\n let val = self.load(&am);\n\n let a = self.regs.a;\n\n self.regs.p.set(P::Z, (val & a) == 0x0);\n\n self.regs.p.set(P::N, (val & 0x80) != 0);\n\n self.regs.p.set(P::V, (val & 0x40) != 0);\n\n }\n\n CMP => {\n\n let val = self.load(&am);\n\n let a = self.regs.a;\n\n let val = (a as u16).wrapping_sub(val as u16);\n\n self.regs.p.set(P::C, val < 0x100);\n\n self.regs.set_flag_zn(val as u8);\n\n }\n\n CPX => {\n", "file_path": "src/execute.rs", "rank": 23, "score": 7.403801642604913 }, { "content": " }\n\n self.regs.set_a(a);\n\n self.regs.p.set(P::C, a & 0x20 != 0);\n\n self.regs.p.set(P::V, (a >> 5) ^ (a >> 6) & 0x1 != 0);\n\n }\n\n AXS => {\n\n let val = (self.regs.a & self.regs.x) as u16 - self.load(&am) as u16;\n\n self.regs.set_x(val as u8);\n\n self.regs.p.set(P::C, val >> 15 != 0);\n\n }\n\n LAX => {\n\n let val = self.load(&am);\n\n self.regs.set_a(val);\n\n self.regs.set_x(val);\n\n }\n\n SAX => self.store(&am, self.regs.a & self.regs.x),\n\n DCP => {\n\n let val = self.load(&am).wrapping_sub(1);\n\n self.store(&am, val);\n\n let val = (self.regs.a as u16).wrapping_sub(val as u16);\n", "file_path": "src/execute.rs", "rank": 24, "score": 6.762594464881221 }, { "content": "#![no_std]\n\n#[macro_use]\n\nextern crate bitflags;\n\n#[macro_use]\n\nextern crate log;\n\n\n\nmod address;\n\nmod decode;\n\nmod execute;\n\nmod reg;\n\nmod utils;\n\n\n\nuse reg::{Regs, P};\n\n\n\n// IRQ/BRK<NMI<RESET\n\nconst NMI_VECTOR: u16 = 0xFFFA;\n\nconst RESET_VECTOR: u16 = 0xFFFC;\n\n/// IRQ and BRK\n\nconst BRK_VECTOR: u16 = 0xFFFE;\n\n\n", "file_path": "src/lib.rs", "rank": 25, "score": 6.619946153407923 }, { "content": " match ins_size(&am) {\n\n 0 => write!(f, \" \")?,\n\n 1 => write!(f, \"{:02X} \", self.try_loadb(self.regs.pc + 1).unwrap())?,\n\n 2 => write!(\n\n f,\n\n \"{:02X} {:02X} \",\n\n self.try_loadb(self.regs.pc + 1).unwrap(),\n\n self.try_loadb(self.regs.pc + 2).unwrap()\n\n )?,\n\n _ => unreachable!(),\n\n }\n\n use super::address::AM::*;\n\n write!(f, \"{:?} \", op)?;\n\n match am {\n\n IMP => write!(f, \" \")?,\n\n IMM => write!(\n\n f,\n\n \"#${:02X} \",\n\n self.try_loadb(self.regs.pc + 1).unwrap()\n\n )?,\n", "file_path": "src/execute.rs", "rank": 26, "score": 6.54192602353716 }, { "content": "use crate::address::AM::{self, *};\n\nuse crate::{Memory, CPU};\n\nimpl<T: Memory> CPU<T> {\n\n pub fn loadb(&mut self, addr: u16) -> u8 {\n\n self.mem.loadb(addr)\n\n }\n\n pub fn loadw(&mut self, addr: u16) -> u16 {\n\n u16::from_le_bytes([self.loadb(addr), self.loadb(addr + 1)])\n\n }\n\n pub fn loadb_pc(&mut self) -> u8 {\n\n self.loadb(self.regs.pc)\n\n }\n\n pub fn loadw_pc(&mut self) -> u16 {\n\n self.loadw(self.regs.pc)\n\n }\n\n pub fn loadw_zp(&mut self, addr: u8) -> u16 {\n\n let low = self.loadb(addr as u16);\n\n let high = self.loadb(addr.wrapping_add(1) as u16);\n\n u16::from_le_bytes([low, high])\n\n }\n", "file_path": "src/utils.rs", "rank": 27, "score": 6.265138360138944 }, { "content": " }\n\n }\n\n pub fn set_flags(&mut self, val: u8) {\n\n // Due to Note above , always set bit4 to 0 and bit5 to 1\n\n self.p = (P::from_bits(val).unwrap() | P::S) - P::B;\n\n }\n\n pub fn set_flag_zn(&mut self, val: u8) {\n\n self.p.set(P::Z, val == 0);\n\n self.p.set(P::N, (val & 0x80) != 0);\n\n }\n\n pub fn set_x(&mut self, val: u8) {\n\n self.x = val;\n\n self.set_flag_zn(val);\n\n }\n\n pub fn set_y(&mut self, val: u8) {\n\n self.y = val;\n\n self.set_flag_zn(val);\n\n }\n\n pub fn set_a(&mut self, val: u8) {\n\n self.a = val;\n\n self.set_flag_zn(val);\n\n }\n\n}\n", "file_path": "src/reg.rs", "rank": 28, "score": 6.229521119072645 }, { "content": " let addr = self.loadb_pc();\n\n let old_pc = self.regs.pc + 1;\n\n if flag {\n\n let new_pc = ((addr as i8) as i32 + old_pc as i32) as u16;\n\n if ((new_pc ^ old_pc) >> 8) == 0 {\n\n self.mem.add_cycles(1);\n\n } else {\n\n self.mem.add_cycles(2);\n\n }\n\n new_pc\n\n } else {\n\n old_pc\n\n }\n\n }\n\n pub fn get_addr(&mut self, am: &AM) -> u16 {\n\n match *am {\n\n ABS => self.loadw_pc(),\n\n ZPG => self.loadb_pc() as u16,\n\n ABX => self.loadw_pc().wrapping_add(self.regs.x as u16),\n\n abx => {\n", "file_path": "src/utils.rs", "rank": 29, "score": 6.141769128065311 }, { "content": " PLA => ap!(|val| self.regs.set_a(val), self.popb()),\n\n PLP => ap!(|val| self.regs.set_flags(val), self.popb()),\n\n ADC => {\n\n let val = self.load(&am);\n\n let a = self.regs.a;\n\n let mut result = a as u16 + val as u16;\n\n if self.regs.p.contains(P::C) {\n\n result += 1;\n\n }\n\n self.regs.p.set(P::C, (result & 0x100) != 0);\n\n let result = result as u8;\n\n let flag = (a ^ val) & 0x80 == 0x0 && (a ^ result) & 0x80 == 0x80;\n\n self.regs.p.set(P::V, flag);\n\n self.regs.set_a(result);\n\n }\n\n SBC => {\n\n let val = self.load(&am);\n\n let a = self.regs.a;\n\n let mut result = a as i16 - val as i16;\n\n if !self.regs.p.contains(P::C) {\n", "file_path": "src/execute.rs", "rank": 30, "score": 6.081790821105937 }, { "content": " // index Register\n\n pub x: u8,\n\n // index Register\n\n pub y: u8,\n\n // Stack $100-$1FF\n\n // Stack Pointer\n\n pub s: u8,\n\n pub p: P,\n\n /// Program Counter\n\n pub pc: u16,\n\n}\n\nimpl Regs {\n\n pub fn new(pc: u16) -> Regs {\n\n Regs {\n\n a: 0,\n\n x: 0,\n\n y: 0,\n\n s: 0xfd,\n\n p: P::S | P::I,\n\n pc,\n", "file_path": "src/reg.rs", "rank": 31, "score": 5.5211021613980344 }, { "content": " const V = 1 << 6;\n\n /// 5: s: always 1\n\n const S = 1 << 5;\n\n /// 4: b: see Note\n\n const B = 1 << 4;\n\n /// 3: d:Decimal ignored by NES\n\n const D = 1 << 3;\n\n /// 2: I:interrupt 1 if disable interrupt\n\n const I = 1 << 2;\n\n /// 1: Z:Zero 1 if result is 0\n\n const Z = 1 << 1;\n\n /// 0: C:Carry 1 if result has carry(add) or borow(sub)\n\n const C = 1 << 0;\n\n }\n\n}\n\n\n\n#[allow(non_camel_case_types)]\n\npub struct Regs {\n\n // Auumulator\n\n pub a: u8,\n", "file_path": "src/reg.rs", "rank": 32, "score": 5.432931130720365 }, { "content": " ACC => write!(f, \"A \")?,\n\n ABS => {\n\n let addr = self.try_loadw(self.regs.pc + 1).unwrap();\n\n match self.try_loadb(addr) {\n\n Some(data) => write!(f, \"${:04X} = {:02X} \", addr, data)?,\n\n None => write!(f, \"${:04X} REG \", addr)?,\n\n }\n\n }\n\n ZPG => {\n\n let addr = self.try_loadb(self.regs.pc + 1).unwrap();\n\n match self.try_loadb(addr as u16) {\n\n Some(data) => write!(f, \"${:02X} = {:02X} \", addr, data)?,\n\n None => write!(f, \"${:02X} REG \", addr)?,\n\n }\n\n }\n\n ABX | abx => {\n\n let addr = self.try_loadw(self.regs.pc + 1).unwrap();\n\n let addr_x = addr.wrapping_add(self.regs.x as u16);\n\n match self.try_loadb(addr_x) {\n\n Some(data) => write!(f, \"${:04X},X @ {:04X} = {:02X} \", addr, addr_x, data)?,\n", "file_path": "src/execute.rs", "rank": 33, "score": 5.427555461350819 }, { "content": " pub fn pushb(&mut self, val: u8) {\n\n let s = self.regs.s;\n\n self.regs.s = self.regs.s.wrapping_sub(1);\n\n self.storeb(0x100 + s as u16, val);\n\n }\n\n pub fn pushw(&mut self, val: u16) {\n\n self.pushb((val >> 8) as u8);\n\n self.pushb((val & 0xff) as u8)\n\n }\n\n pub fn popb(&mut self) -> u8 {\n\n self.regs.s = self.regs.s.wrapping_add(1);\n\n let s = self.regs.s;\n\n self.loadb(0x100 + s as u16)\n\n }\n\n pub fn popw(&mut self) -> u16 {\n\n let low = self.popb();\n\n let high = self.popb();\n\n u16::from_le_bytes([low, high])\n\n }\n\n pub fn load(&mut self, am: &AM) -> u8 {\n", "file_path": "src/utils.rs", "rank": 34, "score": 5.265178924255366 }, { "content": " let base = self.loadw_pc();\n\n let addr = base.wrapping_add(self.regs.x as u16);\n\n if ((base ^ addr) >> 8) != 0 {\n\n self.mem.add_cycles(1);\n\n }\n\n addr\n\n }\n\n ABY => self.loadw_pc().wrapping_add(self.regs.y as u16),\n\n aby => {\n\n let base = self.loadw_pc();\n\n let addr = base.wrapping_add(self.regs.y as u16);\n\n if ((base ^ addr) >> 8) != 0 {\n\n self.mem.add_cycles(1);\n\n }\n\n addr\n\n }\n\n ZPX => self.loadb_pc().wrapping_add(self.regs.x) as u16,\n\n ZPY => self.loadb_pc().wrapping_add(self.regs.y) as u16,\n\n ZIX => {\n\n let addr = self.loadb_pc();\n", "file_path": "src/utils.rs", "rank": 35, "score": 4.649350243013858 }, { "content": " #[cfg(feature = \"disasm\")]\n\n pub fn try_loadb(&self, addr: u16) -> Option<u8> {\n\n self.mem.try_loadb(addr)\n\n }\n\n #[cfg(feature = \"disasm\")]\n\n pub fn try_loadw(&self, addr: u16) -> Option<u16> {\n\n Some(u16::from_le_bytes([\n\n self.try_loadb(addr)?,\n\n self.try_loadb(addr + 1)?,\n\n ]))\n\n }\n\n #[cfg(feature = \"disasm\")]\n\n pub fn try_loadw_zp(&self, addr: u8) -> Option<u16> {\n\n let low = self.try_loadb(addr as u16)?;\n\n let high = self.try_loadb(addr.wrapping_add(1) as u16)?;\n\n Some(u16::from_le_bytes([low, high]))\n\n }\n\n pub fn storeb(&mut self, addr: u16, val: u8) {\n\n self.mem.storeb(addr, val)\n\n }\n", "file_path": "src/utils.rs", "rank": 36, "score": 4.4891413548590045 }, { "content": " let val = self.load(&am);\n\n let x = self.regs.x;\n\n let val = (x as u16).wrapping_sub(val as u16);\n\n self.regs.p.set(P::C, val < 0x100);\n\n self.regs.set_flag_zn(val as u8);\n\n }\n\n CPY => {\n\n let val = self.load(&am);\n\n let y = self.regs.y;\n\n let val = (y as u16).wrapping_sub(val as u16);\n\n self.regs.p.set(P::C, val < 0x100);\n\n self.regs.set_flag_zn(val as u8);\n\n }\n\n ASL => {\n\n let val = self.load(&am);\n\n self.regs.p.set(P::C, (val & 0x80) != 0);\n\n let val = val << 1;\n\n self.regs.set_flag_zn(val);\n\n self.store(&am, val);\n\n }\n", "file_path": "src/execute.rs", "rank": 37, "score": 4.380241281112793 }, { "content": " LSR => {\n\n let val = self.load(&am);\n\n self.regs.p.set(P::C, (val & 0x1) != 0);\n\n let val = val >> 1;\n\n self.regs.set_flag_zn(val);\n\n self.store(&am, val);\n\n }\n\n ROL => {\n\n let mut val = self.load(&am);\n\n let carry = self.regs.p.contains(P::C);\n\n self.regs.p.set(P::C, (val & 0x80) != 0);\n\n val = val << 1;\n\n if carry {\n\n val |= 0x1;\n\n }\n\n self.regs.set_flag_zn(val);\n\n self.store(&am, val);\n\n }\n\n ROR => {\n\n let mut val = self.load(&am);\n", "file_path": "src/execute.rs", "rank": 38, "score": 4.35009431104066 }, { "content": " self.regs.p.set(P::C, val < 0x100);\n\n self.regs.set_flag_zn(val as u8);\n\n }\n\n ISC => {\n\n let val = self.load(&am).wrapping_add(1);\n\n self.regs.set_flag_zn(val);\n\n self.store(&am, val);\n\n let a = self.regs.a;\n\n let mut result = (a as u16).wrapping_sub(val as u16);\n\n if !self.regs.p.contains(P::C) {\n\n result -= 1;\n\n }\n\n self.regs.p.set(P::C, (result & 0x100) == 0);\n\n let result = result as u8;\n\n let flag = (a ^ result) & 0x80 != 0x0 && (a ^ val) & 0x80 == 0x80;\n\n self.regs.p.set(P::V, flag);\n\n self.regs.set_a(result);\n\n }\n\n RLA => {\n\n let mut val = self.load(&am);\n", "file_path": "src/execute.rs", "rank": 39, "score": 4.245699433807197 }, { "content": " None => write!(f, \"${:04X},X @ {:04X} REG \", addr, addr_x)?,\n\n }\n\n }\n\n ABY | aby => {\n\n let addr = self.try_loadw(self.regs.pc + 1).unwrap();\n\n let addr_y = addr.wrapping_add(self.regs.y as u16);\n\n match self.try_loadb(addr_y) {\n\n Some(data) => write!(f, \"${:04X},Y @ {:04X} = {:02X} \", addr, addr_y, data)?,\n\n None => write!(f, \"${:04X},Y @ {:04X} REG \", addr, addr_y,)?,\n\n }\n\n }\n\n REL => {\n\n let addr = self.try_loadb(self.regs.pc + 1).unwrap();\n\n let new_pc = ((addr as i8) as i32 + self.regs.pc as i32 + 2) as u16;\n\n write!(f, \"${:04X} \", new_pc)?\n\n }\n\n ZPX => {\n\n let addr = self.try_loadb(self.regs.pc + 1).unwrap();\n\n let addr_x = addr.wrapping_add(self.regs.x) as u16;\n\n match self.try_loadb(addr_x) {\n", "file_path": "src/execute.rs", "rank": 40, "score": 3.999584912454807 }, { "content": " let pc = self.regs.pc;\n\n let mut flags = self.regs.p;\n\n flags -= P::B;\n\n self.pushw(pc);\n\n self.pushb(flags.bits());\n\n self.regs.p.insert(P::I);\n\n self.regs.pc = self.loadw(BRK_VECTOR);\n\n }\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 41, "score": 3.944079413212423 }, { "content": " ])),\n\n mem,\n\n }\n\n }\n\n // External interrupts\n\n pub fn reset(&mut self) {\n\n self.regs = Regs::new(self.loadw(RESET_VECTOR));\n\n self.mem.reset();\n\n }\n\n pub fn nmi(&mut self) {\n\n let pc = self.regs.pc;\n\n let mut flags = self.regs.p;\n\n flags -= P::B;\n\n self.pushw(pc);\n\n self.pushb(flags.bits());\n\n self.regs.p.insert(P::I);\n\n self.regs.pc = self.loadw(NMI_VECTOR);\n\n }\n\n pub fn irq(&mut self) {\n\n if !self.regs.p.contains(P::I) {\n", "file_path": "src/lib.rs", "rank": 42, "score": 3.842266437014902 }, { "content": " let val = self.load(&am);\n\n let mut a = self.regs.a;\n\n a &= val;\n\n self.regs.p.set(P::C, a & 0x1 != 0);\n\n a = a >> 1;\n\n self.regs.set_a(a);\n\n }\n\n ANC => {\n\n let val = self.load(&am);\n\n let a = self.regs.a;\n\n self.regs.set_a(a & val);\n\n self.regs.p.set(P::C, self.regs.p.contains(P::N))\n\n }\n\n ARR => {\n\n let val = self.load(&am);\n\n let mut a = self.regs.a;\n\n a &= val;\n\n a = a >> 1;\n\n if self.regs.p.contains(P::C) {\n\n a |= 0x80;\n", "file_path": "src/execute.rs", "rank": 43, "score": 3.30109580983088 }, { "content": " None => write!(f, \"(${:02X}),Y={:04X}@{:04X}REG\", addr, addr_zw, addr_y)?,\n\n }\n\n }\n\n IND => {\n\n let addr = self.try_loadw(self.regs.pc + 1).unwrap();;\n\n let low = self.try_loadb(addr).unwrap();\n\n let high = self\n\n .try_loadb((addr & 0xFF00) | ((addr + 1) & 0xFF))\n\n .unwrap();\n\n let target = low as u16 | (high as u16) << 8;\n\n write!(f, \"(${:04X}) = {:04X} \", addr, target)?\n\n }\n\n NON => write!(f, \"NON \")?,\n\n }\n\n write!(\n\n f,\n\n \" A:{:02X} X:{:02X} Y:{:02X} P:{:02X} SP:{:02X} CYC:{}\",\n\n self.regs.a,\n\n self.regs.x,\n\n self.regs.y,\n\n self.regs.p.bits(),\n\n self.regs.s,\n\n self.mem.get_cycles()\n\n )\n\n }\n\n}\n", "file_path": "src/execute.rs", "rank": 44, "score": 3.1265647561172187 }, { "content": " }\n\n self.regs.set_flag_zn(val);\n\n self.store(&am, val);\n\n let a = self.regs.a;\n\n let mut result = a as u16 + val as u16;\n\n if new_carry {\n\n result += 1;\n\n }\n\n self.regs.p.set(P::C, (result & 0x100) != 0);\n\n let result = result as u8;\n\n self.regs\n\n .p\n\n .set(P::V, (a ^ val) & 0x80 == 0x0 && (a ^ result) & 0x80 == 0x80);\n\n self.regs.set_a(result);\n\n }\n\n SLO => {\n\n let val = self.load(&am);\n\n self.regs.p.set(P::C, (val & 0x80) != 0);\n\n let val = val << 1;\n\n self.regs.set_flag_zn(val);\n", "file_path": "src/execute.rs", "rank": 45, "score": 2.6655519605211264 }, { "content": " self.loadw_zp(addr.wrapping_add(self.regs.x))\n\n }\n\n ZIY => {\n\n let addr = self.loadb_pc();\n\n self.loadw_zp(addr).wrapping_add(self.regs.y as u16)\n\n }\n\n ziy => {\n\n let addr = self.loadb_pc();\n\n let base = self.loadw_zp(addr);\n\n let addr = base.wrapping_add(self.regs.y as u16);\n\n if ((addr ^ base) >> 8) != 0 {\n\n self.mem.add_cycles(1);\n\n }\n\n addr\n\n }\n\n IND => {\n\n let addr = self.loadw_pc();\n\n let low = self.loadb(addr);\n\n let high = self.loadb((addr & 0xFF00) | ((addr + 1) & 0xFF));\n\n low as u16 | (high as u16) << 8\n\n }\n\n _ => unimplemented!(),\n\n }\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 46, "score": 1.9982232059066416 }, { "content": " match self.try_loadb(addr_zw) {\n\n Some(data) => write!(\n\n f,\n\n \"(${:02X},X)@{:02X}= {:04X}= {:02X}\",\n\n addr, addr_x, addr_zw, data\n\n )?,\n\n None => write!(f, \"(${:02X},X)@{:02X}= {:04X} REG\", addr, addr_x, addr_zw)?,\n\n }\n\n }\n\n ZIY | ziy => {\n\n let addr = self.try_loadb(self.regs.pc + 1).unwrap();\n\n let addr_zw = self.try_loadw_zp(addr).unwrap();\n\n let addr_y = addr_zw.wrapping_add(self.regs.y as u16);\n\n let data = self.try_loadb(addr_y).unwrap();\n\n match self.try_loadb(addr_y) {\n\n Some(data) => write!(\n\n f,\n\n \"(${:02X}),Y={:04X}@{:04X}={:02X}\",\n\n addr, addr_zw, addr_y, data\n\n )?,\n", "file_path": "src/execute.rs", "rank": 47, "score": 1.8501860394753902 }, { "content": "# mos6502\n\n\n\nA Interpreter/Disassembler for MOS 6502.\n\n\n\n## status\n\n\n\nCurrently mainly supports Ricoh 2A03,and support for BCD is missing.\n\n\n\nCan pass the test of [nestest.nes](http://www.qmtpro.com/~nes/misc/nestest.nes)\n\n\n\n![test1](./doc/test1.jpg)\n\n![test2](./doc/test2.jpg)\n\n\n\nNeed more test on non-nes target\n\n\n\nDisassembler is incomplete. Currently only the execution log is displayed in the terminal (by enable `disasm` feature).\n\n\n\n## no_std\n\n\n", "file_path": "README.md", "rank": 48, "score": 1.3409295094275424 }, { "content": " let carry = self.regs.p.contains(P::C);\n\n let new_carry = (val & 0x80) != 0;\n\n self.regs.p.set(P::C, new_carry);\n\n val = val << 1;\n\n if carry {\n\n val |= 0x1;\n\n }\n\n self.regs.set_flag_zn(val);\n\n self.store(&am, val);\n\n self.regs.a &= val;\n\n self.regs.set_flag_zn(self.regs.a)\n\n }\n\n RRA => {\n\n let mut val = self.load(&am);\n\n let carry = self.regs.p.contains(P::C);\n\n let new_carry = (val & 0x1) != 0;\n\n self.regs.p.set(P::C, new_carry);\n\n val = val >> 1;\n\n if carry {\n\n val |= 0x80;\n", "file_path": "src/execute.rs", "rank": 49, "score": 1.3350120913560115 }, { "content": " Some(data) => {\n\n write!(f, \"${:02X},X @ {:02X} = {:02X} \", addr, addr_x, data)?\n\n }\n\n None => write!(f, \"${:02X},X @ {:02X} REG \", addr, addr_x)?,\n\n }\n\n }\n\n ZPY => {\n\n let addr = self.try_loadb(self.regs.pc + 1).unwrap();\n\n let addr_y = addr.wrapping_add(self.regs.y) as u16;\n\n match self.try_loadb(addr_y) {\n\n Some(data) => {\n\n write!(f, \"${:02X},Y @ {:02X} = {:02X} \", addr, addr_y, data)?\n\n }\n\n None => write!(f, \"${:02X},Y @ {:02X} REG \", addr, addr_y)?,\n\n }\n\n }\n\n ZIX => {\n\n let addr = self.try_loadb(self.regs.pc + 1).unwrap();\n\n let addr_x = addr.wrapping_add(self.regs.x);\n\n let addr_zw = self.try_loadw_zp(addr_x).unwrap();\n", "file_path": "src/execute.rs", "rank": 50, "score": 1.2196653153517643 } ]
Rust
src/day14/data/mod.rs
Fryuni/advent-of-code-2021
832121aba87516b4c2727eee9869348e1a5a7840
/* * MIT License * * Copyright (c) 2021 Luiz Ferraz * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ pub mod parser; use itertools::Itertools; use std::collections::HashMap; use std::fmt::{Debug, Formatter, Write}; use std::ops::AddAssign; #[derive(Clone)] pub struct Polymer(Vec<char>); #[derive(Debug)] pub struct PairCounters(HashMap<(char, char), usize>); #[derive(Debug)] pub struct PolymerizationRules { pairs: HashMap<(char, char), char>, } impl Polymer { pub fn grow(&mut self, rules: &PolymerizationRules) { let mut elements = Vec::with_capacity(2 * self.0.len() - 1); for pair in self.0.iter().copied().tuple_windows::<(_, _)>() { elements.push(pair.0); if let Some(extra) = rules.pairs.get(&pair) { elements.push(*extra); } } elements.push(*self.0.last().unwrap()); self.0 = elements; } pub fn elements(&self) -> &[char] { &self.0 } } impl From<&Polymer> for PairCounters { fn from(polymer: &Polymer) -> Self { Self( polymer .elements() .iter() .copied() .tuple_windows::<(_, _)>() .counts(), ) } } impl PairCounters { pub fn project_growth(&mut self, rules: &PolymerizationRules) { let mut new_counters: HashMap<(char, char), usize> = HashMap::with_capacity(self.0.len()); for (&(a, b), &count) in &self.0 { if let Some(&extra) = rules.pairs.get(&(a, b)) { new_counters .entry((a, extra)) .or_default() .add_assign(count); new_counters .entry((extra, b)) .or_default() .add_assign(count); } else { new_counters.entry((a, b)).or_default().add_assign(count); } } self.0 = new_counters; } pub fn into_element_counters(self) -> HashMap<char, usize> { let mut counters: HashMap<char, usize> = HashMap::new(); for ((a, _), count) in self.0 { counters.entry(a).or_default().add_assign(count); } counters } } #[derive(Debug)] pub struct Data { pub template: Polymer, pub rules: PolymerizationRules, } impl Debug for Polymer { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { f.write_str("Polymer(")?; for c in &self.0 { f.write_char(*c)?; } f.write_str(")") } }
/* * MIT License * * Copyright (c) 2021 Luiz Ferraz * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ pub mod parser; use itertools::Itertools; use std::collections::HashMap; use std::fmt::{Debug, Formatter, Write}; use std::ops::AddAssign; #[derive(Clone)] pub struct Polymer(Vec<char>); #[derive(Debug)] pub struct PairCounters(HashMap<(char, char), usize>); #[derive(Debug)] pub struct PolymerizationRules { pairs: HashMap<(char, char), char>, } impl Polymer { pub fn grow(&mut self, rules: &PolymerizationRules) { let mut elements = Vec::with_capacity(2 * self.0.len() - 1); for pair in self.0.iter().copied().tuple_windows::<(_, _)>() { elements.push(pair.0); if let Some(extra) = rules.pairs.get(&pair) { elements.push(*extra); } } elements.push(*self.0.last().unwrap()); self.0 = elements; } pub fn elements(&self) -> &[char] { &self.0 } } impl From<&Polymer> for PairCounters { fn from(polymer: &Polymer) -> Self { Self( polymer .elements() .iter() .copied() .tuple_windows::<(_, _)>() .counts(), ) } } impl PairCounters { pub fn project_growth(&mut self, rules: &PolymerizationRules) { let mut new_counters: HashMap<(char, char), usize> = HashMap::with_capacity(self.0.len()); for (&(a, b), &count) in &self.0 { if let Some(&extra) = rules.pairs.get(&(a, b)) { new_counters .entry((a, extra)) .or_default() .add_assign(count); new_counters .entry((extra, b)) .or_default() .add_assign(count); } else { new_counters.entry((a, b)).or_default().add_assign(count); } } self.0 = new_counters; }
} #[derive(Debug)] pub struct Data { pub template: Polymer, pub rules: PolymerizationRules, } impl Debug for Polymer { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { f.write_str("Polymer(")?; for c in &self.0 { f.write_char(*c)?; } f.write_str(")") } }
pub fn into_element_counters(self) -> HashMap<char, usize> { let mut counters: HashMap<char, usize> = HashMap::new(); for ((a, _), count) in self.0 { counters.entry(a).or_default().add_assign(count); } counters }
function_block-function_prefix_line
[ { "content": "fn iter_matrix() -> impl Iterator<Item = (usize, usize)> {\n\n (0..10).cartesian_product(0..10)\n\n}\n\n\n\nimpl State {\n\n const FLASHED: usize = 20;\n\n\n\n pub fn advance_state(&mut self) -> usize {\n\n // println!(\"Initial state:\\n{:?}\", self);\n\n\n\n // Advance all the energy levels by 1\n\n for (i, j) in iter_matrix() {\n\n self.energy[i][j] += 1;\n\n }\n\n\n\n // println!(\"Charged state:\\n{:?}\", self);\n\n\n\n // Flashes every energy level above 9\n\n for (i, j) in iter_matrix() {\n\n self.flash(i, j);\n", "file_path": "src/day11/data.rs", "rank": 0, "score": 185496.4380310355 }, { "content": "/// Return an iterator over all the pairs of x, y coordinates of the given matrix\n\nfn get_coordinates(x: usize, y: usize) -> impl Iterator<Item = (usize, usize)> {\n\n (0..x).cartesian_product(0..y)\n\n}\n\n\n", "file_path": "src/day9/main.rs", "rank": 1, "score": 171908.3581863903 }, { "content": "pub fn parse_pair(input: &str) -> ParseResult<Pair> {\n\n nom::combinator::map(\n\n nom::sequence::delimited(\n\n nom::character::complete::char('['),\n\n nom::sequence::separated_pair(\n\n parse_element,\n\n nom::character::complete::char(','),\n\n parse_element,\n\n ),\n\n nom::character::complete::char(']'),\n\n ),\n\n |(left, right)| Pair { left, right },\n\n )(input)\n\n}\n\n\n", "file_path": "src/day18/data/parsing.rs", "rank": 2, "score": 169348.5583033777 }, { "content": "pub fn parse_element(input: &str) -> ParseResult<Element> {\n\n nom::sequence::delimited(\n\n nom::character::complete::space0,\n\n nom::character::complete::i64\n\n .map(Element::Number)\n\n .or(parse_pair.map(Box::new).map(Element::Pair)),\n\n nom::character::complete::space0,\n\n )(input)\n\n}\n\n\n", "file_path": "src/day18/data/parsing.rs", "rank": 3, "score": 169166.10742983408 }, { "content": "pub fn parse_many(input: &str) -> ParseResult<Vec<Element>> {\n\n nom::multi::separated_list1(nom::character::complete::line_ending, parse_element)(input)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use test_case::test_case;\n\n\n\n use aoc2021::nom::parse_all;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_parse_element() {\n\n assert_eq!(parse_element(\"1\"), Ok((\"\", Element::Number(1))));\n\n assert_eq!(\n\n parse_element(\"[1,2]\"),\n\n Ok((\n\n \"\",\n\n Element::Pair(Box::new(Pair {\n", "file_path": "src/day18/data/parsing.rs", "rank": 4, "score": 154802.21700302837 }, { "content": "fn input_parser<'a>() -> impl nom::Parser<&'a str, InputData, VerboseError<&'a str>> {\n\n nom::combinator::map(\n\n nom::sequence::separated_pair(\n\n nom::multi::separated_list1(nom::bytes::complete::tag(\",\"), parse_usize),\n\n nom::multi::count(nom::character::complete::newline, 2),\n\n nom::multi::separated_list1(nom::character::complete::newline, board::parse),\n\n ),\n\n |(chosen_numbers, boards)| InputData {\n\n chosen_numbers,\n\n boards,\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/day4/main.rs", "rank": 5, "score": 153945.42342844553 }, { "content": "pub fn calculate_cost(input: &Grid) -> usize {\n\n let grid_size = input.size();\n\n let mut cost_grid: Grid<Cost> = Grid::new(input.size());\n\n\n\n for x in (0..grid_size).rev() {\n\n for y in (0..grid_size).rev() {\n\n cost_grid.set_cost(x, y, input[(x, y)]);\n\n }\n\n }\n\n\n\n cost_grid[(grid_size - 1, grid_size - 1)].minimum = 0;\n\n\n\n cost_grid.propagate();\n\n\n\n cost_grid[(0, 0)].minimum\n\n}\n\n\n\nimpl PartialOrd for Cost {\n\n fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n\n Some(self.total().cmp(&other.total()))\n", "file_path": "src/day15/solution.rs", "rank": 6, "score": 138875.22318720375 }, { "content": "pub fn challenge_two(input: &Matrix) -> usize {\n\n let oxygen = Criteria::apply(Criteria::Oxygen, input);\n\n\n\n let carbon_dioxide = Criteria::apply(Criteria::CO2, input);\n\n\n\n oxygen * carbon_dioxide\n\n}\n", "file_path": "src/day3/challenge_two.rs", "rank": 7, "score": 136537.28331609617 }, { "content": "pub fn challenge_one(input: &Matrix) -> usize {\n\n let mut counter = StateCounter::new(input.width);\n\n\n\n for v in &input.data {\n\n for (position, &state) in v.iter().enumerate() {\n\n counter.add(state, position);\n\n }\n\n }\n\n\n\n let pairs = counter.into_iter().collect_vec();\n\n\n\n let (mut gamma, mut epsilon) = (0, 0);\n\n\n\n for (offset, (zeros, ones)) in pairs.into_iter().rev().enumerate() {\n\n if ones > zeros {\n\n gamma |= 1 << offset;\n\n } else {\n\n epsilon |= 1 << offset;\n\n }\n\n }\n\n\n\n gamma * epsilon\n\n}\n", "file_path": "src/day3/challenge_one.rs", "rank": 8, "score": 136537.2833160962 }, { "content": "fn extract_answer_from_counters(counters: HashMap<char, usize>) -> anyhow::Result<usize> {\n\n counters\n\n .into_iter()\n\n .minmax_by(|(_, left), (_, right)| left.cmp(right))\n\n .into_option()\n\n .map(|((min_c, min_v), (max_c, max_v))| {\n\n println!(\"Min: {} Max: {}\", min_c, max_c);\n\n\n\n max_v - min_v\n\n })\n\n .ok_or(anyhow::anyhow!(\"No minmax found\"))\n\n}\n\n\n", "file_path": "src/day14/main.rs", "rank": 9, "score": 133591.5087274065 }, { "content": "pub fn parse_input(input: &str) -> ParseResult<Data> {\n\n map(\n\n separated_pair(\n\n parse_grid,\n\n count(newline, 2),\n\n separated_list1(newline, parse_instruction),\n\n ),\n\n |(grid, instructions)| Data {\n\n grid,\n\n fold_instructions: instructions,\n\n },\n\n )(input)\n\n}\n", "file_path": "src/day13/data/parser.rs", "rank": 10, "score": 130923.85350616438 }, { "content": "pub fn parse_input(input: &str) -> ParseResult<Data> {\n\n let (rem, (polymer, rules)) =\n\n separated_pair(parse_polymer, newline, parse_polymerization_rules)(input)?;\n\n\n\n Ok((\n\n rem,\n\n Data {\n\n template: polymer,\n\n rules,\n\n },\n\n ))\n\n}\n", "file_path": "src/day14/data/parser.rs", "rank": 11, "score": 130923.85350616438 }, { "content": "pub fn parse_packet(data: &[u8]) -> ParseResult<Packet, &[u8]> {\n\n bits(parse_packet_inner)(data)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n macro_rules! test_packet {\n\n ($name: ident, $input: literal, $packet: expr) => {\n\n #[test]\n\n fn $name() {\n\n let data = hex::decode($input).unwrap();\n\n let (_, packet) = parse_packet(&data).unwrap();\n\n\n\n let expected = $packet;\n\n\n\n assert_eq!(packet, expected);\n\n }\n\n };\n", "file_path": "src/day16/parser.rs", "rank": 12, "score": 129301.04804610368 }, { "content": "pub fn abs_diff<T: PartialOrd + Sub>(a: T, b: T) -> T::Output {\n\n if a > b {\n\n a - b\n\n } else {\n\n b - a\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 13, "score": 116764.3231903752 }, { "content": "fn parse_polymer(input: &str) -> ParseResult<Polymer> {\n\n map(alpha1, |c: &str| Polymer(c.chars().collect()))(input)\n\n}\n\n\n", "file_path": "src/day14/data/parser.rs", "rank": 14, "score": 115624.38238949273 }, { "content": "fn parse_polymerization_rules(input: &str) -> ParseResult<PolymerizationRules> {\n\n let mut parse_iterator = iterator(\n\n input,\n\n preceded(\n\n newline,\n\n separated_pair(pair(anychar, anychar), tag(\" -> \"), anychar),\n\n ),\n\n );\n\n\n\n let rules = PolymerizationRules {\n\n pairs: parse_iterator.collect(),\n\n };\n\n\n\n parse_iterator.finish().map(|(r, _)| (r, rules))\n\n}\n\n\n", "file_path": "src/day14/data/parser.rs", "rank": 15, "score": 111636.38742702093 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nstruct Coordinate(usize, usize);\n\n\n\nimpl Controller {\n\n fn neighboors_of(&self, point: Coordinate) -> Vec<Coordinate> {\n\n let mut neighboors = Vec::with_capacity(4);\n\n let x = point.0;\n\n let y = point.1;\n\n if x > 0 {\n\n neighboors.push(Coordinate(x - 1, y));\n\n }\n\n if x < self.input[0].len() - 1 {\n\n neighboors.push(Coordinate(x + 1, y));\n\n }\n\n if y > 0 {\n\n neighboors.push(Coordinate(x, y - 1));\n\n }\n\n if y < self.input.len() - 1 {\n\n neighboors.push(Coordinate(x, y + 1));\n\n }\n\n neighboors\n", "file_path": "src/day9/challenge_two.rs", "rank": 16, "score": 107451.13993100393 }, { "content": "fn challenge_one(input: &[Element]) -> anyhow::Result<i64> {\n\n // Get the magnitude of the summation of all the elements\n\n input\n\n .iter()\n\n .cloned()\n\n .reduce(std::ops::Add::add)\n\n .ok_or_else(|| anyhow!(\"No elements provided\"))\n\n .map(|element| element.magnitude())\n\n}\n\n\n", "file_path": "src/day18/main.rs", "rank": 17, "score": 104773.86622184084 }, { "content": "fn challenge_two(input: &[Element]) -> anyhow::Result<i64> {\n\n // Get the maximum magnitude possible, adding just two of the elements\n\n\n\n input\n\n .into_par_iter()\n\n .enumerate()\n\n .flat_map(|(left_index, left)| {\n\n input\n\n .into_par_iter()\n\n .enumerate()\n\n .filter_map(move |(right_index, right)| {\n\n if left_index == right_index {\n\n None\n\n } else {\n\n Some((left.clone() + right.clone()).magnitude())\n\n }\n\n })\n\n })\n\n .max()\n\n .ok_or_else(|| anyhow!(\"Could not compute maximum magnitude\"))\n\n}\n\n\n", "file_path": "src/day18/main.rs", "rank": 18, "score": 104773.86622184084 }, { "content": "pub trait InputProvider {\n\n /// Returns the input as a string\n\n ///\n\n /// # Errors\n\n /// If the input cannot be read or is not valid an error is returned\n\n fn get_input(&self, name: &str) -> anyhow::Result<String>;\n\n}\n\n\n\npub struct LazyInputProvider(&'static str);\n\n\n\nimpl LazyInputProvider {\n\n #[must_use]\n\n pub const fn new(path: &'static str) -> Self {\n\n Self(path)\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! lazy_input {\n\n ($day: literal) => {\n", "file_path": "src/lib.rs", "rank": 19, "score": 104593.17639615113 }, { "content": "pub fn run(input: &[Vec<u32>]) -> u32 {\n\n let mut controller = Controller {\n\n input: input.to_owned(),\n\n counter: vec![vec![1; input[0].len()]; input.len()],\n\n };\n\n\n\n controller.calculate_descent();\n\n\n\n let mut basin_sizes = controller\n\n .find_low_points()\n\n .into_iter()\n\n .map(|point| controller.get_counter(point))\n\n .collect_vec();\n\n\n\n basin_sizes.sort_unstable();\n\n\n\n basin_sizes.into_iter().rev().take(3).product()\n\n}\n\n\n\nimpl Debug for Controller {\n", "file_path": "src/day9/challenge_two.rs", "rank": 20, "score": 104223.40618795175 }, { "content": "#[derive(Debug)]\n\nstruct FastFishes([usize; 9]);\n\n\n\nimpl FastFishes {\n\n fn from_input(input: &[u8]) -> Self {\n\n let mut counters = [0; 9];\n\n\n\n for &i in input {\n\n counters[i as usize] += 1;\n\n }\n\n\n\n Self(counters)\n\n }\n\n\n\n fn advance_day(&mut self) {\n\n self.0.rotate_left(1);\n\n\n\n self.0[6] += self.0[8];\n\n }\n\n}\n\n\n", "file_path": "src/day6/main.rs", "rank": 21, "score": 100556.6117115974 }, { "content": "#[derive(Debug)]\n\nstruct StateCounter(Vec<usize>, Vec<usize>);\n\n\n\nimpl StateCounter {\n\n fn new(size: usize) -> Self {\n\n Self(vec![0; size], vec![0; size])\n\n }\n\n\n\n fn add(&mut self, state: State, position: usize) {\n\n match state {\n\n State::Zero => self.0[position] += 1,\n\n State::One => self.1[position] += 1,\n\n };\n\n }\n\n}\n\n\n\nimpl IntoIterator for StateCounter {\n\n type Item = (usize, usize);\n\n type IntoIter = impl Iterator<Item = Self::Item>;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.0.into_iter().zip(self.1.into_iter())\n\n }\n\n}\n\n\n", "file_path": "src/day3/challenge_one.rs", "rank": 22, "score": 100320.17575917393 }, { "content": "struct PositionCounters(Vec<usize>);\n\n\n\nimpl PositionCounters {\n\n fn from_input(input: &[usize]) -> Self {\n\n let max = input.iter().max().unwrap();\n\n let mut counters = vec![0; *max + 1];\n\n\n\n for i in input {\n\n counters[*i] += 1;\n\n }\n\n\n\n PositionCounters(counters)\n\n }\n\n\n\n fn len(&self) -> usize {\n\n self.0.len()\n\n }\n\n\n\n fn iter(&self) -> impl Iterator<Item = usize> + '_ {\n\n self.0.iter().copied()\n\n }\n\n}\n\n\n", "file_path": "src/day7/main.rs", "rank": 23, "score": 98684.49876795422 }, { "content": "/// Binary search for the last number that matches the predicate\n\n///\n\n/// ## Errors\n\n///\n\n/// If no value below 1000 matches the predicate an error is returned for not finding a lower bound.\n\n/// If the first 32 powers of 2 offsets from the lower bound matches the predicate an error is returned for not finding an upper bound.\n\npub fn binary_search_last<F, V>(test: F) -> anyhow::Result<(i64, V)>\n\nwhere\n\n F: Fn(i64) -> Option<V>,\n\n{\n\n // Find the initial bounds\n\n let mut low = (0..1000)\n\n .find(|&x| test(x).is_some())\n\n .ok_or_else(|| anyhow::anyhow!(\"Could not find lower bound\"))?;\n\n\n\n let mut high = (0..32)\n\n .map(|x| low.max(1) << x)\n\n .find(|&x| test(x).is_none())\n\n .ok_or_else(|| anyhow::anyhow!(\"Could not find upper bound\"))?;\n\n\n\n // Binary search the boundary of the test\n\n loop {\n\n let mid = (low + high) / 2;\n\n\n\n if let Some(value) = test(mid) {\n\n low = mid + 1;\n", "file_path": "src/lib.rs", "rank": 24, "score": 92886.87519971583 }, { "content": "pub fn parse(input: &str) -> nom::IResult<&str, Board, VerboseError<&str>> {\n\n nom::combinator::map(parse_usize_matrix::<5, 5>, |matrix| Board {\n\n values: matrix,\n\n state: Default::default(),\n\n })(input)\n\n}\n\n\n", "file_path": "src/day4/board.rs", "rank": 25, "score": 92886.87519971583 }, { "content": "fn challenge_one(input: &[usize]) -> usize {\n\n let positions = PositionCounters::from_input(input);\n\n\n\n let mut costs = vec![0; positions.len()];\n\n\n\n for (i, count) in positions.iter().enumerate() {\n\n for (j, slot) in costs.iter_mut().enumerate() {\n\n *slot += if i < j {\n\n count * (j - i)\n\n } else {\n\n count * (i - j)\n\n };\n\n }\n\n }\n\n\n\n *costs.iter().min().unwrap()\n\n}\n\n\n", "file_path": "src/day7/main.rs", "rank": 26, "score": 89101.70697217621 }, { "content": "fn challenge_two(input: &[usize]) -> usize {\n\n let positions = PositionCounters::from_input(input);\n\n\n\n let mut costs = vec![0; positions.len()];\n\n\n\n for (i, count) in positions.iter().enumerate() {\n\n for (j, slot) in costs.iter_mut().enumerate() {\n\n let distance = if i < j { j - i } else { i - j };\n\n\n\n *slot += count * (distance + 1) * distance / 2;\n\n }\n\n }\n\n\n\n *costs.iter().min().unwrap()\n\n}\n\n\n", "file_path": "src/day7/main.rs", "rank": 27, "score": 89101.70697217621 }, { "content": "fn challenge_one(input: &Grid) -> usize {\n\n solution::calculate_cost(input)\n\n}\n\n\n", "file_path": "src/day15/main.rs", "rank": 28, "score": 80170.80853490831 }, { "content": "fn challenge_two(input: &[u8]) -> usize {\n\n let mut fishes = FastFishes::from_input(input);\n\n\n\n for _ in 0..256 {\n\n fishes.advance_day();\n\n }\n\n\n\n fishes.0.iter().sum()\n\n}\n\n\n", "file_path": "src/day6/main.rs", "rank": 29, "score": 80170.80853490831 }, { "content": "fn challenge_one(input: &[u8]) -> usize {\n\n let mut fishes = FastFishes::from_input(input);\n\n\n\n for _ in 0..80 {\n\n fishes.advance_day();\n\n }\n\n\n\n fishes.0.iter().sum()\n\n}\n\n\n", "file_path": "src/day6/main.rs", "rank": 30, "score": 80170.80853490831 }, { "content": "fn challenge_one(input: &[Instruction]) -> usize {\n\n let (h, v) = input\n\n .iter()\n\n .fold((0, 0), |(h, v), instruction| match instruction {\n\n Instruction::Forward(x) => (h + x, v),\n\n Instruction::Down(x) => (h, v + x),\n\n Instruction::Up(x) => (h, v - x),\n\n });\n\n\n\n h * v\n\n}\n\n\n", "file_path": "src/day2/main.rs", "rank": 31, "score": 80170.80853490831 }, { "content": "fn challenge_two(input: &[Instruction]) -> usize {\n\n let (h, v, _) = input\n\n .iter()\n\n .fold((0, 0, 0), |(h, v, aim), instruction| match instruction {\n\n Instruction::Forward(x) => (h + x, v + (aim * x), aim),\n\n Instruction::Down(x) => (h, v, aim + x),\n\n Instruction::Up(x) => (h, v, aim - x),\n\n });\n\n\n\n h * v\n\n}\n\n\n", "file_path": "src/day2/main.rs", "rank": 32, "score": 80170.80853490831 }, { "content": "fn challenge_two(input: &Grid) -> usize {\n\n let grid_size = input.size();\n\n let mut expanded_grid = Grid::new(grid_size * 5);\n\n\n\n for y in 0..expanded_grid.size() {\n\n for x in 0..expanded_grid.size() {\n\n let offset = (x / grid_size) + (y / grid_size);\n\n\n\n expanded_grid[(x, y)] = (input[(x % grid_size, y % grid_size)] + offset - 1) % 9 + 1;\n\n }\n\n }\n\n\n\n solution::calculate_cost(&expanded_grid)\n\n}\n\n\n", "file_path": "src/day15/main.rs", "rank": 33, "score": 80170.80853490831 }, { "content": "fn challenge_two(input: &input::Input) -> usize {\n\n input\n\n .lines\n\n .iter()\n\n .map(input::Line::validate)\n\n .filter_map(|result| match result {\n\n LineResult::Ok | LineResult::Corrupted { .. } => None,\n\n LineResult::Incomplete { missing_brackets } => Some(missing_brackets),\n\n })\n\n .map(|missing_brackets| {\n\n missing_brackets\n\n .into_iter()\n\n .rev()\n\n .fold(0, |acc, bracket| match bracket {\n\n Bracket::CloseRound => (acc * 5) + 1,\n\n Bracket::CloseSquare => (acc * 5) + 2,\n\n Bracket::CloseCurly => (acc * 5) + 3,\n\n Bracket::CloseAngle => (acc * 5) + 4,\n\n _ => unreachable!(\"Opening brackets should not be unexpected\"),\n\n })\n\n })\n\n .collect_vec()\n\n .tap_mut(|result| result.sort_unstable())\n\n .pipe(|result| result[(result.len() - 1) / 2])\n\n}\n\n\n", "file_path": "src/day10/main.rs", "rank": 34, "score": 77857.94406618684 }, { "content": "fn challenge_two(input: &input::Data) -> usize {\n\n input\n\n .iter()\n\n .map(|entry| {\n\n let mut processor = process::EntryProcessor::new(entry.patterns);\n\n\n\n processor.process_trivial();\n\n processor.first_inference();\n\n processor.second_inference();\n\n\n\n processor\n\n .apply_conclusions(&entry.digits)\n\n .into_iter()\n\n .collect::<Option<ArrayVec<_, 4>>>()\n\n .expect(\"all digits should be decoded\")\n\n .into_iter()\n\n .rev()\n\n .enumerate()\n\n .fold(0, |acc, (i, digit)| {\n\n acc + digit * 10usize.pow(i.try_into().expect(\"i < 4\"))\n\n })\n\n })\n\n .sum()\n\n}\n\n\n", "file_path": "src/day8/main.rs", "rank": 35, "score": 77857.94406618684 }, { "content": "fn challenge_two(input: &input::Data) -> usize {\n\n let mut diagram = input::Diagram::new();\n\n\n\n input.lines.iter().for_each(|line| diagram.add_line(line));\n\n\n\n diagram.get_intersections().len()\n\n}\n\n\n", "file_path": "src/day5/main.rs", "rank": 36, "score": 77857.94406618684 }, { "content": "fn challenge_two(input: &data::State) -> usize {\n\n let mut input = input.clone();\n\n\n\n for step in 1.. {\n\n if input.advance_state() == 100 {\n\n return step;\n\n }\n\n }\n\n\n\n unreachable!(\"For loop should break with the answer\")\n\n}\n\n\n", "file_path": "src/day11/main.rs", "rank": 37, "score": 77857.94406618684 }, { "content": "fn challenge_one(input: &input::Data) -> usize {\n\n let mut diagram = input::Diagram::new();\n\n\n\n input\n\n .lines\n\n .iter()\n\n .filter(|line| line.is_cardinal())\n\n .for_each(|line| diagram.add_line(line));\n\n\n\n diagram.get_intersections().len()\n\n}\n\n\n", "file_path": "src/day5/main.rs", "rank": 38, "score": 77857.94406618684 }, { "content": "fn challenge_one(input: &data::Packet) -> usize {\n\n input\n\n .linearize()\n\n .into_iter()\n\n .map(|x| x.version() as usize)\n\n .sum()\n\n}\n\n\n", "file_path": "src/day16/main.rs", "rank": 39, "score": 77857.94406618684 }, { "content": "fn challenge_one(input: &data::Data) -> usize {\n\n let folded_grid: data::Grid = input.fold_instructions[0].apply(&input.grid);\n\n\n\n folded_grid.dots.len()\n\n}\n\n\n", "file_path": "src/day13/main.rs", "rank": 40, "score": 77857.94406618684 }, { "content": "fn challenge_one(input: &[Vec<u32>]) -> usize {\n\n let max_x = input.len();\n\n let max_y = input[0].len();\n\n\n\n let mut total_sum = 0;\n\n\n\n for (x, y) in get_coordinates(max_x, max_y) {\n\n let mut adjacent = Vec::with_capacity(4);\n\n\n\n if x > 0 {\n\n adjacent.push(input[x - 1][y]);\n\n }\n\n if x < max_x - 1 {\n\n adjacent.push(input[x + 1][y]);\n\n }\n\n if y > 0 {\n\n adjacent.push(input[x][y - 1]);\n\n }\n\n if y < max_y - 1 {\n\n adjacent.push(input[x][y + 1]);\n", "file_path": "src/day9/main.rs", "rank": 41, "score": 77857.94406618684 }, { "content": "fn challenge_two(input: data::Area) -> usize {\n\n let max_x = input.max_x();\n\n let max_y = input.max_y().abs().max(input.min_y().abs());\n\n\n\n (0..=max_x)\n\n .cartesian_product(-max_y..=max_y)\n\n .filter_map(|(x, y)| {\n\n let probe = data::Probe::launch(x, y);\n\n\n\n probe.intersects(input).map(|time| (probe, time))\n\n })\n\n .count()\n\n}\n\n\n", "file_path": "src/day17/main.rs", "rank": 42, "score": 77857.94406618684 }, { "content": "fn challenge_one(input: &input::Input) -> usize {\n\n input\n\n .lines\n\n .iter()\n\n .map(input::Line::validate)\n\n .filter_map(|result| match result {\n\n LineResult::Ok | LineResult::Incomplete { .. } => None,\n\n LineResult::Corrupted { found, .. } => Some(found),\n\n })\n\n .map(|bracket| match bracket {\n\n Bracket::CloseRound => 3,\n\n Bracket::CloseSquare => 57,\n\n Bracket::CloseCurly => 1197,\n\n Bracket::CloseAngle => 25137,\n\n _ => unreachable!(\"Opening brackets should not be unexpected\"),\n\n })\n\n .sum()\n\n}\n\n\n", "file_path": "src/day10/main.rs", "rank": 43, "score": 77857.94406618684 }, { "content": "fn challenge_one(input: &data::State) -> usize {\n\n let mut input = input.clone();\n\n\n\n let mut total_flahes = 0;\n\n\n\n for _ in 0..100 {\n\n total_flahes += input.advance_state();\n\n }\n\n\n\n total_flahes\n\n}\n\n\n", "file_path": "src/day11/main.rs", "rank": 44, "score": 77857.94406618684 }, { "content": "fn challenge_one(input: &str) -> anyhow::Result<usize> {\n\n Ok(parse_input(input)?\n\n .into_iter()\n\n .tuple_windows::<(usize, usize)>()\n\n .fold(0, |v, (a, b)| if b > a { v + 1 } else { v }))\n\n}\n\n\n", "file_path": "src/day1/main.rs", "rank": 45, "score": 75679.6085856855 }, { "content": "fn challenge_two(input: &str) -> anyhow::Result<usize> {\n\n Ok(parse_input(input)?\n\n .into_iter()\n\n .tuple_windows::<(usize, usize, usize)>()\n\n .map(|(a, b, c)| a + b + c)\n\n .tuple_windows::<(usize, usize)>()\n\n .fold(0, |v, (a, b)| if b > a { v + 1 } else { v }))\n\n}\n\n\n", "file_path": "src/day1/main.rs", "rank": 46, "score": 75679.6085856855 }, { "content": "fn challenge_two(_input: &str) -> anyhow::Result<usize> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/day21/main.rs", "rank": 47, "score": 75679.6085856855 }, { "content": "fn challenge_two(_input: &str) -> anyhow::Result<usize> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/day24/main.rs", "rank": 48, "score": 75679.6085856855 }, { "content": "fn challenge_one(_input: &str) -> anyhow::Result<usize> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/day23/main.rs", "rank": 49, "score": 75679.6085856855 }, { "content": "fn challenge_one(_input: &str) -> anyhow::Result<usize> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/day20/main.rs", "rank": 50, "score": 75679.6085856855 }, { "content": "fn challenge_one(_input: &str) -> anyhow::Result<usize> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/day19/main.rs", "rank": 51, "score": 75679.6085856855 }, { "content": "fn challenge_one(_input: &str) -> anyhow::Result<usize> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/day21/main.rs", "rank": 52, "score": 75679.6085856855 }, { "content": "fn challenge_one(_input: &str) -> anyhow::Result<usize> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/day25/main.rs", "rank": 53, "score": 75679.6085856855 }, { "content": "fn challenge_two(_input: &str) -> anyhow::Result<usize> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/day20/main.rs", "rank": 54, "score": 75679.6085856855 }, { "content": "fn challenge_two(_input: &str) -> anyhow::Result<usize> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/day25/main.rs", "rank": 55, "score": 75679.6085856855 }, { "content": "fn challenge_two(_input: &str) -> anyhow::Result<usize> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/day23/main.rs", "rank": 56, "score": 75679.6085856855 }, { "content": "fn challenge_one(_input: &str) -> anyhow::Result<usize> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/day24/main.rs", "rank": 57, "score": 75679.6085856855 }, { "content": "fn challenge_two(_input: &str) -> anyhow::Result<usize> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/day22/main.rs", "rank": 58, "score": 75679.6085856855 }, { "content": "fn challenge_two(_input: &str) -> anyhow::Result<usize> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/day19/main.rs", "rank": 59, "score": 75679.6085856855 }, { "content": "fn challenge_one(_input: &str) -> anyhow::Result<usize> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/day22/main.rs", "rank": 60, "score": 75679.6085856855 }, { "content": "fn parse_grid(input: &str) -> ParseResult<Grid> {\n\n map(separated_list1(newline, parse_point), |points| Grid {\n\n dots: points.into_iter().collect(),\n\n })(input)\n\n}\n\n\n", "file_path": "src/day13/data/parser.rs", "rank": 61, "score": 75359.07872734543 }, { "content": "fn parse_point(input: &str) -> ParseResult<Point> {\n\n map(\n\n separated_pair(parse_usize, tag(\",\"), parse_usize),\n\n |(x, y)| Point(x, y),\n\n )(input)\n\n}\n\n\n", "file_path": "src/day13/data/parser.rs", "rank": 62, "score": 75359.07872734543 }, { "content": "fn challenge_two(input: &InputData) -> anyhow::Result<usize> {\n\n let mut boards = input.boards.iter().copied().collect_vec();\n\n\n\n for &number in &input.chosen_numbers {\n\n boards.iter_mut().for_each(|board| board.mark_value(number));\n\n\n\n if boards.len() == 1 {\n\n if let Some(score) = boards[0].winning_score() {\n\n return Ok(score * number);\n\n }\n\n }\n\n\n\n boards.retain(|board| !board.is_winner());\n\n }\n\n\n\n bail!(\"No winning score found\")\n\n}\n\n\n", "file_path": "src/day4/main.rs", "rank": 63, "score": 74222.84167488855 }, { "content": "fn challenge_one(input: &InputData) -> anyhow::Result<usize> {\n\n let mut boards = input.boards.iter().copied().collect_vec();\n\n\n\n for &number in &input.chosen_numbers {\n\n for board in &mut boards {\n\n board.mark_value(number);\n\n if let Some(score) = board.winning_score() {\n\n return Ok(score * number);\n\n }\n\n }\n\n }\n\n\n\n bail!(\"No winning score found\")\n\n}\n\n\n", "file_path": "src/day4/main.rs", "rank": 64, "score": 74222.84167488855 }, { "content": "fn challenge_one(input: &CaveSystem) -> anyhow::Result<usize> {\n\n Ok(input\n\n .get_cave(\"start\")\n\n .ok_or(anyhow!(\"missing start cave\"))?\n\n .seek_no_double_small(\"end\")\n\n .count())\n\n}\n\n\n", "file_path": "src/day12/main.rs", "rank": 65, "score": 74222.84167488855 }, { "content": "fn challenge_two(input: &CaveSystem) -> anyhow::Result<usize> {\n\n Ok(input\n\n .get_cave(\"start\")\n\n .ok_or(anyhow!(\"missing start cave\"))?\n\n .seek_single_double_small(\"end\")\n\n .count())\n\n}\n\n\n", "file_path": "src/day12/main.rs", "rank": 66, "score": 74222.84167488855 }, { "content": "fn parse_instruction(input: &str) -> ParseResult<FoldInstruction> {\n\n map(\n\n preceded(\n\n tag(\"fold along \"),\n\n separated_pair(one_of(\"xy\"), tag(\"=\"), parse_usize),\n\n ),\n\n |(axis, value)| match axis {\n\n 'x' => FoldInstruction::AlongX(value),\n\n 'y' => FoldInstruction::AlongY(value),\n\n _ => unreachable!(),\n\n },\n\n )(input)\n\n}\n\n\n", "file_path": "src/day13/data/parser.rs", "rank": 67, "score": 73960.34632407645 }, { "content": "fn challenge_one(input: &input::Data) -> anyhow::Result<usize> {\n\n input\n\n .iter()\n\n .map(|entry| {\n\n let mut processor = process::EntryProcessor::new(entry.patterns);\n\n\n\n processor.process_trivial();\n\n\n\n processor\n\n .apply_conclusions(&entry.digits)\n\n .into_iter()\n\n .filter(Option::is_some)\n\n .count()\n\n })\n\n .sum1()\n\n .ok_or(anyhow::anyhow!(\"No patterns found\"))\n\n}\n\n\n", "file_path": "src/day8/main.rs", "rank": 68, "score": 73623.94440362963 }, { "content": "fn parse_input(input: &str) -> anyhow::Result<Vec<usize>> {\n\n input\n\n .lines()\n\n .map(|line| {\n\n line.parse()\n\n .with_context(|| format!(\"could not parse line {}: {}\", 0, line))\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/day1/main.rs", "rank": 69, "score": 73623.94440362963 }, { "content": "fn challenge_one(input: &data::Data) -> anyhow::Result<usize> {\n\n let mut polymer = input.template.clone();\n\n\n\n for _ in 0..10 {\n\n polymer.grow(&input.rules);\n\n }\n\n\n\n extract_answer_from_counters(polymer.elements().iter().copied().counts())\n\n}\n\n\n", "file_path": "src/day14/main.rs", "rank": 70, "score": 73623.94440362963 }, { "content": "fn challenge_two(input: &data::Data) -> anyhow::Result<usize> {\n\n let mut pair_counters = data::PairCounters::from(&input.template);\n\n\n\n for _ in 0..40 {\n\n pair_counters.project_growth(&input.rules);\n\n }\n\n\n\n let mut element_counters = pair_counters.into_element_counters();\n\n\n\n element_counters\n\n .entry(input.template.elements().last().copied().unwrap())\n\n .or_default()\n\n .add_assign(1);\n\n\n\n extract_answer_from_counters(element_counters)\n\n}\n\n\n", "file_path": "src/day14/main.rs", "rank": 71, "score": 73623.94440362963 }, { "content": "#[derive(Copy, Clone, Eq)]\n\nstruct Cost {\n\n own: usize,\n\n minimum: usize,\n\n}\n\n\n\nimpl Cost {\n\n fn total(&self) -> usize {\n\n self.own + self.minimum\n\n }\n\n\n\n fn apply_neighbor(&mut self, cost: usize) -> bool {\n\n if self.minimum > cost {\n\n self.minimum = cost;\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/day15/solution.rs", "rank": 72, "score": 71635.60284984203 }, { "content": "fn parse_u3_tag(data: BitInput) -> ParseResult<u8, BitInput> {\n\n bits::complete::take(3usize)(data)\n\n}\n\n\n", "file_path": "src/day16/parser.rs", "rank": 73, "score": 70708.2440376755 }, { "content": "fn parse_literal_value(data: BitInput) -> ParseResult<u64, BitInput> {\n\n let (remainder, result): (BitInput, Vec<u8>) = multi::many0(sequence::preceded(\n\n bits::complete::tag(1usize, 1usize),\n\n bits::complete::take(4usize),\n\n ))(data)?;\n\n\n\n let (remainder, last_part): (BitInput, u8) = sequence::preceded(\n\n bits::complete::tag(0usize, 1usize),\n\n bits::complete::take(4_u8),\n\n )(remainder)?;\n\n\n\n let mut value = 0_u64;\n\n\n\n for part in result {\n\n value = (value << 4) | u64::from(part);\n\n }\n\n\n\n value = (value << 4) | u64::from(last_part);\n\n\n\n Ok((remainder, value))\n\n}\n\n\n", "file_path": "src/day16/parser.rs", "rank": 74, "score": 70708.2440376755 }, { "content": "fn parse_packet_inner(data: BitInput) -> ParseResult<Packet, BitInput> {\n\n let (remainder, (version, type_id)) = sequence::pair(parse_u3_tag, parse_u3_tag)(data)?;\n\n\n\n // Literal packet fast-path\n\n if type_id == 4u8 {\n\n return map(parse_literal_value, |value| Packet::Literal {\n\n version,\n\n value,\n\n })(remainder);\n\n }\n\n\n\n nom::combinator::map_res(parse_operator_subpackets, move |packets| {\n\n OperatorType::try_from(type_id).map(|operator_type| Packet::Operator {\n\n version,\n\n operator_type,\n\n packets,\n\n })\n\n })(remainder)\n\n}\n\n\n", "file_path": "src/day16/parser.rs", "rank": 75, "score": 70708.2440376755 }, { "content": "#[derive(Debug, Default)]\n\nstruct InputData {\n\n chosen_numbers: Vec<usize>,\n\n boards: Vec<board::Board>,\n\n}\n\n\n", "file_path": "src/day4/main.rs", "rank": 76, "score": 70570.16649684822 }, { "content": "struct Controller {\n\n input: Vec<Vec<u32>>,\n\n counter: Vec<Vec<u32>>,\n\n}\n\n\n", "file_path": "src/day9/challenge_two.rs", "rank": 77, "score": 70570.16649684822 }, { "content": "fn parse_operator_length(data: BitInput) -> ParseResult<OperatorLength, BitInput> {\n\n let (remainder, tag): (BitInput, u8) = bits::complete::take(1usize)(data)?;\n\n\n\n match tag {\n\n 0 => map(bits::complete::take(15usize), OperatorLength::Bits)(remainder),\n\n 1 => map(bits::complete::take(11usize), OperatorLength::Packets)(remainder),\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "src/day16/parser.rs", "rank": 78, "score": 69492.7120790663 }, { "content": "fn parse_operator_subpackets(data: BitInput) -> ParseResult<Vec<Packet>, BitInput> {\n\n let (remainder, length) = parse_operator_length(data)?;\n\n\n\n match length {\n\n OperatorLength::Bits(length) => {\n\n if remainder.input_len() < length {\n\n return Err(nom::Err::Error(nom::error::VerboseError {\n\n errors: vec![(\n\n remainder,\n\n nom::error::VerboseErrorKind::Context(\"not enough data to create packets\"),\n\n )],\n\n }));\n\n }\n\n\n\n let target_remainder_length = remainder.input_len() - length;\n\n\n\n let mut iterator = nom::combinator::iterator(\n\n remainder,\n\n sequence::terminated(\n\n parse_packet_inner,\n", "file_path": "src/day16/parser.rs", "rank": 79, "score": 68893.81480780739 }, { "content": "type BitInput<'a> = (&'a [u8], usize);\n\n\n", "file_path": "src/day16/parser.rs", "rank": 80, "score": 66655.79238685309 }, { "content": "struct StateInteger<'a>(&'a [State]);\n\n\n\nimpl<'a, T: AsRef<[State]> + ?Sized> From<&'a T> for StateInteger<'a> {\n\n fn from(s: &'a T) -> Self {\n\n Self(s.as_ref())\n\n }\n\n}\n\n\n\nimpl From<StateInteger<'_>> for usize {\n\n fn from(s: StateInteger<'_>) -> Self {\n\n let mut value = 0;\n\n\n\n for (offset, &state) in s.0.iter().rev().enumerate() {\n\n if let State::One = state {\n\n value |= 1 << offset;\n\n }\n\n }\n\n\n\n value\n\n }\n", "file_path": "src/day3/challenge_two.rs", "rank": 81, "score": 64317.58436869578 }, { "content": "fn main() {\n\n process(\n\n \"sample\",\n\n data::Area::new(data::Point(20, -10), data::Point(30, -5)),\n\n );\n\n\n\n process(\n\n \"input\",\n\n data::Area::new(data::Point(253, -73), data::Point(280, -46)),\n\n );\n\n}\n", "file_path": "src/day17/main.rs", "rank": 82, "score": 54080.34071221431 }, { "content": "#[test]\n\nfn clear_board() {\n\n let board = Board::default();\n\n assert_eq!(board.winning_score(), None);\n\n}\n\n\n\nimpl Debug for Board {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n f.write_str(\"BingoBoard {\\n\")?;\n\n f.write_str(\" values: [\\n\")?;\n\n for row in &self.values {\n\n f.write_str(\" [\")?;\n\n for value in row {\n\n write!(f, \"{:>3}\", value)?;\n\n }\n\n f.write_str(\"],\\n\")?;\n\n }\n\n f.write_str(\" ],\\n\")?;\n\n\n\n f.write_str(\" state: [\\n\")?;\n\n for row in &self.state {\n", "file_path": "src/day4/board.rs", "rank": 83, "score": 53103.972601244386 }, { "content": "#[test]\n\nfn drawing_lines() {\n\n let mut diagram = Diagram::new();\n\n diagram.add_line(&Line(Point(1, 1), Point(1, 5)));\n\n diagram.add_line(&Line(Point(5, 1), Point(5, 3)));\n\n diagram.add_line(&Line(Point(0, 5), Point(2, 5)));\n\n\n\n assert_eq!(\n\n format!(\"{}\", diagram),\n\n \"......\\n\\\n\n .1...1\\n\\\n\n .1...1\\n\\\n\n .1...1\\n\\\n\n .1....\\n\\\n\n 121...\\n\"\n\n );\n\n}\n\n\n", "file_path": "src/day5/input.rs", "rank": 84, "score": 53103.972601244386 }, { "content": "#[test]\n\nfn parse_display() {\n\n assert_eq!(\n\n Parser::parse_display(\"abcdefg\"),\n\n Ok((\"\", DisplayState(0b0111_1111)))\n\n );\n\n assert_eq!(\n\n Parser::parse_display(\"\"),\n\n Err(nom::Err::Error(nom::error::VerboseError {\n\n errors: vec![(\n\n \"\",\n\n nom::error::VerboseErrorKind::Nom(nom::error::ErrorKind::Many1)\n\n )],\n\n }))\n\n );\n\n}\n\n\n", "file_path": "src/day8/input.rs", "rank": 85, "score": 53103.972601244386 }, { "content": "#[test]\n\nfn parse_digits() {\n\n assert_eq!(\n\n Parser::parse_digits::<4>(\"abcdefg abcdefg abcdefg abcdefg\"),\n\n Ok((\n\n \"\",\n\n [\n\n DisplayState(0b0111_1111),\n\n DisplayState(0b0111_1111),\n\n DisplayState(0b0111_1111),\n\n DisplayState(0b0111_1111),\n\n ]\n\n ))\n\n );\n\n}\n", "file_path": "src/day8/input.rs", "rank": 86, "score": 53103.972601244386 }, { "content": "#[test]\n\nfn line_points_horizontal() {\n\n let line = Line(Point(1, 1), Point(5, 1));\n\n assert_eq!(\n\n line.points(),\n\n vec![\n\n Point(1, 1),\n\n Point(2, 1),\n\n Point(3, 1),\n\n Point(4, 1),\n\n Point(5, 1),\n\n ]\n\n );\n\n}\n\n\n", "file_path": "src/day5/input.rs", "rank": 87, "score": 52182.20642447944 }, { "content": "#[test]\n\nfn line_points_vertical() {\n\n let line = Line(Point(1, 1), Point(1, 5));\n\n assert_eq!(\n\n line.points(),\n\n vec![\n\n Point(1, 1),\n\n Point(1, 2),\n\n Point(1, 3),\n\n Point(1, 4),\n\n Point(1, 5),\n\n ]\n\n );\n\n}\n\n\n", "file_path": "src/day5/input.rs", "rank": 88, "score": 52182.20642447944 }, { "content": "#[test]\n\nfn drawing_sample_lines() {\n\n let mut diagram = Diagram::new();\n\n\n\n diagram.add_line(&Line(Point(0, 9), Point(5, 9)));\n\n diagram.add_line(&Line(Point(0, 9), Point(2, 9)));\n\n\n\n diagram.add_line(&Line(Point(9, 4), Point(3, 4)));\n\n diagram.add_line(&Line(Point(2, 2), Point(2, 1)));\n\n diagram.add_line(&Line(Point(7, 0), Point(7, 4)));\n\n diagram.add_line(&Line(Point(3, 4), Point(1, 4)));\n\n\n\n assert_eq!(\n\n format!(\"{}\", diagram),\n\n \".......1..\\n\\\n\n ..1....1..\\n\\\n\n ..1....1..\\n\\\n\n .......1..\\n\\\n\n .112111211\\n\\\n\n ..........\\n\\\n\n ..........\\n\\\n\n ..........\\n\\\n\n ..........\\n\\\n\n 222111....\\n\"\n\n );\n\n}\n", "file_path": "src/day5/input.rs", "rank": 89, "score": 52182.20642447944 }, { "content": "#[test]\n\nfn line_points_diagonal_minus_plus() {\n\n let line = Line(Point(5, 1), Point(1, 5));\n\n assert_eq!(\n\n line.points(),\n\n vec![\n\n Point(5, 1),\n\n Point(4, 2),\n\n Point(3, 3),\n\n Point(2, 4),\n\n Point(1, 5),\n\n ]\n\n );\n\n}\n\n\n", "file_path": "src/day5/input.rs", "rank": 90, "score": 50485.12900348121 }, { "content": "#[test]\n\nfn line_points_diagonal_minus_minus() {\n\n let line = Line(Point(5, 5), Point(1, 1));\n\n assert_eq!(\n\n line.points(),\n\n vec![\n\n Point(5, 5),\n\n Point(4, 4),\n\n Point(3, 3),\n\n Point(2, 2),\n\n Point(1, 1),\n\n ]\n\n );\n\n}\n\n\n\npub struct Diagram {\n\n pub points: Vec<Point>,\n\n}\n\n\n\nimpl Diagram {\n\n pub fn new() -> Diagram {\n", "file_path": "src/day5/input.rs", "rank": 91, "score": 50485.12900348121 }, { "content": "#[test]\n\nfn line_points_diagonal_plus_minus() {\n\n let line = Line(Point(1, 5), Point(5, 1));\n\n assert_eq!(\n\n line.points(),\n\n vec![\n\n Point(1, 5),\n\n Point(2, 4),\n\n Point(3, 3),\n\n Point(4, 2),\n\n Point(5, 1),\n\n ]\n\n );\n\n}\n\n\n", "file_path": "src/day5/input.rs", "rank": 92, "score": 50485.12900348121 }, { "content": "#[test]\n\nfn line_points_diagonal_plus_plus() {\n\n let line = Line(Point(1, 1), Point(5, 5));\n\n assert_eq!(\n\n line.points(),\n\n vec![\n\n Point(1, 1),\n\n Point(2, 2),\n\n Point(3, 3),\n\n Point(4, 4),\n\n Point(5, 5),\n\n ]\n\n );\n\n}\n\n\n", "file_path": "src/day5/input.rs", "rank": 93, "score": 50485.12900348121 }, { "content": "fn main() -> anyhow::Result<()> {\n\n process(\"sample\").context(\"sample data\")?;\n\n process(\"input\").context(\"real data\")?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/day4/main.rs", "rank": 94, "score": 50447.01095982037 }, { "content": "fn main() -> anyhow::Result<()> {\n\n process(\"sample\").context(\"sample data\")?;\n\n process(\"input\").context(\"real data\")?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/day5/main.rs", "rank": 95, "score": 50447.01095982037 }, { "content": "fn main() -> anyhow::Result<()> {\n\n process(\"sample\").context(\"sample data\")?;\n\n process(\"input\").context(\"real data\")?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/day25/main.rs", "rank": 96, "score": 50447.01095982037 }, { "content": "fn main() -> anyhow::Result<()> {\n\n process(\"sample\").context(\"sample data\")?;\n\n process(\"input\").context(\"real data\")?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/day6/main.rs", "rank": 97, "score": 50447.01095982037 }, { "content": "fn main() -> anyhow::Result<()> {\n\n process(\"sample\").context(\"sample data\")?;\n\n process(\"input\").context(\"real data\")?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/day24/main.rs", "rank": 98, "score": 50447.01095982037 }, { "content": "fn main() -> anyhow::Result<()> {\n\n process(\"sample\").context(\"sample data\")?;\n\n process(\"input\").context(\"real data\")?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/day3/main.rs", "rank": 99, "score": 50447.01095982037 } ]
Rust
crates/melody_cli/src/main.rs
ikroeber/melody
693832f659482d100000da84d6fc55b8ff43dd07
pub mod consts; pub mod macros; pub mod output; pub mod utils; use clap::Parser; use consts::COMMAND_MARKER; use melody_compiler::{compiler, ParseError}; use output::{ print_output, print_output_pretty, print_repl_welcome, print_source_line, prompt, report_clear, report_exit, report_missing_path, report_no_lines_to_print, report_nothing_to_redo, report_nothing_to_undo, report_parse_error, report_read_file_error, report_read_input_error, report_redo, report_repl_parse_error, report_source, report_undo, report_unrecognized_command, report_write_file_error, }; use std::fs::{read_to_string, write}; use utils::{exit, read_input, ExitCode}; #[derive(Parser, Debug)] #[clap(about, version, author)] struct Args { #[clap(value_name = "INPUT_FILE_PATH", help = "Read from a file")] input_file_path: Option<String>, #[clap( short = 'o', long = "output", value_name = "OUTPUT_FILE_PATH", help = "Write to a file" )] output_file_path: Option<String>, #[clap(short = 'n', long = "no-color", help = "Print output with no color")] no_color_output: bool, #[clap(short = 'r', long = "repl", help = "Start the Melody REPL")] start_repl: bool, } enum CliError { MissingPath, ReadFileError(String), ParseError(ParseError), WriteFileError(String), ReadInputError, } fn main() { match cli() { Ok(_) => exit(ExitCode::Ok), Err(error) => { match error { CliError::MissingPath => report_missing_path(), CliError::ReadFileError(path) => report_read_file_error(path), CliError::WriteFileError(output_file_path) => { report_write_file_error(output_file_path) } CliError::ParseError(parse_error) => report_parse_error( parse_error.token, parse_error.line, parse_error.line_index + 1, ), CliError::ReadInputError => report_read_input_error(), } exit(ExitCode::Error) } }; } fn cli() -> Result<(), CliError> { let args = Args::parse(); let Args { start_repl, input_file_path, output_file_path, no_color_output, } = args; if start_repl { return repl(); } let input_file_path = input_file_path.ok_or(CliError::MissingPath)?; let source = read_to_string(input_file_path.clone()) .map_err(|_| CliError::ReadFileError(input_file_path))?; let compiler_output = compiler(&source).map_err(CliError::ParseError)?; match output_file_path { Some(output_file_path) => { write(&output_file_path, compiler_output) .map_err(|_| CliError::WriteFileError(output_file_path))?; } None => { if no_color_output { print_output(compiler_output); } else { print_output_pretty(compiler_output); } } } Ok(()) } fn repl() -> Result<(), CliError> { print_repl_welcome(); let mut valid_lines: Vec<String> = Vec::new(); let mut redo_lines: Vec<String> = Vec::new(); 'repl: loop { prompt(); let input = read_input().map_err(|_| CliError::ReadInputError)?; if input.starts_with(COMMAND_MARKER) { match input.as_str() { format_command!("u", "undo") => { if valid_lines.is_empty() { report_nothing_to_undo(); } else { report_undo(false); let latest = valid_lines.pop().unwrap(); redo_lines.push(latest); if !valid_lines.is_empty() { let source = &valid_lines.join("\n"); let raw_output = compiler(source); let output = raw_output.unwrap(); print_output_pretty(format!("{output}\n")); } } } format_command!("r", "redo") => { if redo_lines.is_empty() { report_nothing_to_redo(); } else { report_redo(); let latest = redo_lines.pop().unwrap(); valid_lines.push(latest); let source = &valid_lines.join("\n"); let raw_output = compiler(source); let output = raw_output.unwrap(); print_output_pretty(format!("{output}\n")); } } format_command!("s", "source") => { if valid_lines.is_empty() { report_no_lines_to_print(); } else { report_source(); for (line_index, line) in valid_lines.iter().enumerate() { print_source_line(line_index + 1, String::from(line)); } println!(); } } format_command!("c", "clear") => { report_clear(); valid_lines.clear(); redo_lines.clear(); } format_command!("e", "exit") => { report_exit(); return Ok(()); } _ => report_unrecognized_command(input.trim().to_owned()), } continue 'repl; } if input.is_empty() { let source = &valid_lines.join("\n"); let raw_output = compiler(source); let output = raw_output.unwrap(); print_output_pretty(format!("{output}\n")); continue 'repl; } valid_lines.push(input); let source = &valid_lines.join("\n"); let raw_output = compiler(source); if let Err(error) = raw_output { let ParseError { token, line: _, line_index: _, } = error; report_repl_parse_error(token); valid_lines.pop(); continue 'repl; } redo_lines.clear(); let output = raw_output.unwrap(); print_output_pretty(format!("{output}\n")) } }
pub mod consts; pub mod macros; pub mod output; pub mod utils; use clap::Parser; use consts::COMMAND_MARKER; use melody_compiler::{compiler, ParseError}; use output::{ print_output, print_output_pretty, print_repl_welcome, print_source_line, prompt, report_clear, report_exit, report_missing_path, report_no_lines_to_print, report_nothing_to_redo, report_nothing_to_undo, report_parse_error, report_read_file_error, report_read_input_error, report_redo, report_repl_parse_error, report_source, report_undo, report_unrecognized_command, report_write_file_error, }; use std::fs::{read_to_string, write}; use utils::{exit, read_input, ExitCode}; #[derive(Parser, Debug)] #[clap(about, version, author)] struct Args { #[clap(value_name = "INPUT_FILE_PATH", help = "Read from a file")] input_file_path: Option<String>, #[clap( short = 'o', long = "output", value_name = "OUTPUT_FILE_PATH", help = "Write to a file" )] output_file_path: Option<String>, #[clap(short = 'n', long = "no-color", help = "Print output with no color")] no_color_output: bool, #[clap(short = 'r', long = "repl", help = "Start the Melody REPL")] start_repl: bool, } enum CliError { MissingPath, ReadFileError(String), ParseError(ParseError), WriteFileError(String), ReadInputError, } fn main() { match cli() { Ok(_) => exit(ExitCode::Ok), Err(error) => { match error { CliError::MissingPath => report_missing_path(), CliError::ReadFileError(path) => report_read_file_error(path), CliError::WriteFileError(output_file_path) => { report_write_file_error(output_file_path) } CliError::ParseError(parse_error) => report_parse_error( parse_error.token, parse_error.line, parse_error.line_index + 1, ), CliError::ReadInputError => report_read_input_error(), } exit(ExitCode::Error) } }; } fn cli() -> Result<(), CliError> { let args = Args::parse(); let Args { start_repl, input_file_path, output_file_path, no_color_output, } = args; if start_repl { return repl(); } let input_file_path = input_file_path.ok_or(CliError::MissingPath)?; let source = read_to_string(input_file_path.clone()) .map_err(|_| CliError::ReadFileError(input_file_path))?; let compiler_output = compiler(&source).map_err(CliError::ParseError)?; match output_file_path { Some(output_file_path) => { write(&output_file_path, compiler_output) .map_err(|_| CliError::WriteFileError(output_file_path))?; } None => { if no_color_output { print_output(compiler_output); } else { print_output_pretty(compiler_output); } } } Ok(()) } fn repl() -> Result<(), CliError> { print_repl_welcome(); let mut valid_lines: Vec<String> = Vec::new(); let mut redo_lines: Vec<String> = Vec::new(); 'repl: loop { prompt(); let input = read_input().map_err(|_| CliError::ReadInputError)?; if input.starts_with(COMMAND_MARKER) { match input.as_str() { format_command!("u", "undo") => { if valid_lines.is_empty() { report_nothing_to_undo(); } else { report_undo(false); let latest = valid_lines.pop().unwrap(); redo_lines.push(latest); if !valid_lines.is_empty() { let source = &valid_lines.join("\n"); let raw_output = compiler(source); let output = raw_output.unwrap(); print_output_pretty(format!("{output}\n")); } } } format_command!("r", "redo") => { if redo_lines.is_empty() { report_nothing_to_redo(); } else { report_redo(); let latest = redo_lines.pop().unwrap(); valid_lines.push(latest); let source = &valid_lines.join("\n"); let raw_output = compiler(source); let output = raw_output.unwrap(); print_output_pretty(format!("{output}\n")); } } format_command!("s", "source") => { if valid_lines.is_empty() { report_no_lines_to_print(); } else { report_source(); for (line_index, line) in valid_lines.iter().enumerate() { print_source_line(line_index + 1, String::from(line)); } println!(); } } format_command!("c", "clear") => { report_clear(); valid_lines.clear(); redo_lines.clear(); } format_command!("e", "exit") => { report_exit(); return Ok(()); } _ => report_unrecognized_command(input.trim().to_owned()), } continue 'repl; } if input.is_empty() { let source = &valid_lines.join("\n"); let raw_output = compiler(source); let output = raw_output.unwrap(); print_output_pretty(format!("{output}\n")); continue 'repl; } valid_lines.push(input); let source = &valid_lines.join("\n"); let raw_output = compiler(source);
redo_lines.clear(); let output = raw_output.unwrap(); print_output_pretty(format!("{output}\n")) } }
if let Err(error) = raw_output { let ParseError { token, line: _, line_index: _, } = error; report_repl_parse_error(token); valid_lines.pop(); continue 'repl; }
if_condition
[]
Rust
crates/wasm-bindgen-cli-support/src/lib.rs
lukewagner/wasm-bindgen
7384bd1967e325101b09234753598122c71eaefe
#[macro_use] extern crate failure; extern crate parity_wasm; extern crate wasm_bindgen_shared as shared; extern crate serde_json; extern crate wasm_gc; use std::fs::File; use std::io::Write; use std::path::{Path, PathBuf}; use failure::Error; use parity_wasm::elements::*; mod js; pub mod wasm2es6js; pub struct Bindgen { path: Option<PathBuf>, nodejs: bool, debug: bool, typescript: bool, } impl Bindgen { pub fn new() -> Bindgen { Bindgen { path: None, nodejs: false, debug: false, typescript: false, } } pub fn input_path<P: AsRef<Path>>(&mut self, path: P) -> &mut Bindgen { self.path = Some(path.as_ref().to_path_buf()); self } pub fn nodejs(&mut self, node: bool) -> &mut Bindgen { self.nodejs = node; self } pub fn debug(&mut self, debug: bool) -> &mut Bindgen { self.debug = debug; self } pub fn typescript(&mut self, typescript: bool) -> &mut Bindgen { self.typescript = typescript; self } pub fn generate<P: AsRef<Path>>(&mut self, path: P) -> Result<(), Error> { self._generate(path.as_ref()) } fn _generate(&mut self, out_dir: &Path) -> Result<(), Error> { let input = match self.path { Some(ref path) => path, None => panic!("must have a path input for now"), }; let stem = input.file_stem().unwrap().to_str().unwrap(); let mut module = parity_wasm::deserialize_file(input).map_err(|e| { format_err!("{:?}", e) })?; let program = extract_program(&mut module); let (js, ts) = js::Js { globals: String::new(), imports: String::new(), typescript: format!("/* tslint:disable */\n"), exposed_globals: Default::default(), required_internal_exports: Default::default(), config: &self, module: &mut module, program: &program, }.generate(stem); let js_path = out_dir.join(stem).with_extension("js"); File::create(&js_path).unwrap() .write_all(js.as_bytes()).unwrap(); if self.typescript { let ts_path = out_dir.join(stem).with_extension("d.ts"); File::create(&ts_path).unwrap() .write_all(ts.as_bytes()).unwrap(); } let wasm_path = out_dir.join(format!("{}_wasm", stem)).with_extension("wasm"); let wasm_bytes = parity_wasm::serialize(module).map_err(|e| { format_err!("{:?}", e) })?; let bytes = wasm_gc::Config::new() .demangle(false) .gc(&wasm_bytes)?; File::create(&wasm_path)?.write_all(&bytes)?; Ok(()) } } fn extract_program(module: &mut Module) -> shared::Program { let data = module.sections_mut() .iter_mut() .filter_map(|s| { match *s { Section::Data(ref mut s) => Some(s), _ => None, } }) .next(); let mut ret = shared::Program { structs: Vec::new(), free_functions: Vec::new(), imports: Vec::new(), imported_structs: Vec::new(), custom_type_names: Vec::new(), }; let data = match data { Some(data) => data, None => return ret, }; for i in (0..data.entries().len()).rev() { { let value = data.entries()[i].value(); if !value.starts_with(b"wbg:") { continue } let json = &value[4..]; let p = match serde_json::from_slice(json) { Ok(f) => f, Err(e) => { panic!("failed to decode what looked like wasm-bindgen data: {}", e) } }; let shared::Program { structs, free_functions, imports, imported_structs, custom_type_names, } = p; ret.structs.extend(structs); ret.free_functions.extend(free_functions); ret.imports.extend(imports); ret.imported_structs.extend(imported_structs); if custom_type_names.len() > 0 { assert_eq!(ret.custom_type_names.len(), 0); } ret.custom_type_names.extend(custom_type_names); } data.entries_mut().remove(i); } return ret }
#[macro_use] extern crate failure; extern crate parity_wasm; extern crate wasm_bindgen_shared as shared; extern crate serde_json; extern crate wasm_gc; use std::fs::File; use std::io::Write; use std::path::{Path, PathBuf}; use failure::Error; use parity_wasm::elements::*; mod js; pub mod wasm2es6js; pub struct Bindgen { path: Option<PathBuf>, nodejs: bool, debug: bool, typescript: bool, } impl Bindgen { pub fn new() -> Bindgen { Bindgen { path: None, nodejs: false, debug: false, typescript: false, } } pub fn input_path<P: AsRef<Path>>(&mut self, path: P) -> &mut Bindgen { self.path = Some(path.as_ref().to_path_buf()); self } pub fn nodejs(&mut self, node: bool) -> &mut Bindgen { self.nodejs = node; self } pub fn debug(&mut self, debug: bool) -> &mut Bindgen { self.debug = debug; self } pub fn typescript(&mut self, typescript: bool) -> &mut Bindgen { self.typescript = typescript; self } pub fn generate<P: AsRef<Path>>(&mut self, path: P) -> Result<(), Error> { self._generate(path.as_ref()) } fn _generate(&mut self, out_dir: &Path) -> Result<(), Error> { let input = match self.path { Some(ref path) => path, None => panic!("must have a path input for now"), }; let stem = input.file_stem().unwrap().to_str().unwrap(); let mut module = parity_wasm::deserialize_file(input).map_err(|e| { format_err!("{:?}", e) })?; let program = extract_program(&mut module); let (js, ts) = js::Js { globals: String::new(), imports: String::new(), typescript: format!("/* tslint:disable */\n"), exposed_globals: Default::default(), required_internal_exports: Default::default(), config: &self, module: &mut module, program: &program, }.generate(stem); let js_path = out_dir.join(stem).with_extension("js"); File::create(&js_path).unwrap() .write_all(js.as_bytes()).unwrap(); if self.typescript { let ts_path = out_dir.join(stem).with_extension("d.ts"); File::create(&ts_path).unwrap() .write_all(ts.as_bytes()).unwrap(); } let wasm_path = out_dir.join(format!("{}_wasm", stem)).with_extension("wasm"); let wasm_bytes = parity_wasm::serialize(module).map_err(|e| { format_err!("{:?}", e) })?; let bytes = wasm_gc::Config::new() .demangle(false) .gc(&wasm_bytes)?; File::create(&wasm_path)?.write_all(&bytes)?; Ok(()) } } fn extract_program(module: &mut Module) -> shared::Program { let data = module.sections_mut() .iter_mut() .filter_map(|s| { match *s { Section::Data(ref mut s) => Some(s), _ => None, } }) .next(); let mut ret = shared::Program { structs: Vec::new(), free_functions: Vec::new(), imports: Vec::new(), imported_structs: Vec::new(), cus
ts, imported_structs, custom_type_names, } = p; ret.structs.extend(structs); ret.free_functions.extend(free_functions); ret.imports.extend(imports); ret.imported_structs.extend(imported_structs); if custom_type_names.len() > 0 { assert_eq!(ret.custom_type_names.len(), 0); } ret.custom_type_names.extend(custom_type_names); } data.entries_mut().remove(i); } return ret }
tom_type_names: Vec::new(), }; let data = match data { Some(data) => data, None => return ret, }; for i in (0..data.entries().len()).rev() { { let value = data.entries()[i].value(); if !value.starts_with(b"wbg:") { continue } let json = &value[4..]; let p = match serde_json::from_slice(json) { Ok(f) => f, Err(e) => { panic!("failed to decode what looked like wasm-bindgen data: {}", e) } }; let shared::Program { structs, free_functions, impor
function_block-random_span
[ { "content": "fn bindgen_imported_struct(import: &ast::ImportStruct, tokens: &mut Tokens) {\n\n let name = import.name;\n\n\n\n let mut methods = Tokens::new();\n\n\n\n for &(_is_method, ref f) in import.functions.iter() {\n\n let import_name = shared::mangled_import_name(\n\n Some(&import.name.to_string()),\n\n f.wasm_function.name.as_ref(),\n\n );\n\n bindgen_import_function(f, &import_name, &mut methods);\n\n }\n\n\n\n (my_quote! {\n\n pub struct #name {\n\n obj: ::wasm_bindgen::JsObject,\n\n }\n\n\n\n impl #name {\n\n #methods\n", "file_path": "crates/wasm-bindgen-macro/src/lib.rs", "rank": 1, "score": 204357.01694927114 }, { "content": "fn bindgen_import(import: &ast::Import, tokens: &mut Tokens) {\n\n let import_name = shared::mangled_import_name(\n\n None,\n\n import.function.wasm_function.name.as_ref(),\n\n );\n\n bindgen_import_function(&import.function, &import_name, tokens);\n\n}\n\n\n", "file_path": "crates/wasm-bindgen-macro/src/lib.rs", "rank": 2, "score": 184373.84799832257 }, { "content": "pub fn mangled_import_name(struct_: Option<&str>, f: &str) -> String {\n\n match struct_ {\n\n Some(s) => format!(\"__wbg_s_{}_{}\", s, f),\n\n None => format!(\"__wbg_f_{}\", f),\n\n }\n\n}\n\n\n\npub type Type = char;\n\n\n\npub const TYPE_NUMBER: char = '\\u{5e}';\n\npub const TYPE_BORROWED_STR: char = '\\u{5f}';\n\npub const TYPE_STRING: char = '\\u{60}';\n\npub const TYPE_BOOLEAN: char = '\\u{61}';\n\npub const TYPE_JS_OWNED: char = '\\u{62}';\n\npub const TYPE_JS_REF: char = '\\u{63}';\n\n\n\npub const TYPE_CUSTOM_START: u32 = 0x64;\n\npub const TYPE_CUSTOM_REF_FLAG: u32 = 1;\n", "file_path": "crates/wasm-bindgen-shared/src/lib.rs", "rank": 3, "score": 181949.99737582414 }, { "content": "fn bindgen_struct_fn(s: &ast::Struct, f: &ast::Function, into: &mut Tokens) {\n\n bindgen(&f.struct_function_export_name(s.name),\n\n f.rust_symbol(Some(s.name)),\n\n Receiver::StructFunction(s.name, f.name),\n\n &f.arguments,\n\n f.ret.as_ref(),\n\n into)\n\n}\n\n\n", "file_path": "crates/wasm-bindgen-macro/src/lib.rs", "rank": 4, "score": 172190.0494221826 }, { "content": "pub fn free_function(struct_name: &str) -> String {\n\n let mut name = format!(\"__wbindgen_\");\n\n name.extend(struct_name\n\n .chars()\n\n .flat_map(|s| s.to_lowercase()));\n\n name.push_str(\"_free\");\n\n return name\n\n}\n\n\n", "file_path": "crates/wasm-bindgen-shared/src/lib.rs", "rank": 5, "score": 171327.47753519693 }, { "content": "pub fn struct_function_export_name(struct_: &str, f: &str) -> String {\n\n let mut name = struct_\n\n .chars()\n\n .flat_map(|s| s.to_lowercase())\n\n .collect::<String>();\n\n name.push_str(\"_\");\n\n name.push_str(f);\n\n return name\n\n}\n\n\n", "file_path": "crates/wasm-bindgen-shared/src/lib.rs", "rank": 6, "score": 169594.7733493252 }, { "content": "#[proc_macro]\n\npub fn wasm_bindgen(input: TokenStream) -> TokenStream {\n\n // Parse the input as a list of Rust items, reusing the `syn::File` parser.\n\n let file = syn::parse::<ast::File>(input)\n\n .expect(\"expected a set of valid Rust items\");\n\n\n\n let mut ret = Tokens::new();\n\n\n\n let mut program = ast::Program {\n\n structs: Vec::new(),\n\n free_functions: Vec::new(),\n\n imports: Vec::new(),\n\n imported_structs: Vec::new(),\n\n };\n\n\n\n // Translate all input items into our own internal representation (the `ast`\n\n // module). We'll be panicking here on anything that we can't process\n\n\n\n for item in file.items.iter() {\n\n let item = match *item {\n\n ast::MyItem::ExternClass(ref c) => {\n", "file_path": "crates/wasm-bindgen-macro/src/lib.rs", "rank": 7, "score": 169072.7209059665 }, { "content": "pub fn extract_path_ident(path: &syn::Path) -> Option<syn::Ident> {\n\n if path.leading_colon.is_some() {\n\n return None\n\n }\n\n if path.segments.len() != 1 {\n\n return None\n\n }\n\n match path.segments.first().unwrap().value().arguments {\n\n syn::PathArguments::None => {}\n\n _ => return None,\n\n }\n\n path.segments.first().map(|v| v.value().ident)\n\n}\n\n\n\nimpl Type {\n\n pub fn from(ty: &syn::Type) -> Type {\n\n match *ty {\n\n syn::Type::Reference(ref r) => {\n\n match *r.elem {\n\n syn::Type::Path(syn::TypePath { qself: None, ref path }) => {\n", "file_path": "crates/wasm-bindgen-macro/src/ast.rs", "rank": 8, "score": 168438.10465644952 }, { "content": "fn bindgen_struct(idx: usize, s: &ast::Struct, into: &mut Tokens) {\n\n for f in s.functions.iter() {\n\n bindgen_struct_fn(s, f, into);\n\n }\n\n for f in s.methods.iter() {\n\n bindgen_struct_method(s, f, into);\n\n }\n\n\n\n let name = &s.name;\n\n let free_fn = s.free_function();\n\n let c = char::from_u32(idx as u32 * 2 + shared::TYPE_CUSTOM_START);\n\n (my_quote! {\n\n impl ::wasm_bindgen::convert::WasmBoundary for #name {\n\n type Js = u32;\n\n const DESCRIPTOR: char = #c;\n\n\n\n fn into_js(self) -> u32 {\n\n Box::into_raw(Box::new(::wasm_bindgen::__rt::WasmRefCell::new(self))) as u32\n\n }\n\n\n", "file_path": "crates/wasm-bindgen-macro/src/lib.rs", "rank": 9, "score": 167840.6304999459 }, { "content": "pub fn root() -> PathBuf {\n\n let idx = IDX.with(|x| *x);\n\n\n\n let mut me = env::current_exe().unwrap();\n\n me.pop(); // chop off exe name\n\n me.pop(); // chop off `deps`\n\n me.pop(); // chop off `debug` / `release`\n\n me.push(\"generated-tests\");\n\n me.push(&format!(\"test{}\", idx));\n\n return me\n\n}\n\n\n", "file_path": "crates/test-support/src/lib.rs", "rank": 10, "score": 163410.35355327846 }, { "content": "fn bindgen_struct_method(s: &ast::Struct, m: &ast::Method, into: &mut Tokens) {\n\n bindgen(&m.function.struct_function_export_name(s.name),\n\n m.function.rust_symbol(Some(s.name)),\n\n Receiver::StructMethod(s.name, m.mutable, m.function.name),\n\n &m.function.arguments,\n\n m.function.ret.as_ref(),\n\n into)\n\n}\n\n\n", "file_path": "crates/wasm-bindgen-macro/src/lib.rs", "rank": 11, "score": 162902.43716698303 }, { "content": "fn bindgen_import_function(import: &ast::ImportFunction,\n\n import_name: &str,\n\n tokens: &mut Tokens) {\n\n let vis = &import.rust_vis;\n\n let ret = &import.rust_decl.output;\n\n let fn_token = &import.rust_decl.fn_token;\n\n let arguments = &import.rust_decl.inputs;\n\n\n\n let mut abi_argument_names = Vec::new();\n\n let mut abi_arguments = Vec::new();\n\n let mut arg_conversions = Vec::new();\n\n let ret_ident = syn::Ident::from(\"_ret\");\n\n\n\n let inputs = import.rust_decl.inputs.iter().collect::<Vec<_>>();\n\n let (is_method, inputs) = match inputs.get(0) {\n\n Some(&&syn::FnArg::Captured(_)) => (false, &inputs[..]),\n\n Some(_) => (true, &inputs[1..]),\n\n None => (false, &inputs[..]),\n\n };\n\n\n", "file_path": "crates/wasm-bindgen-macro/src/lib.rs", "rank": 12, "score": 146778.26285765908 }, { "content": "fn run(cmd: &mut Command, program: &str) {\n\n println!(\"···················································\");\n\n println!(\"running {:?}\", cmd);\n\n let start = Instant::now();\n\n let output = match cmd.output() {\n\n Ok(output) => output,\n\n Err(err) => panic!(\"failed to spawn `{}`: {}\", program, err),\n\n };\n\n println!(\"exit: {}\", output.status);\n\n let dur = start.elapsed();\n\n println!(\"dur: {}.{:03}ms\", dur.as_secs(), dur.subsec_nanos() / 1_000_000);\n\n if output.stdout.len() > 0 {\n\n println!(\"stdout ---\\n{}\", String::from_utf8_lossy(&output.stdout));\n\n }\n\n if output.stderr.len() > 0 {\n\n println!(\"stderr ---\\n{}\", String::from_utf8_lossy(&output.stderr));\n\n }\n\n assert!(output.status.success());\n\n}\n", "file_path": "crates/test-support/src/lib.rs", "rank": 13, "score": 146111.96494915156 }, { "content": "pub fn free_function_export_name(function_name: &str) -> String {\n\n function_name.to_string()\n\n}\n\n\n", "file_path": "crates/wasm-bindgen-shared/src/lib.rs", "rank": 14, "score": 140241.69647443862 }, { "content": "fn bindgen_fn(function: &ast::Function, into: &mut Tokens) {\n\n bindgen(&function.free_function_export_name(),\n\n function.rust_symbol(None),\n\n Receiver::FreeFunction(function.name),\n\n &function.arguments,\n\n function.ret.as_ref(),\n\n into)\n\n}\n\n\n", "file_path": "crates/wasm-bindgen-macro/src/lib.rs", "rank": 15, "score": 135158.81003153473 }, { "content": "fn rollup() -> PathBuf {\n\n static INIT: Once = ONCE_INIT;\n\n\n\n let mut me = env::current_exe().unwrap();\n\n me.pop(); // chop off exe name\n\n me.pop(); // chop off `deps`\n\n me.pop(); // chop off `debug` / `release`\n\n let install_dir = me.clone();\n\n me.push(\"node_modules/rollup/bin/rollup\");\n\n\n\n INIT.call_once(|| {\n\n if !me.exists() {\n\n let mut npm = if cfg!(windows) {\n\n let mut n = Command::new(\"cmd\");\n\n n.arg(\"/c\").arg(\"npm\");\n\n n\n\n } else {\n\n Command::new(\"npm\")\n\n };\n\n run(npm\n", "file_path": "crates/test-support/src/lib.rs", "rank": 16, "score": 121651.37845960096 }, { "content": "pub fn project() -> Project {\n\n let dir = Path::new(env!(\"CARGO_MANIFEST_DIR\"));\n\n let dir = dir.parent().unwrap() // chop off `test-support`\n\n .parent().unwrap(); // chop off `crates`\n\n\n\n let mut lockfile = String::new();\n\n fs::File::open(&dir.join(\"Cargo.lock\")).unwrap()\n\n .read_to_string(&mut lockfile).unwrap();\n\n Project {\n\n debug: true,\n\n js: false,\n\n files: vec![\n\n (\"Cargo.toml\".to_string(), format!(r#\"\n\n [package]\n\n name = \"test{}\"\n\n version = \"0.0.1\"\n\n authors = []\n\n\n\n [workspace]\n\n\n", "file_path": "crates/test-support/src/lib.rs", "rank": 17, "score": 117031.2910423027 }, { "content": "fn bindgen(export_name: &syn::LitStr,\n\n generated_name: syn::Ident,\n\n receiver: Receiver,\n\n arguments: &[ast::Type],\n\n ret_type: Option<&ast::Type>,\n\n into: &mut Tokens) {\n\n let mut args = vec![];\n\n let mut arg_conversions = vec![];\n\n let mut converted_arguments = vec![];\n\n let ret = syn::Ident::from(\"_ret\");\n\n\n\n let mut offset = 0;\n\n if let Receiver::StructMethod(class, _, _) = receiver {\n\n args.push(my_quote! { me: *mut ::wasm_bindgen::__rt::WasmRefCell<#class> });\n\n arg_conversions.push(my_quote! {\n\n ::wasm_bindgen::__rt::assert_not_null(me);\n\n let me = unsafe { &*me };\n\n });\n\n offset = 1;\n\n }\n", "file_path": "crates/wasm-bindgen-macro/src/lib.rs", "rank": 18, "score": 109693.20501440053 }, { "content": "struct LiteralBuilder<'a> {\n\n dst: &'a mut Tokens,\n\n cnt: usize,\n\n}\n\n\n\nimpl<'a> LiteralBuilder<'a> {\n\n fn byte(&mut self, byte: u8) {\n\n if self.cnt > 0 {\n\n ::syn::token::Comma::default().to_tokens(self.dst);\n\n }\n\n self.cnt += 1;\n\n byte.to_tokens(self.dst);\n\n }\n\n\n\n fn append(&mut self, s: &str) {\n\n for byte in s.bytes() {\n\n self.byte(byte);\n\n }\n\n }\n\n\n", "file_path": "crates/wasm-bindgen-macro/src/ast.rs", "rank": 19, "score": 100526.85222818461 }, { "content": " function _assertBoolean(n) {{\n\n if (typeof(n) !== 'boolean')\n\n throw new Error('expected a boolean argument');\n\n }}\n\n \"));\n\n }\n\n\n\n fn expose_pass_string_to_wasm(&mut self) {\n\n if !self.exposed_globals.insert(\"pass_string_to_wasm\") {\n\n return\n\n }\n\n self.required_internal_exports.insert(\"__wbindgen_malloc\");\n\n if self.config.nodejs {\n\n self.globals.push_str(&format!(\"\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 20, "score": 97367.45975380436 }, { "content": " function _assertNum(n) {{\n\n if (typeof(n) !== 'number')\n\n throw new Error('expected a number argument');\n\n }}\n\n \"));\n\n }\n\n\n\n fn expose_assert_bool(&mut self) {\n\n if !self.exposed_globals.insert(\"assert_bool\") {\n\n return\n\n }\n\n self.globals.push_str(&format!(\"\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 21, "score": 97367.45975380436 }, { "content": "#[cold]\n\n#[inline(never)]\n\npub fn throw(s: &str) -> ! {\n\n extern {\n\n fn __wbindgen_throw(a: *const u8, b: usize) -> !;\n\n }\n\n unsafe {\n\n __wbindgen_throw(s.as_ptr(), s.len());\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\npub mod __rt {\n\n use std::cell::{Cell, UnsafeCell};\n\n use std::mem;\n\n use std::ops::{Deref, DerefMut};\n\n\n\n #[inline]\n\n pub fn assert_not_null<T>(s: *mut T) {\n\n if s.is_null() {\n\n throw_null();\n\n }\n", "file_path": "src/lib.rs", "rank": 22, "score": 95559.88534107541 }, { "content": "#[test]\n\nfn import_class() {\n\n test_support::project()\n\n .file(\"src/lib.rs\", r#\"\n\n #![feature(proc_macro)]\n\n\n\n extern crate wasm_bindgen;\n\n\n\n use wasm_bindgen::prelude::*;\n\n\n\n wasm_bindgen! {\n\n #[wasm_module = \"./test\"]\n\n extern struct Foo {\n\n fn bar();\n\n }\n\n\n\n pub fn bar() {\n\n Foo::bar();\n\n }\n\n }\n\n \"#)\n", "file_path": "tests/import-class.rs", "rank": 23, "score": 90160.17525520302 }, { "content": "#[test]\n\nfn unused() {\n\n test_support::project()\n\n .file(\"src/lib.rs\", r#\"\n\n #![feature(proc_macro)]\n\n\n\n extern crate wasm_bindgen;\n\n\n\n use wasm_bindgen::prelude::*;\n\n\n\n wasm_bindgen! {\n\n #[wasm_module = \"./test\"]\n\n extern \"JS\" {\n\n fn debug_print(s: &str);\n\n }\n\n\n\n pub fn bar() {}\n\n }\n\n \"#)\n\n .file(\"test.ts\", r#\"\n\n import * as wasm from \"./out\";\n\n\n\n export function debug_print() {}\n\n\n\n export function test() {\n\n wasm.bar();\n\n }\n\n \"#)\n\n .test();\n\n}\n\n\n", "file_path": "tests/imports.rs", "rank": 24, "score": 86477.2284962691 }, { "content": "#[test]\n\nfn simple() {\n\n test_support::project()\n\n .file(\"src/lib.rs\", r#\"\n\n #![feature(proc_macro)]\n\n\n\n extern crate wasm_bindgen;\n\n\n\n use wasm_bindgen::prelude::*;\n\n\n\n wasm_bindgen! {\n\n #[wasm_module = \"./test\"]\n\n extern \"JS\" {\n\n fn foo(s: &str);\n\n fn another(a: u32) -> i32;\n\n fn take_and_return_bool(a: bool) -> bool;\n\n fn return_object() -> JsObject;\n\n }\n\n pub fn bar(s: &str) {\n\n foo(s);\n\n }\n", "file_path": "tests/imports.rs", "rank": 25, "score": 86477.2284962691 }, { "content": "#[test]\n\nfn strings() {\n\n test_support::project()\n\n .file(\"src/lib.rs\", r#\"\n\n #![feature(proc_macro)]\n\n\n\n extern crate wasm_bindgen;\n\n\n\n use wasm_bindgen::prelude::*;\n\n\n\n wasm_bindgen! {\n\n #[wasm_module = \"./test\"]\n\n extern \"JS\" {\n\n fn foo(a: String) -> String;\n\n }\n\n\n\n pub fn bar(a: &str) -> String {\n\n foo(a.to_string())\n\n }\n\n\n\n pub fn bar2(a: String) -> String {\n", "file_path": "tests/imports.rs", "rank": 26, "score": 86477.2284962691 }, { "content": "#[test]\n\nfn works() {\n\n test_support::project()\n\n .debug(false)\n\n .file(\"src/lib.rs\", r#\"\n\n #![feature(proc_macro)]\n\n\n\n extern crate wasm_bindgen;\n\n\n\n use wasm_bindgen::prelude::*;\n\n\n\n wasm_bindgen! {\n\n pub struct A {}\n\n\n\n impl A {\n\n pub fn new() -> A {\n\n A {}\n\n }\n\n }\n\n pub fn clone(a: &JsObject) -> JsObject {\n\n drop(a.clone());\n", "file_path": "tests/non-debug.rs", "rank": 27, "score": 84369.12826977436 }, { "content": "#[test]\n\nfn construct() {\n\n test_support::project()\n\n .file(\"src/lib.rs\", r#\"\n\n #![feature(proc_macro)]\n\n\n\n extern crate wasm_bindgen;\n\n\n\n use wasm_bindgen::prelude::*;\n\n\n\n wasm_bindgen! {\n\n #[wasm_module = \"./test\"]\n\n extern struct Foo {\n\n fn create() -> Foo;\n\n fn get_internal_string(&self) -> String;\n\n fn append_to_internal_string(&self, s: &str);\n\n fn assert_internal_string(&self, s: &str);\n\n }\n\n\n\n pub fn run() {\n\n let f = Foo::create();\n", "file_path": "tests/import-class.rs", "rank": 28, "score": 83967.96220278717 }, { "content": "#[test]\n\nfn simple() {\n\n test_support::project()\n\n .file(\"src/lib.rs\", r#\"\n\n #![feature(proc_macro)]\n\n\n\n extern crate wasm_bindgen;\n\n\n\n use wasm_bindgen::prelude::*;\n\n\n\n wasm_bindgen! {\n\n extern struct Math {\n\n fn random() -> f64;\n\n fn log(a: f64) -> f64;\n\n }\n\n\n\n pub fn get_random() -> f64 {\n\n Math::random()\n\n }\n\n\n\n pub fn do_log(a: f64) -> f64 {\n", "file_path": "tests/import-class.rs", "rank": 29, "score": 83967.96220278717 }, { "content": "#[macro_use]\n\nextern crate serde_derive;\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct Program {\n\n pub structs: Vec<Struct>,\n\n pub free_functions: Vec<Function>,\n\n pub imports: Vec<Import>,\n\n pub imported_structs: Vec<ImportStruct>,\n\n pub custom_type_names: Vec<String>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct Struct {\n\n pub name: String,\n\n pub functions: Vec<Function>,\n\n pub methods: Vec<Method>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n", "file_path": "crates/wasm-bindgen-shared/src/lib.rs", "rank": 30, "score": 83400.75210609617 }, { "content": "pub struct Import {\n\n pub module: String,\n\n pub function: Function,\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct ImportStruct {\n\n pub module: Option<String>,\n\n pub name: String,\n\n pub functions: Vec<ImportStructFunction>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct ImportStructFunction {\n\n pub method: bool,\n\n pub function: Function,\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct Method {\n", "file_path": "crates/wasm-bindgen-shared/src/lib.rs", "rank": 31, "score": 83394.55844738641 }, { "content": " pub mutable: bool,\n\n pub function: Function,\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct Function {\n\n pub name: String,\n\n pub arguments: Vec<Type>,\n\n pub ret: Option<Type>,\n\n}\n\n\n", "file_path": "crates/wasm-bindgen-shared/src/lib.rs", "rank": 32, "score": 83390.41475227947 }, { "content": "extern crate base64;\n\n\n\nuse std::collections::HashSet;\n\n\n\nuse parity_wasm::elements::*;\n\n\n\nuse super::Error;\n\n\n\npub struct Config {\n\n base64: bool,\n\n}\n\n\n\npub struct Output {\n\n module: Module,\n\n base64: bool,\n\n}\n\n\n\nimpl Config {\n\n pub fn new() -> Config {\n\n Config {\n", "file_path": "crates/wasm-bindgen-cli-support/src/wasm2es6js.rs", "rank": 33, "score": 80576.26545458243 }, { "content": "\n\n pub fn js(self) -> String {\n\n let mut js_imports = String::new();\n\n let mut exports = String::new();\n\n let mut imports = String::new();\n\n let mut export_mem = false;\n\n\n\n if let Some(i) = self.module.import_section() {\n\n let mut set = HashSet::new();\n\n for entry in i.entries() {\n\n match *entry.external() {\n\n External::Function(_) => {}\n\n External::Table(_) => {\n\n panic!(\"wasm imports a table which isn't supported yet\");\n\n }\n\n External::Memory(_) => {\n\n panic!(\"wasm imports memory which isn't supported yet\");\n\n }\n\n External::Global(_) => {\n\n panic!(\"wasm imports globals which aren't supported yet\");\n", "file_path": "crates/wasm-bindgen-cli-support/src/wasm2es6js.rs", "rank": 34, "score": 80569.42749060015 }, { "content": " base64: false,\n\n }\n\n }\n\n\n\n pub fn base64(&mut self, base64: bool) -> &mut Self {\n\n self.base64 = base64;\n\n self\n\n }\n\n\n\n pub fn generate(&mut self, wasm: &[u8]) -> Result<Output, Error> {\n\n assert!(self.base64);\n\n let module = deserialize_buffer(wasm).map_err(|e| {\n\n format_err!(\"{:?}\", e)\n\n })?;\n\n Ok(Output {\n\n module,\n\n base64: self.base64,\n\n })\n\n }\n\n}\n", "file_path": "crates/wasm-bindgen-cli-support/src/wasm2es6js.rs", "rank": 35, "score": 80564.73032215188 }, { "content": "\n\nimpl Output {\n\n pub fn typescript(&self) -> String {\n\n let mut exports = format!(\"/* tslint:disable */\\n\");\n\n\n\n if let Some(i) = self.module.export_section() {\n\n let imported_functions = self.module.import_section()\n\n .map(|m| m.functions() as u32)\n\n .unwrap_or(0);\n\n for entry in i.entries() {\n\n let idx = match *entry.internal() {\n\n Internal::Function(i) => i - imported_functions,\n\n Internal::Memory(_) => {\n\n exports.push_str(&format!(\"\n\n export const {}: WebAssembly.Memory;\n\n \", entry.field()));\n\n continue\n\n }\n\n Internal::Table(_) => {\n\n panic!(\"wasm exports a table which isn't supported yet\");\n", "file_path": "crates/wasm-bindgen-cli-support/src/wasm2es6js.rs", "rank": 36, "score": 80556.44987662889 }, { "content": " let ty = match types.types()[idx as usize] {\n\n Type::Function(ref f) => f,\n\n };\n\n let mut args = String::new();\n\n for (i, _) in ty.params().iter().enumerate() {\n\n if i > 0 {\n\n args.push_str(\", \");\n\n }\n\n args.push((b'a' + (i as u8)) as char);\n\n }\n\n\n\n exports.push_str(&format!(\"\n\n export function {name}({args}) {{\n\n {ret} wasm.exports.{name}({args});\n\n }}\n\n \",\n\n name = entry.field(),\n\n args = args,\n\n ret = if ty.return_type().is_some() { \"return\" } else { \"\" },\n\n ));\n", "file_path": "crates/wasm-bindgen-cli-support/src/wasm2es6js.rs", "rank": 37, "score": 80554.48753860629 }, { "content": " }\n\n Internal::Global(_) => {\n\n panic!(\"wasm exports globals which aren't supported yet\");\n\n }\n\n };\n\n\n\n let functions = self.module.function_section()\n\n .expect(\"failed to find function section\");\n\n let idx = functions.entries()[idx as usize].type_ref();\n\n\n\n let types = self.module.type_section()\n\n .expect(\"failed to find type section\");\n\n let ty = match types.types()[idx as usize] {\n\n Type::Function(ref f) => f,\n\n };\n\n let mut args = String::new();\n\n for (i, _) in ty.params().iter().enumerate() {\n\n if i > 0 {\n\n args.push_str(\", \");\n\n }\n", "file_path": "crates/wasm-bindgen-cli-support/src/wasm2es6js.rs", "rank": 38, "score": 80554.10489258541 }, { "content": " }\n\n }\n\n\n\n let wasm = serialize(self.module)\n\n .expect(\"failed to serialize\");\n\n\n\n format!(\"\n\n {js_imports}\n\n let wasm;\n\n let bytes;\n\n const base64 = \\\"{base64}\\\";\n\n if (typeof Buffer === 'undefined') {{\n\n bytes = Uint8Array.from(atob(base64), c => c.charCodeAt(0));\n\n }} else {{\n\n bytes = Buffer.from(base64, 'base64');\n\n }}\n\n {mem_export}\n\n export const booted = WebAssembly.instantiate(bytes, {{ {imports} }})\n\n .then(obj => {{\n\n wasm = obj.instance;\n", "file_path": "crates/wasm-bindgen-cli-support/src/wasm2es6js.rs", "rank": 39, "score": 80553.8588032027 }, { "content": " {memory}\n\n }});\n\n\n\n {exports}\n\n \",\n\n base64 = base64::encode(&wasm),\n\n js_imports = js_imports,\n\n imports = imports,\n\n exports = exports,\n\n mem_export = if export_mem { \"export let memory;\" } else { \"\" },\n\n memory = if export_mem { \"memory = wasm.exports.memory;\" } else { \"\" },\n\n )\n\n }\n\n}\n", "file_path": "crates/wasm-bindgen-cli-support/src/wasm2es6js.rs", "rank": 40, "score": 80551.84100405656 }, { "content": " let idx = match *entry.internal() {\n\n Internal::Function(i) => i - imported_functions,\n\n Internal::Memory(_) => {\n\n export_mem = true;\n\n continue\n\n }\n\n Internal::Table(_) => {\n\n panic!(\"wasm exports a table which isn't supported yet\");\n\n }\n\n Internal::Global(_) => {\n\n panic!(\"wasm exports globals which aren't supported yet\");\n\n }\n\n };\n\n\n\n let functions = self.module.function_section()\n\n .expect(\"failed to find function section\");\n\n let idx = functions.entries()[idx as usize].type_ref();\n\n\n\n let types = self.module.type_section()\n\n .expect(\"failed to find type section\");\n", "file_path": "crates/wasm-bindgen-cli-support/src/wasm2es6js.rs", "rank": 41, "score": 80551.6393828718 }, { "content": " }\n\n }\n\n\n\n if !set.insert(entry.module()) {\n\n continue\n\n }\n\n\n\n let name = (b'a' + (set.len() as u8)) as char;\n\n js_imports.push_str(&format!(\"import * as import_{} from '{}';\",\n\n name,\n\n entry.module()));\n\n imports.push_str(&format!(\"'{}': import_{}, \", entry.module(), name));\n\n }\n\n }\n\n\n\n if let Some(i) = self.module.export_section() {\n\n let imported_functions = self.module.import_section()\n\n .map(|m| m.functions() as u32)\n\n .unwrap_or(0);\n\n for entry in i.entries() {\n", "file_path": "crates/wasm-bindgen-cli-support/src/wasm2es6js.rs", "rank": 42, "score": 80550.71368121065 }, { "content": " args.push((b'a' + (i as u8)) as char);\n\n args.push_str(\": number\");\n\n }\n\n\n\n exports.push_str(&format!(\"\n\n export function {name}({args}): {ret};\n\n \",\n\n name = entry.field(),\n\n args = args,\n\n ret = if ty.return_type().is_some() { \"number\" } else { \"void\" },\n\n ));\n\n }\n\n }\n\n\n\n if self.base64 {\n\n exports.push_str(\"export const booted: Promise<boolean>;\");\n\n }\n\n\n\n return exports\n\n }\n", "file_path": "crates/wasm-bindgen-cli-support/src/wasm2es6js.rs", "rank": 43, "score": 80547.48371954913 }, { "content": "use std::collections::HashSet;\n\n\n\nuse shared;\n\nuse parity_wasm::elements::*;\n\n\n\nuse super::Bindgen;\n\n\n\npub struct Js<'a> {\n\n pub globals: String,\n\n pub imports: String,\n\n pub typescript: String,\n\n pub exposed_globals: HashSet<&'static str>,\n\n pub required_internal_exports: HashSet<&'static str>,\n\n pub config: &'a Bindgen,\n\n pub module: &'a mut Module,\n\n pub program: &'a shared::Program,\n\n}\n\n\n\nimpl<'a> Js<'a> {\n\n pub fn generate(&mut self, module_name: &str) -> (String, String) {\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 44, "score": 79661.67196416252 }, { "content": " false,\n\n &func.arguments,\n\n func.ret.as_ref());\n\n self.globals.push_str(\"export \");\n\n self.globals.push_str(&js);\n\n self.globals.push_str(\"\\n\");\n\n self.typescript.push_str(\"export \");\n\n self.typescript.push_str(&ts);\n\n self.typescript.push_str(\"\\n\");\n\n }\n\n\n\n pub fn generate_struct(&mut self, s: &shared::Struct) {\n\n let mut dst = String::new();\n\n dst.push_str(&format!(\"export class {} {{\", s.name));\n\n let mut ts_dst = dst.clone();\n\n ts_dst.push_str(\"\n\n public ptr: number;\n\n \");\n\n if self.config.debug {\n\n self.expose_check_token();\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 45, "score": 79655.99595533828 }, { "content": " for f in self.program.free_functions.iter() {\n\n self.generate_free_function(f);\n\n }\n\n for f in self.program.imports.iter() {\n\n self.generate_import(&f.module, &f.function);\n\n }\n\n for s in self.program.structs.iter() {\n\n self.generate_struct(s);\n\n }\n\n for s in self.program.imported_structs.iter() {\n\n self.generate_import_struct(s);\n\n }\n\n\n\n {\n\n let mut bind = |name: &str, f: &Fn(&mut Self) -> String| {\n\n if !self.wasm_import_needed(name) {\n\n return\n\n }\n\n let global = format!(\"export const {} = {};\", name, f(self));\n\n self.globals.push_str(&global);\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 46, "score": 79651.28582354398 }, { "content": " import * as wasm from './{module_name}_wasm'; // imports from wasm file\n\n {imports}\n\n\n\n {globals}\n\n \",\n\n module_name = module_name,\n\n globals = self.globals,\n\n imports = self.imports,\n\n );\n\n\n\n self.rewrite_imports(module_name);\n\n self.unexport_unused_internal_exports();\n\n\n\n (js, self.typescript.clone())\n\n }\n\n\n\n pub fn generate_free_function(&mut self, func: &shared::Function) {\n\n let (js, ts) = self.generate_function(\"function\",\n\n &func.name,\n\n &func.name,\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 47, "score": 79648.58224368512 }, { "content": " };\n\n for import in imports.entries_mut() {\n\n if import.module() != \"env\" {\n\n continue\n\n }\n\n if import.field().starts_with(\"__wbindgen\") {\n\n import.module_mut().truncate(0);\n\n import.module_mut().push_str(\"./\");\n\n import.module_mut().push_str(module_name);\n\n continue\n\n }\n\n\n\n // rustc doesn't have support for importing from anything other\n\n // than the module `env` so let's use the metadata here to\n\n // rewrite the imports if they import from `env` until it's\n\n // fixed upstream.\n\n let program_import = self.program.imports\n\n .iter()\n\n .any(|f| shared::mangled_import_name(None, &f.function.name) == import.field());\n\n let struct_import = self.program.imported_structs\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 48, "score": 79647.94511014382 }, { "content": " _ => continue,\n\n };\n\n exports.entries_mut().retain(|export| {\n\n !export.field().starts_with(\"__wbindgen\") ||\n\n required.contains(export.field())\n\n });\n\n }\n\n }\n\n\n\n fn expose_drop_ref(&mut self) {\n\n if !self.exposed_globals.insert(\"drop_ref\") {\n\n return\n\n }\n\n self.expose_global_slab();\n\n self.expose_global_slab_next();\n\n let validate_owned = if self.config.debug {\n\n String::from(\"\n\n if ((idx & 1) === 1)\n\n throw new Error('cannot drop ref of stack objects');\n\n \")\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 49, "score": 79647.55777353066 }, { "content": " self.globals.push_str(&dst);\n\n self.globals.push_str(\"\\n\");\n\n }\n\n\n\n fn wasm_import_needed(&self, name: &str) -> bool {\n\n let imports = match self.module.import_section() {\n\n Some(s) => s,\n\n None => return false,\n\n };\n\n\n\n imports.entries().iter().any(|i| {\n\n i.module() == \"env\" && i.field() == name\n\n })\n\n }\n\n\n\n fn rewrite_imports(&mut self, module_name: &str) {\n\n for section in self.module.sections_mut() {\n\n let imports = match *section {\n\n Section::Import(ref mut s) => s,\n\n _ => continue,\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 50, "score": 79647.0565299323 }, { "content": " }\n\n\n\n pub fn generate_import(&mut self, module: &str, import: &shared::Function) {\n\n let imported_name = format!(\"import{}\", self.imports.len());\n\n\n\n self.imports.push_str(&format!(\"\n\n import {{ {} as {} }} from '{}';\n\n \", import.name, imported_name, module));\n\n\n\n self.gen_import_shim(&shared::mangled_import_name(None, &import.name),\n\n &imported_name,\n\n false,\n\n import)\n\n }\n\n\n\n pub fn generate_import_struct(&mut self, import: &shared::ImportStruct) {\n\n if let Some(ref module) = import.module {\n\n self.imports.push_str(&format!(\"\n\n import {{ {} }} from '{}';\n\n \", import.name, module));\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 51, "score": 79646.84021319084 }, { "content": " let convert_ret = match ret {\n\n None => {\n\n dst_ts.push_str(\": void\");\n\n format!(\"return ret;\")\n\n }\n\n Some(&shared::TYPE_NUMBER) => {\n\n dst_ts.push_str(\": number\");\n\n format!(\"return ret;\")\n\n }\n\n Some(&shared::TYPE_BOOLEAN) => {\n\n dst_ts.push_str(\": boolean\");\n\n format!(\"return ret != 0;\")\n\n }\n\n Some(&shared::TYPE_JS_OWNED) => {\n\n dst_ts.push_str(\": any\");\n\n self.expose_take_object();\n\n format!(\"return takeObject(ret);\")\n\n }\n\n Some(&shared::TYPE_STRING) => {\n\n dst_ts.push_str(\": string\");\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 52, "score": 79646.04840616841 }, { "content": " }\n\n\n\n fn generate_function(&mut self,\n\n prefix: &str,\n\n name: &str,\n\n wasm_name: &str,\n\n is_method: bool,\n\n arguments: &[shared::Type],\n\n ret: Option<&shared::Type>) -> (String, String) {\n\n let mut dst = format!(\"{}(\", name);\n\n let mut dst_ts = format!(\"{}(\", name);\n\n let mut passed_args = String::new();\n\n let mut arg_conversions = String::new();\n\n let mut destructors = String::new();\n\n\n\n if is_method {\n\n passed_args.push_str(\"this.ptr\");\n\n }\n\n\n\n for (i, arg) in arguments.iter().enumerate() {\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 53, "score": 79644.60631256168 }, { "content": " self.gen_import_shim(&shared::mangled_import_name(Some(class), &function.name),\n\n &delegate,\n\n is_method,\n\n function)\n\n }\n\n\n\n fn gen_import_shim(\n\n &mut self,\n\n shim_name: &str,\n\n shim_delegate: &str,\n\n is_method: bool,\n\n import: &shared::Function,\n\n ) {\n\n let mut dst = String::new();\n\n\n\n dst.push_str(&format!(\"function {}(\", shim_name));\n\n let mut invocation = String::new();\n\n\n\n if is_method {\n\n dst.push_str(\"ptr\");\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 54, "score": 79643.48024904303 }, { "content": " return\n\n }\n\n self.expose_global_stack();\n\n self.expose_global_slab();\n\n\n\n let get_obj = if self.config.debug {\n\n String::from(\"\n\n if (typeof(val) === 'number')\n\n throw new Error('corrupt slab');\n\n return val.obj;\n\n \")\n\n } else {\n\n String::from(\"\n\n return val.obj;\n\n \")\n\n };\n\n self.globals.push_str(&format!(\"\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 55, "score": 79643.21958502084 }, { "content": " }\n\n\n\n for f in import.functions.iter() {\n\n self.generate_import_struct_function(&import.name,\n\n f.method,\n\n &f.function);\n\n }\n\n }\n\n\n\n fn generate_import_struct_function(\n\n &mut self,\n\n class: &str,\n\n is_method: bool,\n\n function: &shared::Function,\n\n ) {\n\n let delegate = if is_method {\n\n format!(\"{}.prototype.{}.call\", class, function.name)\n\n } else {\n\n format!(\"{}.{}\", class, function.name)\n\n };\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 56, "score": 79643.0217522459 }, { "content": " } else {\n\n String::new()\n\n };\n\n let dec_ref = if self.config.debug {\n\n String::from(\"\n\n if (typeof(obj) === 'number')\n\n throw new Error('corrupt slab');\n\n obj.cnt -= 1;\n\n if (obj.cnt > 0)\n\n return;\n\n \")\n\n } else {\n\n String::from(\"\n\n obj.cnt -= 1;\n\n if (obj.cnt > 0)\n\n return;\n\n \")\n\n };\n\n self.globals.push_str(&format!(\"\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 57, "score": 79642.77300917181 }, { "content": " .iter()\n\n .flat_map(|s| s.functions.iter().map(move |f| (s, &f.function)))\n\n .any(|(s, f)| {\n\n shared::mangled_import_name(Some(&s.name), &f.name) == import.field()\n\n });\n\n if program_import || struct_import {\n\n import.module_mut().truncate(0);\n\n import.module_mut().push_str(\"./\");\n\n import.module_mut().push_str(module_name);\n\n continue\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn unexport_unused_internal_exports(&mut self) {\n\n let required = &self.required_internal_exports;\n\n for section in self.module.sections_mut() {\n\n let exports = match *section {\n\n Section::Export(ref mut s) => s,\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 58, "score": 79642.58133442598 }, { "content": " if self.config.debug {\n\n format!(\"\\\n\n return new {name}(ret, token);\n\n \", name = name)\n\n } else {\n\n format!(\"\\\n\n return new {name}(ret);\n\n \", name = name)\n\n }\n\n }\n\n };\n\n dst_ts.push_str(\";\");\n\n dst.push_str(\" {\\n \");\n\n dst.push_str(&arg_conversions);\n\n if destructors.len() == 0 {\n\n dst.push_str(&format!(\"\\\n\n const ret = wasm.{}({passed});\n\n {convert_ret}\n\n \",\n\n f = wasm_name,\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 59, "score": 79642.36836588474 }, { "content": " let (js, ts) = self.generate_function(\n\n \"\",\n\n &method.function.name,\n\n &shared::struct_function_export_name(&s.name, &method.function.name),\n\n true,\n\n &method.function.arguments,\n\n method.function.ret.as_ref(),\n\n );\n\n dst.push_str(&js);\n\n dst.push_str(\"\\n\");\n\n ts_dst.push_str(&ts);\n\n ts_dst.push_str(\"\\n\");\n\n }\n\n dst.push_str(\"}\\n\");\n\n ts_dst.push_str(\"}\\n\");\n\n\n\n self.globals.push_str(&dst);\n\n self.globals.push_str(\"\\n\");\n\n self.typescript.push_str(&ts_dst);\n\n self.typescript.push_str(\"\\n\");\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 60, "score": 79641.54649706764 }, { "content": " let name = format!(\"arg{}\", i);\n\n if i > 0 {\n\n dst.push_str(\", \");\n\n dst_ts.push_str(\", \");\n\n }\n\n dst.push_str(&name);\n\n dst_ts.push_str(&name);\n\n\n\n let mut pass = |arg: &str| {\n\n if passed_args.len() > 0 {\n\n passed_args.push_str(\", \");\n\n }\n\n passed_args.push_str(arg);\n\n };\n\n match *arg {\n\n shared::TYPE_NUMBER => {\n\n dst_ts.push_str(\": number\");\n\n if self.config.debug {\n\n self.expose_assert_num();\n\n arg_conversions.push_str(&format!(\"_assertNum({});\\n\", name));\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 61, "score": 79641.4495598828 }, { "content": " dst.push_str(&format!(\"arg{}\", i));\n\n }\n\n shared::TYPE_JS_REF => {\n\n self.expose_get_object();\n\n invocation.push_str(&format!(\"getObject(arg{})\", i));\n\n dst.push_str(&format!(\"arg{}\", i));\n\n }\n\n _ => {\n\n panic!(\"unsupported type in import\");\n\n }\n\n }\n\n }\n\n let invoc = format!(\"{}({})\", shim_delegate, invocation);\n\n let invoc = match import.ret {\n\n Some(shared::TYPE_NUMBER) => format!(\"return {};\", invoc),\n\n Some(shared::TYPE_BOOLEAN) => format!(\"return {} ? 1 : 0;\", invoc),\n\n Some(shared::TYPE_JS_OWNED) => {\n\n self.expose_add_heap_object();\n\n format!(\"return addHeapObject({});\", invoc)\n\n }\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 62, "score": 79641.16237749402 }, { "content": " }\n\n pass(&name)\n\n }\n\n shared::TYPE_BOOLEAN => {\n\n dst_ts.push_str(\": boolean\");\n\n if self.config.debug {\n\n self.expose_assert_bool();\n\n arg_conversions.push_str(&format!(\"\\\n\n _assertBoolean({name});\n\n \", name = name));\n\n } else {\n\n }\n\n pass(&format!(\"arg{i} ? 1 : 0\", i = i))\n\n }\n\n shared::TYPE_BORROWED_STR |\n\n shared::TYPE_STRING => {\n\n dst_ts.push_str(\": string\");\n\n self.expose_pass_string_to_wasm();\n\n arg_conversions.push_str(&format!(\"\\\n\n const [ptr{i}, len{i}] = passStringToWasm({arg});\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 63, "score": 79640.64979035476 }, { "content": " self.expose_get_string_from_wasm();\n\n self.required_internal_exports.insert(\"__wbindgen_boxed_str_ptr\");\n\n self.required_internal_exports.insert(\"__wbindgen_boxed_str_len\");\n\n self.required_internal_exports.insert(\"__wbindgen_boxed_str_free\");\n\n format!(\"\n\n const ptr = wasm.__wbindgen_boxed_str_ptr(ret);\n\n const len = wasm.__wbindgen_boxed_str_len(ret);\n\n const realRet = getStringFromWasm(ptr, len);\n\n wasm.__wbindgen_boxed_str_free(ret);\n\n return realRet;\n\n \")\n\n }\n\n Some(&shared::TYPE_JS_REF) |\n\n Some(&shared::TYPE_BORROWED_STR) => panic!(),\n\n Some(&t) if (t as u32) & shared::TYPE_CUSTOM_REF_FLAG != 0 => panic!(),\n\n Some(&custom) => {\n\n let custom = (custom as u32) - shared::TYPE_CUSTOM_START;\n\n let name = &self.program.custom_type_names[custom as usize / 2];\n\n dst_ts.push_str(\": \");\n\n dst_ts.push_str(name);\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 64, "score": 79639.93089953285 }, { "content": " wasm.{}(ptr);\n\n }}\n\n \", shared::free_function(&s.name)));\n\n ts_dst.push_str(\"free(): void;\\n\");\n\n\n\n for function in s.functions.iter() {\n\n let (js, ts) = self.generate_function(\n\n \"static\",\n\n &function.name,\n\n &shared::struct_function_export_name(&s.name, &function.name),\n\n false,\n\n &function.arguments,\n\n function.ret.as_ref(),\n\n );\n\n dst.push_str(&js);\n\n dst.push_str(\"\\n\");\n\n ts_dst.push_str(&ts);\n\n ts_dst.push_str(\"\\n\");\n\n }\n\n for method in s.methods.iter() {\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 65, "score": 79639.74044543583 }, { "content": " }\n\n\n\n fn expose_global_slab(&mut self) {\n\n if !self.exposed_globals.insert(\"slab\") {\n\n return\n\n }\n\n self.globals.push_str(&format!(\"let slab = [];\"));\n\n }\n\n\n\n fn expose_global_slab_next(&mut self) {\n\n if !self.exposed_globals.insert(\"slab_next\") {\n\n return\n\n }\n\n self.globals.push_str(&format!(\"\n\n let slab_next = 0;\n\n \"));\n\n }\n\n\n\n fn expose_get_object(&mut self) {\n\n if !self.exposed_globals.insert(\"get_object\") {\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 66, "score": 79639.73810868598 }, { "content": " invocation.push_str(\"getObject(ptr)\");\n\n self.expose_get_object();\n\n }\n\n\n\n let mut extra = String::new();\n\n\n\n for (i, arg) in import.arguments.iter().enumerate() {\n\n if invocation.len() > 0 {\n\n invocation.push_str(\", \");\n\n }\n\n if i > 0 || is_method {\n\n dst.push_str(\", \");\n\n }\n\n match *arg {\n\n shared::TYPE_NUMBER => {\n\n invocation.push_str(&format!(\"arg{}\", i));\n\n dst.push_str(&format!(\"arg{}\", i));\n\n }\n\n shared::TYPE_BOOLEAN => {\n\n invocation.push_str(&format!(\"arg{} != 0\", i));\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 67, "score": 79639.38702890294 }, { "content": " custom => {\n\n let custom = (custom as u32) - shared::TYPE_CUSTOM_START;\n\n let s = &self.program.custom_type_names[custom as usize / 2];\n\n dst_ts.push_str(&format!(\": {}\", s));\n\n if self.config.debug {\n\n self.expose_assert_class();\n\n arg_conversions.push_str(&format!(\"\\\n\n _assertClass({arg}, {struct_});\n\n \", arg = name, struct_ = s));\n\n }\n\n arg_conversions.push_str(&format!(\"\\\n\n const ptr{i} = {arg}.ptr;\n\n {arg}.ptr = 0;\n\n \", i = i, arg = name));\n\n pass(&format!(\"ptr{}\", i));\n\n }\n\n }\n\n }\n\n dst.push_str(\")\");\n\n dst_ts.push_str(\")\");\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 68, "score": 79639.18830359145 }, { "content": " Some(shared::TYPE_STRING) => {\n\n self.expose_pass_string_to_wasm();\n\n if import.arguments.len() > 0 || is_method {\n\n dst.push_str(\", \");\n\n }\n\n dst.push_str(\"wasmretptr\");\n\n format!(\"\n\n const [retptr, retlen] = passStringToWasm({});\n\n (new Uint32Array(wasm.memory.buffer))[wasmretptr / 4] = retlen;\n\n return retptr;\n\n \", invoc)\n\n }\n\n None => invoc,\n\n _ => unimplemented!(),\n\n };\n\n dst.push_str(\") {\\n\");\n\n dst.push_str(&extra);\n\n dst.push_str(&format!(\"{}\\n}}\", invoc));\n\n\n\n self.globals.push_str(\"export \");\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 69, "score": 79638.47114933748 }, { "content": " self.expose_borrowed_objects();\n\n arg_conversions.push_str(&format!(\"\\\n\n const idx{i} = addBorrowedObject({arg});\n\n \", i = i, arg = name));\n\n destructors.push_str(\"stack.pop();\\n\");\n\n pass(&format!(\"idx{}\", i));\n\n }\n\n custom if (custom as u32) & shared::TYPE_CUSTOM_REF_FLAG != 0 => {\n\n let custom = ((custom as u32) & !shared::TYPE_CUSTOM_REF_FLAG) -\n\n shared::TYPE_CUSTOM_START;\n\n let s = &self.program.custom_type_names[custom as usize / 2];\n\n dst_ts.push_str(&format!(\": {}\", s));\n\n if self.config.debug {\n\n self.expose_assert_class();\n\n arg_conversions.push_str(&format!(\"\\\n\n _assertClass({arg}, {struct_});\n\n \", arg = name, struct_ = s));\n\n }\n\n pass(&format!(\"{}.ptr\", name));\n\n }\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 70, "score": 79638.35301707438 }, { "content": " };\n\n\n\n bind(\"__wbindgen_object_clone_ref\", &|me| {\n\n me.expose_add_heap_object();\n\n me.expose_get_object();\n\n let bump_cnt = if me.config.debug {\n\n String::from(\"\n\n if (typeof(val) === 'number')\n\n throw new Error('corrupt slab');\n\n val.cnt += 1;\n\n \")\n\n } else {\n\n String::from(\"val.cnt += 1;\")\n\n };\n\n format!(\"\n\n function(idx) {{\n\n // If this object is on the stack promote it to the heap.\n\n if ((idx & 1) === 1)\n\n return addHeapObject(getObject(idx));\n\n\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 71, "score": 79637.35939640623 }, { "content": " \", i = i, arg = name));\n\n pass(&format!(\"ptr{}\", i));\n\n pass(&format!(\"len{}\", i));\n\n if *arg == shared::TYPE_BORROWED_STR {\n\n destructors.push_str(&format!(\"\\n\\\n\n wasm.__wbindgen_free(ptr{i}, len{i});\\n\\\n\n \", i = i));\n\n self.required_internal_exports.insert(\"__wbindgen_free\");\n\n }\n\n }\n\n shared::TYPE_JS_OWNED => {\n\n dst_ts.push_str(\": any\");\n\n self.expose_add_heap_object();\n\n arg_conversions.push_str(&format!(\"\\\n\n const idx{i} = addHeapObject({arg});\n\n \", i = i, arg = name));\n\n pass(&format!(\"idx{}\", i));\n\n }\n\n shared::TYPE_JS_REF => {\n\n dst_ts.push_str(\": any\");\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 72, "score": 79636.5939985425 }, { "content": " passed = passed_args,\n\n convert_ret = convert_ret,\n\n ));\n\n } else {\n\n dst.push_str(&format!(\"\\\n\n try {{\n\n const ret = wasm.{f}({passed});\n\n {convert_ret}\n\n }} finally {{\n\n {destructors}\n\n }}\n\n \",\n\n f = wasm_name,\n\n passed = passed_args,\n\n destructors = destructors,\n\n convert_ret = convert_ret,\n\n ));\n\n }\n\n dst.push_str(\"}\");\n\n (format!(\"{} {}\", prefix, dst), format!(\"{} {}\", prefix, dst_ts))\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 73, "score": 79634.41220468475 }, { "content": " dst.push_str(&format!(\"arg{}\", i));\n\n }\n\n shared::TYPE_BORROWED_STR => {\n\n self.expose_get_string_from_wasm();\n\n invocation.push_str(&format!(\"getStringFromWasm(ptr{0}, len{0})\", i));\n\n dst.push_str(&format!(\"ptr{0}, len{0}\", i));\n\n }\n\n shared::TYPE_STRING => {\n\n self.expose_get_string_from_wasm();\n\n dst.push_str(&format!(\"ptr{0}, len{0}\", i));\n\n extra.push_str(&format!(\"\n\n let arg{0} = getStringFromWasm(ptr{0}, len{0});\n\n wasm.__wbindgen_free(ptr{0}, len{0});\n\n \", i));\n\n invocation.push_str(&format!(\"arg{}\", i));\n\n self.required_internal_exports.insert(\"__wbindgen_free\");\n\n }\n\n shared::TYPE_JS_OWNED => {\n\n self.expose_take_object();\n\n invocation.push_str(&format!(\"takeObject(arg{})\", i));\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 74, "score": 79632.34469744291 }, { "content": " console.log(ptr, len);\n\n if (ptr === 0) {{\n\n a = Symbol();\n\n }} else {{\n\n a = Symbol(getStringFromWasm(ptr, len));\n\n }}\n\n return addHeapObject(a);\n\n }}\")\n\n });\n\n\n\n bind(\"__wbindgen_is_symbol\", &|me| {\n\n me.expose_get_object();\n\n String::from(\"(i) => typeof(getObject(i)) == 'symbol' ? 1 : 0\")\n\n });\n\n\n\n bind(\"__wbindgen_throw\", &|me| {\n\n me.expose_get_string_from_wasm();\n\n format!(\"\n\n function(ptr, len) {{\n\n throw new Error(getStringFromWasm(ptr, len));\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 75, "score": 79632.28749921016 }, { "content": " }}\n\n \")\n\n });\n\n\n\n bind(\"__wbindgen_string_get\", &|me| {\n\n me.expose_pass_string_to_wasm();\n\n me.expose_get_object();\n\n String::from(\"(i, len_ptr) => {\n\n let obj = getObject(i);\n\n if (typeof(obj) !== 'string')\n\n return 0;\n\n const [ptr, len] = passStringToWasm(obj);\n\n (new Uint32Array(wasm.memory.buffer))[len_ptr / 4] = len;\n\n return ptr;\n\n }\")\n\n });\n\n }\n\n\n\n let js = format!(\"\n\n /* tslint:disable */\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 76, "score": 79632.28983606292 }, { "content": " bind(\"__wbindgen_number_new\", &|me| {\n\n me.expose_add_heap_object();\n\n String::from(\"addHeapObject\")\n\n });\n\n\n\n bind(\"__wbindgen_number_get\", &|me| {\n\n me.expose_get_object();\n\n format!(\"\n\n function(n, invalid) {{\n\n let obj = getObject(n);\n\n if (typeof(obj) === 'number')\n\n return obj;\n\n (new Uint8Array(wasm.memory.buffer))[invalid] = 1;\n\n return 0;\n\n }}\n\n \")\n\n });\n\n\n\n bind(\"__wbindgen_undefined_new\", &|me| {\n\n me.expose_add_heap_object();\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 77, "score": 79631.1771547146 }, { "content": " dst.push_str(&format!(\"\n\n constructor(ptr, sym) {{\n\n _checkToken(sym);\n\n this.ptr = ptr;\n\n }}\n\n \"));\n\n ts_dst.push_str(\"constructor(ptr: number, sym: Symbol);\\n\");\n\n } else {\n\n dst.push_str(&format!(\"\n\n constructor(ptr) {{\n\n this.ptr = ptr;\n\n }}\n\n \"));\n\n ts_dst.push_str(\"constructor(ptr: number);\\n\");\n\n }\n\n\n\n dst.push_str(&format!(\"\n\n free() {{\n\n const ptr = this.ptr;\n\n this.ptr = 0;\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 78, "score": 79630.74866244613 }, { "content": " String::from(\"() => addHeapObject(undefined)\")\n\n });\n\n\n\n bind(\"__wbindgen_null_new\", &|me| {\n\n me.expose_add_heap_object();\n\n String::from(\"() => addHeapObject(null)\")\n\n });\n\n\n\n bind(\"__wbindgen_is_null\", &|me| {\n\n me.expose_get_object();\n\n String::from(\"(idx) => getObject(idx) === null ? 1 : 0\")\n\n });\n\n\n\n bind(\"__wbindgen_is_undefined\", &|me| {\n\n me.expose_get_object();\n\n String::from(\"(idx) => getObject(idx) === undefined ? 1 : 0\")\n\n });\n\n\n\n bind(\"__wbindgen_boolean_new\", &|me| {\n\n me.expose_add_heap_object();\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 79, "score": 79630.57716145461 }, { "content": " String::from(\"(v) => addHeapObject(v == 1)\")\n\n });\n\n\n\n bind(\"__wbindgen_boolean_get\", &|me| {\n\n me.expose_get_object();\n\n String::from(\"(i) => {\n\n let v = getObject(i);\n\n if (typeof(v) == 'boolean') {\n\n return v ? 1 : 0;\n\n } else {\n\n return 2;\n\n }\n\n }\")\n\n });\n\n\n\n bind(\"__wbindgen_symbol_new\", &|me| {\n\n me.expose_get_string_from_wasm();\n\n me.expose_add_heap_object();\n\n format!(\"(ptr, len) => {{\n\n let a;\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 80, "score": 79630.16975561561 }, { "content": " // Otherwise if the object is on the heap just bump the\n\n // refcount and move on\n\n const val = slab[idx >> 1];\n\n {}\n\n return idx;\n\n }}\n\n \", bump_cnt)\n\n });\n\n\n\n bind(\"__wbindgen_object_drop_ref\", &|me| {\n\n me.expose_drop_ref();\n\n \"dropRef\".to_string()\n\n });\n\n\n\n bind(\"__wbindgen_string_new\", &|me| {\n\n me.expose_add_heap_object();\n\n me.expose_get_string_from_wasm();\n\n String::from(\"(p, l) => addHeapObject(getStringFromWasm(p, l))\")\n\n });\n\n\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 81, "score": 79629.86220443835 }, { "content": "fn term<'a>(cursor: syn::buffer::Cursor<'a>, name: &str)\n\n -> syn::synom::PResult<'a, ()>\n\n{\n\n if let Some((_span, term, next)) = cursor.term() {\n\n if term.as_str() == name {\n\n return Ok(((), next))\n\n }\n\n }\n\n syn::parse_error()\n\n}\n\n\n", "file_path": "crates/wasm-bindgen-macro/src/ast.rs", "rank": 82, "score": 76693.80846084433 }, { "content": "pub trait FromRefMutWasmBoundary: WasmBoundary {\n\n type RefAnchor: DerefMut<Target = Self>;\n\n\n\n unsafe fn from_js_ref_mut(js: Self::Js) -> Self::RefAnchor;\n\n}\n\n\n", "file_path": "src/convert.rs", "rank": 83, "score": 75785.6579223277 }, { "content": " function getObject(idx) {{\n\n if ((idx & 1) === 1) {{\n\n return stack[idx >> 1];\n\n }} else {{\n\n const val = slab[idx >> 1];\n\n {}\n\n }}\n\n }}\n\n \", get_obj));\n\n }\n\n\n\n fn expose_check_token(&mut self) {\n\n if !self.exposed_globals.insert(\"check_token\") {\n\n return\n\n }\n\n self.globals.push_str(&format!(\"\n\n const token = Symbol('foo');\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 84, "score": 72403.80674471882 }, { "content": " function dropRef(idx) {{\n\n {}\n\n\n\n let obj = slab[idx >> 1];\n\n {}\n\n\n\n // If we hit 0 then free up our space in the slab\n\n slab[idx >> 1] = slab_next;\n\n slab_next = idx >> 1;\n\n }}\n\n \", validate_owned, dec_ref));\n\n }\n\n\n\n fn expose_global_stack(&mut self) {\n\n if !self.exposed_globals.insert(\"stack\") {\n\n return\n\n }\n\n self.globals.push_str(&format!(\"\n\n let stack = [];\n\n \"));\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 85, "score": 72403.80674471882 }, { "content": " function _checkToken(sym) {{\n\n if (token !== sym)\n\n throw new Error('cannot invoke `new` directly');\n\n }}\n\n \"));\n\n }\n\n\n\n fn expose_assert_num(&mut self) {\n\n if !self.exposed_globals.insert(\"assert_num\") {\n\n return\n\n }\n\n self.globals.push_str(&format!(\"\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 86, "score": 72403.80674471882 }, { "content": " function takeObject(idx) {{\n\n const ret = getObject(idx);\n\n dropRef(idx);\n\n return ret;\n\n }}\n\n \"));\n\n }\n\n\n\n fn expose_add_heap_object(&mut self) {\n\n if !self.exposed_globals.insert(\"add_heap_object\") {\n\n return\n\n }\n\n self.expose_global_slab();\n\n self.expose_global_slab_next();\n\n let set_slab_next = if self.config.debug {\n\n String::from(\"\n\n if (typeof(next) !== 'number')\n\n throw new Error('corrupt slab');\n\n slab_next = next;\n\n \")\n\n } else {\n\n String::from(\"\n\n slab_next = next;\n\n \")\n\n };\n\n self.globals.push_str(&format!(\"\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 87, "score": 72403.80674471882 }, { "content": " function passStringToWasm(arg) {{\n\n if (typeof(arg) !== 'string')\n\n throw new Error('expected a string argument');\n\n const buf = new TextEncoder('utf-8').encode(arg);\n\n const len = buf.length;\n\n const ptr = wasm.__wbindgen_malloc(len);\n\n let array = new Uint8Array(wasm.memory.buffer);\n\n array.set(buf, ptr);\n\n return [ptr, len];\n\n }}\n\n \"));\n\n }\n\n }\n\n\n\n fn expose_get_string_from_wasm(&mut self) {\n\n if !self.exposed_globals.insert(\"get_string_from_wasm\") {\n\n return\n\n }\n\n if self.config.nodejs {\n\n self.globals.push_str(&format!(\"\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 88, "score": 70278.61715462961 }, { "content": " function addBorrowedObject(obj) {{\n\n stack.push(obj);\n\n return ((stack.length - 1) << 1) | 1;\n\n }}\n\n \"));\n\n }\n\n\n\n fn expose_take_object(&mut self) {\n\n if !self.exposed_globals.insert(\"take_object\") {\n\n return\n\n }\n\n self.expose_get_object();\n\n self.expose_drop_ref();\n\n self.globals.push_str(&format!(\"\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 89, "score": 70278.61715462961 }, { "content": " function addHeapObject(obj) {{\n\n if (slab_next == slab.length)\n\n slab.push(slab.length + 1);\n\n const idx = slab_next;\n\n const next = slab[idx];\n\n {}\n\n slab[idx] = {{ obj, cnt: 1 }};\n\n return idx << 1;\n\n }}\n\n \", set_slab_next));\n\n }\n\n}\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 90, "score": 70278.61715462961 }, { "content": " function _assertClass(instance, klass) {{\n\n if (!(instance instanceof klass))\n\n throw new Error(`expected instance of ${{klass.name}}`);\n\n return instance.ptr;\n\n }}\n\n \"));\n\n }\n\n\n\n fn expose_borrowed_objects(&mut self) {\n\n if !self.exposed_globals.insert(\"borrowed_objects\") {\n\n return\n\n }\n\n self.expose_global_stack();\n\n self.globals.push_str(&format!(\"\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 91, "score": 70278.61715462961 }, { "content": " function getStringFromWasm(ptr, len) {{\n\n const mem = new Uint8Array(wasm.memory.buffer);\n\n const slice = mem.slice(ptr, ptr + len);\n\n const ret = new TextDecoder('utf-8').decode(slice);\n\n return ret;\n\n }}\n\n \"));\n\n }\n\n }\n\n\n\n fn expose_assert_class(&mut self) {\n\n if !self.exposed_globals.insert(\"assert_class\") {\n\n return\n\n }\n\n self.globals.push_str(&format!(\"\n", "file_path": "crates/wasm-bindgen-cli-support/src/js.rs", "rank": 92, "score": 68274.6268565189 }, { "content": "pub trait WasmBoundary {\n\n type Js: WasmAbi;\n\n const DESCRIPTOR: char;\n\n\n\n fn into_js(self) -> Self::Js;\n\n unsafe fn from_js(js: Self::Js) -> Self;\n\n}\n\n\n", "file_path": "src/convert.rs", "rank": 93, "score": 53567.79883428689 }, { "content": " item.to_tokens(&mut ret);\n\n program.push_impl(i);\n\n }\n\n syn::Item::ForeignMod(ref f) => {\n\n program.push_foreign_mod(f);\n\n }\n\n _ => panic!(\"unexpected item in bindgen macro\"),\n\n }\n\n }\n\n\n\n // Generate wrappers for all the items that we've found\n\n\n\n for function in program.free_functions.iter() {\n\n bindgen_fn(function, &mut ret);\n\n }\n\n for (i, s) in program.structs.iter().enumerate() {\n\n bindgen_struct(i, s, &mut ret);\n\n }\n\n for i in program.imports.iter() {\n\n bindgen_import(i, &mut ret);\n", "file_path": "crates/wasm-bindgen-macro/src/lib.rs", "rank": 94, "score": 52426.194988539755 }, { "content": " module: module.clone(),\n\n function: import,\n\n });\n\n }\n\n }\n\n\n\n pub fn gen_foreign_item(&mut self,\n\n f: &syn::ForeignItem,\n\n allow_self: bool) -> (ImportFunction, bool) {\n\n let f = match *f {\n\n syn::ForeignItem::Fn(ref f) => f,\n\n _ => panic!(\"only foreign functions allowed for now, not statics\"),\n\n };\n\n\n\n let (wasm, mutable) = Function::from_decl(f.ident, &f.decl, allow_self);\n\n let is_method = match mutable {\n\n Some(false) => true,\n\n None => false,\n\n Some(true) => {\n\n panic!(\"mutable self methods not allowed in extern structs\");\n", "file_path": "crates/wasm-bindgen-macro/src/ast.rs", "rank": 95, "score": 52420.99108606361 }, { "content": " }\n\n\n\n Function::from_decl(input.ident, &input.decl, false).0\n\n }\n\n\n\n pub fn from_decl(name: syn::Ident,\n\n decl: &syn::FnDecl,\n\n allow_self: bool) -> (Function, Option<bool>) {\n\n if decl.variadic.is_some() {\n\n panic!(\"can't bindgen variadic functions\")\n\n }\n\n if decl.generics.params.len() > 0 {\n\n panic!(\"can't bindgen functions with lifetime or type parameters\")\n\n }\n\n\n\n let mut mutable = None;\n\n let arguments = decl.inputs.iter()\n\n .filter_map(|arg| {\n\n match *arg {\n\n syn::FnArg::Captured(ref c) => Some(c),\n", "file_path": "crates/wasm-bindgen-macro/src/ast.rs", "rank": 96, "score": 52420.646983827326 }, { "content": "use proc_macro2::Span;\n\nuse quote::{Tokens, ToTokens};\n\nuse shared;\n\nuse syn;\n\n\n\npub struct Program {\n\n pub structs: Vec<Struct>,\n\n pub free_functions: Vec<Function>,\n\n pub imports: Vec<Import>,\n\n pub imported_structs: Vec<ImportStruct>,\n\n}\n\n\n\npub struct Function {\n\n pub name: syn::Ident,\n\n pub arguments: Vec<Type>,\n\n pub ret: Option<Type>,\n\n}\n\n\n\npub struct Import {\n\n pub module: String,\n", "file_path": "crates/wasm-bindgen-macro/src/ast.rs", "rank": 97, "score": 52420.295243966175 } ]
Rust
blocky-net/src/chat/component.rs
JAD3N/minecraft
cfa74fa48ca9c1399eb02cd11332f86121e7bd85
use super::{Style, TextComponent, TranslatableComponent, TranslatableComponentArg}; use crate::{AsJson, FromJson}; use thiserror::Error; #[derive(Error, Debug)] pub enum ComponentError { #[error("failed to parse json")] Parse, } pub trait ComponentClone { fn clone_box(&self) -> Box<dyn Component>; } pub trait ComponentContent { fn contents(&self) -> &str { "" } } pub trait Component: mopa::Any + AsJson + ComponentContent + ComponentClone { fn siblings(&self) -> &Vec<Box<dyn Component>>; fn siblings_mut(&mut self) -> &mut Vec<Box<dyn Component>>; fn append(&mut self, component: Box<dyn Component>) { self.siblings_mut().push(component); } fn style(&self) -> &Style; fn style_mut(&mut self) -> &mut Style; fn set_style(&mut self, style: Style) { *self.style_mut() = style; } fn get_base_json(&self) -> serde_json::Value { let mut value = json!({}); let style = self.style().as_json(); if let serde_json::Value::Object(m) = style { for entry in m { value[entry.0] = entry.1; } } if !self.siblings().is_empty() { value["extra"] = self .siblings() .iter() .map(|sibling| sibling.as_json()) .collect::<serde_json::Value>() .into(); } value } } mopafy!(Component); impl Clone for Box<dyn Component> { fn clone(&self) -> Box<dyn Component> { self.clone_box() } } impl FromJson for Box<dyn Component> { type Err = ComponentError; fn from_json(value: &serde_json::Value) -> Result<Self, Self::Err> { if value.is_string() { Ok(Box::new(TextComponent::new(value.as_str().unwrap()))) } else if value.is_object() { let obj = value.as_object().unwrap(); let mut c: Box<dyn Component> = if let Some(text) = obj.get("text") { let text = text.as_str().ok_or(ComponentError::Parse)?; Box::new(TextComponent::new(text)) } else if let Some(translate) = obj.get("translate") { let translate = translate.as_str().ok_or(ComponentError::Parse)?; let mut args = vec![]; if let Some(with) = obj.get("with") { let with = with.as_array().ok_or(ComponentError::Parse)?; for arg in with { if let Ok(sub_c) = Self::from_json(arg) { if sub_c.is::<TextComponent>() && sub_c.style().is_empty() && sub_c.siblings().is_empty() { let text = sub_c.contents().into(); args.push(TranslatableComponentArg::String(text)); } else { args.push(TranslatableComponentArg::Component(sub_c)); } } } } Box::new(TranslatableComponent::new(translate, args)) } else { return Err(ComponentError::Parse); }; if obj.contains_key("extra") && obj["extra"].is_array() { for entry in value["extra"].as_array().unwrap() { c.append(Self::from_json(entry)?); } } if let Ok(style) = Style::from_json(value) { if !style.is_empty() { c.set_style(style); } } Ok(c) } else { Err(ComponentError::Parse) } } } #[macro_export] macro_rules! component { ($svis:vis struct $name:ident { $($fvis:vis $fname:ident: $fty:ty),* $(,)? }) => { #[derive(Clone)] $svis struct $name { siblings: Vec<Box<dyn Component>>, style: Style, $($fvis $fname: $fty),* } impl $crate::chat::Component for $name { fn siblings(&self) -> &Vec<Box<dyn Component>> { self.siblings.as_ref() } fn siblings_mut(&mut self) -> &mut Vec<Box<dyn Component>> { self.siblings.as_mut() } fn style(&self) -> &Style { &self.style } fn style_mut(&mut self) -> &mut Style { &mut self.style } } }; }
use super::{Style, TextComponent, TranslatableComponent, TranslatableComponentArg}; use crate::{AsJson, FromJson}; use thiserror::Error; #[derive(Error, Debug)] pub enum ComponentError { #[error("failed to parse json")] Parse, } pub trait ComponentClone { fn clone_box(&self) -> Box<dyn Component>; } pub trait ComponentContent { fn contents(&self) -> &str { "" } } pub trait Component: mopa::Any + AsJson + ComponentContent + ComponentClone { fn siblings(&self) -> &Vec<Box<dyn Component>>; fn siblings_mut(&mut self) -> &mut Vec<Box<dyn Component>>; fn append(&mut self, component: Box<dyn Component>) { self.siblings_mut().push(component); } fn style(&self) -> &Style; fn style_mut(&mut self) -> &mut Style; fn set_style(&mut self, style: Style) { *self.style_mut() = style; } fn get_base_json(&self) -> serde_json::Value { let mut value = json!({}); let style = self.style().as_json(); if let serde_json::Value::Object(m) = style { for entry in m { value[entry.0] = entry.1; } } if !self.siblings().is_empty() { value["extra"] = self .siblings() .iter() .map(|sibling| sibling.as_json()) .collect::<serde_json::Value>() .into(); } value } } mopafy!(Component); impl Clone for Box<dyn Component> { fn clone(&self) -> Box<dyn Component> { self.clone_box() } } impl FromJson for Box<dyn Component> { type Err = ComponentError;
} #[macro_export] macro_rules! component { ($svis:vis struct $name:ident { $($fvis:vis $fname:ident: $fty:ty),* $(,)? }) => { #[derive(Clone)] $svis struct $name { siblings: Vec<Box<dyn Component>>, style: Style, $($fvis $fname: $fty),* } impl $crate::chat::Component for $name { fn siblings(&self) -> &Vec<Box<dyn Component>> { self.siblings.as_ref() } fn siblings_mut(&mut self) -> &mut Vec<Box<dyn Component>> { self.siblings.as_mut() } fn style(&self) -> &Style { &self.style } fn style_mut(&mut self) -> &mut Style { &mut self.style } } }; }
fn from_json(value: &serde_json::Value) -> Result<Self, Self::Err> { if value.is_string() { Ok(Box::new(TextComponent::new(value.as_str().unwrap()))) } else if value.is_object() { let obj = value.as_object().unwrap(); let mut c: Box<dyn Component> = if let Some(text) = obj.get("text") { let text = text.as_str().ok_or(ComponentError::Parse)?; Box::new(TextComponent::new(text)) } else if let Some(translate) = obj.get("translate") { let translate = translate.as_str().ok_or(ComponentError::Parse)?; let mut args = vec![]; if let Some(with) = obj.get("with") { let with = with.as_array().ok_or(ComponentError::Parse)?; for arg in with { if let Ok(sub_c) = Self::from_json(arg) { if sub_c.is::<TextComponent>() && sub_c.style().is_empty() && sub_c.siblings().is_empty() { let text = sub_c.contents().into(); args.push(TranslatableComponentArg::String(text)); } else { args.push(TranslatableComponentArg::Component(sub_c)); } } } } Box::new(TranslatableComponent::new(translate, args)) } else { return Err(ComponentError::Parse); }; if obj.contains_key("extra") && obj["extra"].is_array() { for entry in value["extra"].as_array().unwrap() { c.append(Self::from_json(entry)?); } } if let Ok(style) = Style::from_json(value) { if !style.is_empty() { c.set_style(style); } } Ok(c) } else { Err(ComponentError::Parse) } }
function_block-function_prefix_line
[ { "content": "pub trait FromJson {\n\n type Err;\n\n fn from_json(value: &serde_json::Value) -> Result<Self, Self::Err> where Self: Sized;\n\n}\n", "file_path": "blocky-net/src/lib.rs", "rank": 2, "score": 121584.79989159183 }, { "content": "pub trait AsJson {\n\n fn as_json(&self) -> serde_json::Value;\n\n}\n\n\n", "file_path": "blocky-net/src/lib.rs", "rank": 3, "score": 121196.47854878686 }, { "content": "fn encode_nbt(buf: &mut BytesMut, name: &str, tag: &Tag) -> anyhow::Result<()> {\n\n let id = tag.kind().id();\n\n let name = name.as_bytes();\n\n let name_len = name.len();\n\n\n\n // reserve id and name\n\n buf.reserve(1 + name_len * 2);\n\n\n\n // add nbt id\n\n buf.put_u8(id);\n\n\n\n // add name length and contents\n\n buf.put_u16(name_len as u16);\n\n buf.put_slice(name);\n\n\n\n encode_tag(buf, &tag)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "blocky-nbt/src/encoder.rs", "rank": 5, "score": 94729.5878936133 }, { "content": "pub trait Index {\n\n fn index_into<'a>(&self, tag: &'a Tag) -> Option<&'a Tag>;\n\n fn index_into_mut<'a>(&self, tag: &'a mut Tag) -> Option<&'a mut Tag>;\n\n fn index_or_insert<'a>(&self, tag: &'a mut Tag) -> &'a mut Tag;\n\n}\n\n\n\nimpl Index for str {\n\n fn index_into<'a>(&self, tag: &'a Tag) -> Option<&'a Tag> {\n\n match tag {\n\n Tag::Compound(m) => m.get(self),\n\n _ => None,\n\n }\n\n }\n\n\n\n fn index_into_mut<'a>(&self, tag: &'a mut Tag) -> Option<&'a mut Tag> {\n\n match tag {\n\n Tag::Compound(m) => m.get_mut(self),\n\n _ => None,\n\n }\n\n }\n", "file_path": "blocky-nbt/src/tag/index.rs", "rank": 6, "score": 76262.35035830886 }, { "content": "fn quote_and_escape(s: &str) -> String {\n\n let mut builder = String::new();\n\n let mut quote_chr = None;\n\n\n\n for chr in s.chars() {\n\n if chr == '\\\\' {\n\n builder.push('\\\\');\n\n } else if chr == '\"' || chr == '\\'' {\n\n if quote_chr.is_none() {\n\n quote_chr = Some(if chr == '\"' { '\\'' } else { '\"' });\n\n }\n\n\n\n if quote_chr.is_some() && quote_chr.unwrap() == chr {\n\n builder.push('\\\\');\n\n }\n\n }\n\n\n\n builder.push(chr);\n\n }\n\n\n", "file_path": "blocky-nbt/src/tag/mod.rs", "rank": 7, "score": 74480.21153576169 }, { "content": "pub trait ProtocolLength {\n\n fn len(&self) -> usize;\n\n}\n\n\n", "file_path": "blocky-net/src/protocol/mod.rs", "rank": 8, "score": 74477.0365211668 }, { "content": "fn encode_tag(buf: &mut BytesMut, tag: &Tag) -> anyhow::Result<()> {\n\n let kind = tag.kind();\n\n\n\n match tag {\n\n Tag::End => panic!(\"cannot encode null\"),\n\n\n\n Tag::Byte(value) => {\n\n buf.reserve(1);\n\n buf.put_i8(*value);\n\n },\n\n Tag::Short(value) => {\n\n buf.reserve(2);\n\n buf.put_i16(*value);\n\n },\n\n Tag::Int(value) => {\n\n buf.reserve(4);\n\n buf.put_i32(*value);\n\n },\n\n Tag::Long(value) => {\n\n buf.reserve(8);\n", "file_path": "blocky-nbt/src/encoder.rs", "rank": 9, "score": 69693.21497058134 }, { "content": "pub trait ProtocolRead: ProtocolLength {\n\n fn read<U: Buf>(_src: &mut U) -> Result<Self, ProtocolError> where Self: Sized {\n\n unimplemented!(\"protocol read not implemented\");\n\n }\n\n}\n\n\n", "file_path": "blocky-net/src/protocol/mod.rs", "rank": 10, "score": 69334.31580462908 }, { "content": "pub trait ProtocolWrite: ProtocolLength {\n\n fn write<U: BufMut>(&self, _dst: &mut U) -> Result<(), ProtocolError> where Self: Sized {\n\n unimplemented!(\"protocol write not implemented\");\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! protocol_struct {\n\n ($name:ident { $($fname:ident: $fty:ty),* $(,)? }) => {\n\n pub struct $name {\n\n $(pub $fname: $fty),*\n\n }\n\n\n\n impl $crate::protocol::ProtocolLength for $name {\n\n fn len(&self) -> usize {\n\n 0 $(+ self.$fname.len())*\n\n }\n\n }\n\n\n\n impl $crate::protocol::ProtocolRead for $name {\n", "file_path": "blocky-net/src/protocol/mod.rs", "rank": 11, "score": 69334.31580462908 }, { "content": "fn decode_nbt(buf: &mut Bytes) -> anyhow::Result<Nbt> {\n\n let id = buf.get_u8();\n\n\n\n let name_len = if id != 0 {\n\n buf.get_u16() as usize\n\n } else {\n\n 0\n\n };\n\n\n\n let name = if name_len > 0 {\n\n let s = match str::from_utf8(&buf.bytes()[0..name_len]) {\n\n Ok(s) => s.to_owned(),\n\n Err(_) => anyhow::bail!(\"failed to decode name\"),\n\n };\n\n\n\n // skip name string bytes\n\n buf.advance(name_len);\n\n\n\n // return name\n\n s\n", "file_path": "blocky-nbt/src/decoder.rs", "rank": 12, "score": 64816.99240977595 }, { "content": "fn decode_tag(buf: &mut Bytes, id: u8) -> anyhow::Result<Tag> {\n\n match id {\n\n 0 => Ok(Tag::End),\n\n\n\n 1 => Ok(Tag::Byte(buf.get_i8())),\n\n 2 => Ok(Tag::Short(buf.get_i16())),\n\n 3 => Ok(Tag::Int(buf.get_i32())),\n\n 4 => Ok(Tag::Long(buf.get_i64())),\n\n\n\n 5 => Ok(Tag::Float(buf.get_f32())),\n\n 6 => Ok(Tag::Double(buf.get_f64())),\n\n\n\n 8 => {\n\n let len = buf.get_i16() as usize;\n\n let s = match str::from_utf8(&buf.bytes()[0..len]) {\n\n Ok(s) => s.to_owned(),\n\n Err(_) => anyhow::bail!(\"failed to decode string\"),\n\n };\n\n\n\n // skip string bytes\n", "file_path": "blocky-nbt/src/decoder.rs", "rank": 13, "score": 60090.38057968695 }, { "content": "fn main() {\n\n let matches = clap_app!(blocky =>\n\n (version: crate_version!())\n\n (author: crate_authors!())\n\n (about: crate_description!())\n\n\n\n (@arg initSettings: --initSettings \"Initializes 'server.properties' then quits\")\n\n (@arg demo: --demo)\n\n (@arg bonusChest: --bonusChest)\n\n (@arg forceUpgrade: --forceUpgrade)\n\n (@arg eraseCache: --eraseCache)\n\n (@arg safeMode: --safeMode \"Loads level with vanilla datapack only\")\n\n\n\n (@arg singleplayer: --singleplayer +takes_value)\n\n (@arg universe: --universe +takes_value)\n\n (@arg world: --world +takes_value)\n\n (@arg port: --port +takes_value)\n\n (@arg serverId: --serverId +takes_value)\n\n ).get_matches();\n\n\n\n let test = Test {\n\n text: TextComponent::new(\"this is a fun test!\"),\n\n };\n\n}", "file_path": "blocky/src/main.rs", "rank": 14, "score": 44696.12509063697 }, { "content": "trait Test: Sized {\n\n fn test(&self) -> Vec<Box<Self>>;\n\n}\n\n\n\npub mod chat;\n\n#[macro_use]\n\npub mod protocol;\n\n\n", "file_path": "blocky-net/src/lib.rs", "rank": 15, "score": 38733.05523860082 }, { "content": "use std::str::FromStr;\n\nuse blocky_core::ResourceLocation;\n\nuse thiserror::Error;\n\nuse crate::{AsJson, FromJson};\n\nuse super::{ClickEvent, TextColor};\n\n\n\n#[derive(Error, Debug)]\n\npub enum StyleError {\n\n #[error(\"invalid type for: {0}\")]\n\n InvalidType(String),\n\n #[error(\"invalid color: {0}\")]\n\n InvalidColor(String),\n\n #[error(\"failed to parse style\")]\n\n Parse,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct Style {\n\n pub color: Option<TextColor>,\n\n pub bold: Option<bool>,\n", "file_path": "blocky-net/src/chat/style.rs", "rank": 16, "score": 27808.803075030417 }, { "content": " obj[\"clickEvent\"] = click_event.as_json();\n\n }\n\n\n\n if let Some(insertion) = &self.insertion {\n\n obj[\"insertion\"] = insertion.clone().into();\n\n }\n\n\n\n if let Some(font) = &self.font {\n\n obj[\"font\"] = font.to_string().into();\n\n }\n\n\n\n obj\n\n }\n\n }\n\n}\n\n\n\nimpl FromJson for Style {\n\n type Err = StyleError;\n\n\n\n fn from_json(value: &serde_json::Value) -> Result<Self, Self::Err> {\n", "file_path": "blocky-net/src/chat/style.rs", "rank": 17, "score": 27808.195833262427 }, { "content": " let mut style = Style::default();\n\n\n\n if let Some(color) = value.get(\"color\") {\n\n let color = color\n\n .as_str()\n\n .ok_or(StyleError::InvalidType(String::from(\"color\")))?;\n\n\n\n if let Some(color) = TextColor::parse(color) {\n\n style.color = Some(color);\n\n } else {\n\n return Err(StyleError::InvalidColor(color.into()));\n\n }\n\n }\n\n\n\n if let Some(bold) = value.get(\"bold\") {\n\n let bold = bold\n\n .as_bool()\n\n .ok_or(StyleError::InvalidType(String::from(\"bold\")))?;\n\n\n\n style.bold = Some(bold);\n", "file_path": "blocky-net/src/chat/style.rs", "rank": 18, "score": 27803.303495737688 }, { "content": "\n\n if let Some(insertion) = value.get(\"insertion\") {\n\n let insertion = insertion\n\n .as_str()\n\n .ok_or(StyleError::InvalidType(String::from(\"insertion\")))?;\n\n\n\n style.insertion = Some(insertion.into());\n\n }\n\n\n\n if let Some(font) = value.get(\"font\") {\n\n let font = font\n\n .as_str()\n\n .ok_or(StyleError::InvalidType(String::from(\"font\")))?;\n\n\n\n let loc = ResourceLocation::from_str(font)\n\n .or(Err(StyleError::Parse))?;\n\n\n\n style.font = Some(loc);\n\n }\n\n\n\n Ok(style)\n\n }\n\n}\n", "file_path": "blocky-net/src/chat/style.rs", "rank": 19, "score": 27803.273049168973 }, { "content": " .as_bool()\n\n .ok_or(StyleError::InvalidType(String::from(\"strikethrough\")))?;\n\n\n\n style.strikethrough = Some(strikethrough);\n\n }\n\n\n\n if let Some(obfuscated) = value.get(\"obfuscated\") {\n\n let obfuscated = obfuscated\n\n .as_bool()\n\n .ok_or(StyleError::InvalidType(String::from(\"obfuscated\")))?;\n\n\n\n style.obfuscated = Some(obfuscated);\n\n }\n\n\n\n if let Some(click_event) = value.get(\"clickEvent\") {\n\n let click_event = ClickEvent::from_json(click_event)\n\n .or(Err(StyleError::Parse))?;\n\n\n\n style.click_event = Some(click_event);\n\n }\n", "file_path": "blocky-net/src/chat/style.rs", "rank": 20, "score": 27801.576594192564 }, { "content": "}\n\n\n\nimpl AsJson for Style {\n\n fn as_json(&self) -> serde_json::Value {\n\n if self.is_empty() {\n\n serde_json::Value::Null\n\n } else {\n\n let mut obj = json!({});\n\n\n\n // only set color if non-null\n\n if let Some(color) = &self.color {\n\n obj[\"color\"] = color.to_string().into();\n\n }\n\n\n\n // only set bold if non-null\n\n if let Some(bold) = self.bold {\n\n obj[\"bold\"] = bold.into();\n\n }\n\n\n\n // only set italic if non-null\n", "file_path": "blocky-net/src/chat/style.rs", "rank": 21, "score": 27801.358508587597 }, { "content": " }\n\n\n\n if let Some(italic) = value.get(\"italic\") {\n\n let italic = italic\n\n .as_bool()\n\n .ok_or(StyleError::InvalidType(String::from(\"italic\")))?;\n\n\n\n style.italic = Some(italic);\n\n }\n\n\n\n if let Some(underlined) = value.get(\"underlined\") {\n\n let underlined = underlined\n\n .as_bool()\n\n .ok_or(StyleError::InvalidType(String::from(\"underlined\")))?;\n\n\n\n style.underlined = Some(underlined);\n\n }\n\n\n\n if let Some(strikethrough) = value.get(\"strikethrough\") {\n\n let strikethrough = strikethrough\n", "file_path": "blocky-net/src/chat/style.rs", "rank": 22, "score": 27798.81566934958 }, { "content": " pub italic: Option<bool>,\n\n pub underlined: Option<bool>,\n\n pub strikethrough: Option<bool>,\n\n pub obfuscated: Option<bool>,\n\n pub click_event: Option<ClickEvent>,\n\n // TODO: pub hover_event: Option<HoverEvent>,\n\n pub insertion: Option<String>,\n\n pub font: Option<ResourceLocation>,\n\n}\n\n\n\nimpl Default for Style {\n\n fn default() -> Self {\n\n Self {\n\n color: None,\n\n bold: None,\n\n italic: None,\n\n underlined: None,\n\n strikethrough: None,\n\n obfuscated: None,\n\n click_event: None,\n", "file_path": "blocky-net/src/chat/style.rs", "rank": 23, "score": 27798.315584363078 }, { "content": " // hover_event: None,\n\n insertion: None,\n\n font: None,\n\n }\n\n }\n\n}\n\n\n\nimpl Style {\n\n pub fn is_empty(&self) -> bool {\n\n self.color.is_none()\n\n && self.bold.is_none()\n\n && self.italic.is_none()\n\n && self.underlined.is_none()\n\n && self.strikethrough.is_none()\n\n && self.obfuscated.is_none()\n\n && self.click_event.is_none()\n\n // && self.hover_event.is_none()\n\n && self.insertion.is_none()\n\n && self.font.is_none()\n\n }\n", "file_path": "blocky-net/src/chat/style.rs", "rank": 24, "score": 27795.482521605445 }, { "content": " if let Some(italic) = self.italic {\n\n obj[\"italic\"] = italic.into();\n\n }\n\n\n\n // only set underlined if non-null\n\n if let Some(underlined) = self.underlined {\n\n obj[\"underlined\"] = underlined.into();\n\n }\n\n\n\n // only set strikethrough if non-null\n\n if let Some(strikethrough) = self.strikethrough {\n\n obj[\"strikethrough\"] = strikethrough.into();\n\n }\n\n\n\n // only set obfuscated if non-null\n\n if let Some(obfuscated) = self.obfuscated {\n\n obj[\"obfuscated\"] = obfuscated.into();\n\n }\n\n\n\n if let Some(click_event) = &self.click_event {\n", "file_path": "blocky-net/src/chat/style.rs", "rank": 25, "score": 27789.61765883695 }, { "content": "use crate::AsJson;\n\nuse super::{\n\n Component,\n\n ComponentClone,\n\n ComponentContent,\n\n Style,\n\n};\n\n\n\n#[derive(Clone)]\n\npub enum TranslatableComponentArg {\n\n String(String),\n\n Component(Box<dyn Component>),\n\n}\n\n\n\nimpl AsJson for TranslatableComponentArg {\n\n fn as_json(&self) -> serde_json::Value {\n\n match self {\n\n Self::String(s) => s.clone().into(),\n\n Self::Component(c) => c.as_json(),\n\n }\n", "file_path": "blocky-net/src/chat/translatable_component.rs", "rank": 33, "score": 26464.37755719399 }, { "content": "use crate::AsJson;\n\nuse super::{\n\n Component,\n\n ComponentClone,\n\n ComponentContent,\n\n Style,\n\n};\n\n\n\ncomponent!(pub struct TextComponent {\n\n text: String,\n\n});\n\n\n\nimpl TextComponent {\n\n pub fn new<S: Into<String>>(text: S) -> Self {\n\n Self {\n\n siblings: vec![],\n\n style: Style::default(),\n\n text: text.into(),\n\n }\n\n }\n", "file_path": "blocky-net/src/chat/text_component.rs", "rank": 34, "score": 26464.19403130457 }, { "content": " }\n\n\n\n pub fn set_args(&mut self, args: Vec<TranslatableComponentArg>) {\n\n self.args = args;\n\n }\n\n}\n\n\n\nimpl AsJson for TranslatableComponent {\n\n fn as_json(&self) -> serde_json::Value {\n\n let mut value = self.get_base_json();\n\n value[\"translate\"] = self.key.clone().into();\n\n value[\"with\"] = self.args\n\n .iter()\n\n .map(|arg| arg.as_json())\n\n .collect::<serde_json::Value>()\n\n .into();\n\n value\n\n }\n\n}\n\n\n", "file_path": "blocky-net/src/chat/translatable_component.rs", "rank": 35, "score": 26460.758390528128 }, { "content": "}\n\n\n\nimpl ComponentContent for TextComponent {\n\n fn contents(&self) -> &str {\n\n &self.text\n\n }\n\n}\n\n\n\nimpl ComponentClone for TextComponent {\n\n fn clone_box(&self) -> Box<dyn Component> {\n\n Box::new(Self {\n\n siblings: vec![],\n\n style: Style::default(),\n\n text: self.text.clone(),\n\n })\n\n }\n\n}\n", "file_path": "blocky-net/src/chat/text_component.rs", "rank": 36, "score": 26460.393502383195 }, { "content": "\n\n pub fn text(&self) -> &String {\n\n &self.text\n\n }\n\n\n\n pub fn text_mut(&mut self) -> &mut String {\n\n &mut self.text\n\n }\n\n\n\n pub fn set_text<S: Into<String>>(&mut self, text: S) {\n\n self.text = text.into();\n\n }\n\n}\n\n\n\nimpl AsJson for TextComponent {\n\n fn as_json(&self) -> serde_json::Value {\n\n let mut value = self.get_base_json();\n\n value[\"text\"] = self.text.clone().into();\n\n value\n\n }\n", "file_path": "blocky-net/src/chat/text_component.rs", "rank": 37, "score": 26459.443636192518 }, { "content": "impl ComponentContent for TranslatableComponent {}\n\n\n\nimpl ComponentClone for TranslatableComponent {\n\n fn clone_box(&self) -> Box<dyn Component> {\n\n Box::new(Self {\n\n siblings: vec![],\n\n style: Style::default(),\n\n key: self.key.clone(),\n\n args: self.args.clone(),\n\n })\n\n }\n\n}\n", "file_path": "blocky-net/src/chat/translatable_component.rs", "rank": 38, "score": 26458.47733646843 }, { "content": " }\n\n}\n\n\n\ncomponent!(pub struct TranslatableComponent {\n\n key: String,\n\n args: Vec<TranslatableComponentArg>,\n\n});\n\n\n\nimpl TranslatableComponent {\n\n pub fn new<S: Into<String>>(key: S, args: Vec<TranslatableComponentArg>) -> Self {\n\n Self {\n\n siblings: vec![],\n\n style: Style::default(),\n\n key: key.into(),\n\n args,\n\n }\n\n }\n\n\n\n pub fn new_with_empty_args<S: Into<String>>(key: S) -> Self {\n\n Self::new(key, vec![])\n", "file_path": "blocky-net/src/chat/translatable_component.rs", "rank": 39, "score": 26456.24711921423 }, { "content": " }\n\n\n\n pub fn key(&self) -> &String {\n\n &self.key\n\n }\n\n\n\n pub fn key_mut(&mut self) -> &mut String {\n\n &mut self.key\n\n }\n\n\n\n pub fn set_key<S: Into<String>>(&mut self, key: S) {\n\n self.key = key.into();\n\n }\n\n\n\n pub fn args(&self) -> &Vec<TranslatableComponentArg> {\n\n &self.args\n\n }\n\n\n\n pub fn args_mut(&mut self) -> &mut Vec<TranslatableComponentArg> {\n\n &mut self.args\n", "file_path": "blocky-net/src/chat/translatable_component.rs", "rank": 40, "score": 26450.602949066324 }, { "content": "}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct ClickEvent {\n\n action: ClickAction,\n\n value: String,\n\n}\n\n\n\nimpl ClickEvent {\n\n pub fn new<S: Into<String>>(action: ClickAction, value: S) -> Self {\n\n let value = value.into();\n\n Self { action, value }\n\n }\n\n\n\n pub fn action(&self) -> &ClickAction {\n\n &self.action\n\n }\n\n\n\n pub fn value(&self) -> &str {\n\n &self.value\n", "file_path": "blocky-net/src/chat/click_event.rs", "rank": 41, "score": 16.911195185594607 }, { "content": "use std::str::FromStr;\n\nuse thiserror::Error;\n\nuse crate::{AsJson, FromJson};\n\n\n\n#[derive(Error, Debug)]\n\npub enum ClickActionError {\n\n #[error(\"invalid click action: {0}\")]\n\n Invalid(String),\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum ClickAction {\n\n OpenUrl,\n\n OpenFile,\n\n RunCommand,\n\n SuggestCommand,\n\n ChangePage,\n\n CopyToClipboard,\n\n}\n\n\n", "file_path": "blocky-net/src/chat/click_event.rs", "rank": 42, "score": 15.282482104179364 }, { "content": "use thiserror::Error;\n\nuse bytes::{Buf, BufMut};\n\nuse crate::FromJson;\n\nuse crate::chat::Component;\n\n\n\n#[derive(Error, Debug)]\n\npub enum ProtocolError {\n\n #[error(\"not enough bytes remaining\")]\n\n NotEnoughBytes,\n\n\n\n #[error(\"invalid value\")]\n\n Invalid,\n\n\n\n #[error(\"value is too large\")]\n\n TooLarge,\n\n\n\n #[error(\"unknown protocol error\")]\n\n Unknown,\n\n}\n\n\n\n\n", "file_path": "blocky-net/src/protocol/mod.rs", "rank": 43, "score": 15.231413999356116 }, { "content": "impl ProtocolRead for bool {\n\n fn read<U: Buf>(src: &mut U) -> Result<Self, ProtocolError> {\n\n let value = <u8>::read(src)?;\n\n\n\n if value > 1 {\n\n Err(ProtocolError::Invalid)\n\n } else {\n\n Ok(value == 1)\n\n }\n\n }\n\n}\n\n\n\nimpl ProtocolWrite for bool {\n\n fn write<U: BufMut>(&self, dst: &mut U) -> Result<(), ProtocolError> {\n\n <u8>::write(&(*self as u8), dst)\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Var<T>(pub T);\n", "file_path": "blocky-net/src/protocol/mod.rs", "rank": 44, "score": 15.081281641363764 }, { "content": " }\n\n}\n\n\n\nimpl AsJson for ClickEvent {\n\n fn as_json(&self) -> serde_json::Value {\n\n json!({\n\n \"action\": self.action.name(),\n\n \"value\": self.value,\n\n })\n\n }\n\n}\n\n\n\nimpl FromJson for ClickEvent {\n\n type Err = ClickEventError;\n\n\n\n fn from_json(value: &serde_json::Value) -> Result<Self, Self::Err> {\n\n if value.is_object() {\n\n let map = value.as_object().unwrap();\n\n\n\n if map.contains_key(\"action\") && map.contains_key(\"value\") {\n", "file_path": "blocky-net/src/chat/click_event.rs", "rank": 45, "score": 14.919576200951637 }, { "content": "#[macro_use]\n\nextern crate lazy_static;\n\n\n\nmod decoder;\n\nmod encoder;\n\n#[macro_use]\n\nmod tag;\n\n\n\npub use tag::*;\n\n\n\nuse std::str;\n\nuse std::ops;\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct Nbt {\n\n pub name: String,\n\n pub tag: Tag,\n\n}\n\n\n\nimpl Nbt {\n", "file_path": "blocky-nbt/src/lib.rs", "rank": 46, "score": 14.635944162114619 }, { "content": "impl FromStr for ClickAction {\n\n type Err = ClickActionError;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s {\n\n \"open_url\" => Ok(Self::OpenUrl),\n\n \"open_file\" => Ok(Self::OpenFile),\n\n \"run_command\" => Ok(Self::RunCommand),\n\n \"suggest_command\" => Ok(Self::SuggestCommand),\n\n \"change_page\" => Ok(Self::ChangePage),\n\n \"copy_to_clipboard\" => Ok(Self::CopyToClipboard),\n\n _ => Err(ClickActionError::Invalid(s.into())),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Error, Debug)]\n\npub enum ClickEventError {\n\n #[error(\"failed to parse json into click event\")]\n\n Parse,\n", "file_path": "blocky-net/src/chat/click_event.rs", "rank": 47, "score": 14.104201914363188 }, { "content": "#[macro_use]\n\nmod component;\n\nmod text_component;\n\nmod translatable_component;\n\nmod style;\n\nmod text_color;\n\nmod click_event;\n\n\n\npub use component::*;\n\npub use text_component::*;\n\npub use translatable_component::*;\n\npub use style::*;\n\npub use text_color::*;\n\npub use click_event::*;", "file_path": "blocky-net/src/chat/mod.rs", "rank": 48, "score": 13.9736086525216 }, { "content": " m\n\n };\n\n}\n\n\n\nimpl TextColor {\n\n pub fn new(value: u32, name: Option<String>) -> Self {\n\n Self { value, name }\n\n }\n\n\n\n pub fn from_color(value: u32) -> Self {\n\n Self::new(value, None)\n\n }\n\n\n\n pub fn format_value(&self) -> String {\n\n format!(\"#{:06X}\", self.value)\n\n }\n\n\n\n pub fn parse(s: &str) -> Option<TextColor> {\n\n if s.starts_with(\"#\") {\n\n // try hex parse string into u32\n", "file_path": "blocky-net/src/chat/text_color.rs", "rank": 49, "score": 13.5397512421011 }, { "content": "\n\nimpl fmt::Display for ResourceLocation {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}:{}\", self.namespace, self.path)\n\n }\n\n}\n\n\n\nimpl FromStr for ResourceLocation {\n\n type Err = ResourceLocationError;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let (namespace, path) = match s.find('c') {\n\n Some(i) => s.split_at(i),\n\n None => (\"minecraft\", s),\n\n };\n\n\n\n Self::new(namespace, path)\n\n }\n\n}\n", "file_path": "blocky-core/src/resource_location.rs", "rank": 50, "score": 13.095667799169858 }, { "content": "use std::fmt;\n\nuse std::collections::HashMap;\n\nuse regex::Regex;\n\n\n\n#[derive(PartialEq, Debug)]\n\npub struct ChatFormatting {\n\n name: &'static str,\n\n code: char,\n\n is_format: bool,\n\n id: i32,\n\n color: Option<u32>,\n\n}\n\n\n\nlazy_static! {\n\n static ref NAME_REGEX: Regex = Regex::new(\"[^a-z]\").unwrap();\n\n static ref FORMATTING_BY_NAME: HashMap<String, &'static ChatFormatting> = {\n\n let mut m = HashMap::new();\n\n // iterate values and add into lookup\n\n for &value in ChatFormatting::VALUES.iter() {\n\n let name = ChatFormatting::clean_name(value.name());\n", "file_path": "blocky-core/src/chat_formatting.rs", "rank": 51, "score": 13.05361973107606 }, { "content": "pub type VarInt = Var<i32>;\n\npub type VarLong = Var<i64>;\n\n\n\nimpl<T: Into<i32>> From<T> for Var<i32> {\n\n fn from(value: T) -> Self {\n\n Var(value.into())\n\n }\n\n}\n\n\n\nimpl<T: Into<i64>> From<T> for Var<i64> {\n\n fn from(value: T) -> Self {\n\n Var(value.into())\n\n }\n\n}\n\n\n\nimpl ProtocolLength for Var<i32> {\n\n fn len(&self) -> usize {\n\n let mut value = self.0;\n\n\n\n for i in 1..5 {\n", "file_path": "blocky-net/src/protocol/mod.rs", "rank": 52, "score": 12.993077878852862 }, { "content": "impl ResourceLocation {\n\n pub fn new<S: Into<String>>(namespace: S, path: S) -> Result<Self, ResourceLocationError> {\n\n let mut namespace = namespace.into();\n\n let path = path.into();\n\n\n\n if namespace.is_empty() {\n\n namespace.push_str(\"minecraft\");\n\n }\n\n\n\n if !Self::is_valid_namespace(&namespace) {\n\n Err(ResourceLocationError::InvalidNamespace)\n\n } else if !Self::is_valid_path(&path) {\n\n Err(ResourceLocationError::InvalidPath)\n\n } else {\n\n Ok(Self { namespace, path })\n\n }\n\n }\n\n\n\n pub fn namespace(&self) -> &str {\n\n self.namespace.as_ref()\n", "file_path": "blocky-core/src/resource_location.rs", "rank": 53, "score": 12.855797154188885 }, { "content": " pub fn new(name: String, tag: Tag) -> Self {\n\n Self { name, tag }\n\n }\n\n\n\n pub fn get<I: Index>(&self, index: I) -> Option<&Tag> {\n\n self.tag.get(index)\n\n }\n\n\n\n pub fn get_mut<I: Index>(&mut self, index: I) -> Option<&mut Tag> {\n\n self.tag.get_mut(index)\n\n }\n\n\n\n pub fn insert<I: Index>(&mut self, index: I, value: Tag) {\n\n self.tag.insert(index, value);\n\n }\n\n}\n\n\n\nimpl<I: Index> ops::Index<I> for Tag {\n\n type Output = Self;\n\n\n", "file_path": "blocky-nbt/src/lib.rs", "rank": 54, "score": 12.840496583486715 }, { "content": "\n\nimpl<T: Component> ProtocolLength for T {\n\n fn len(&self) -> usize {\n\n let json = self.as_json().to_string();\n\n let len = String::len(&json);\n\n let len_var: Var<i32> = (len as i32).into();\n\n\n\n len_var.len() + len\n\n }\n\n}\n\n\n\nimpl<T: Component> ProtocolRead for T {\n\n fn read<U: Buf>(src: &mut U) -> Result<Self, ProtocolError> {\n\n let c = Box::<dyn Component>::read(src)?;\n\n\n\n if let Ok(c) = c.downcast() {\n\n Ok(*c)\n\n } else {\n\n Err(ProtocolError::Invalid)\n\n }\n", "file_path": "blocky-net/src/protocol/mod.rs", "rank": 55, "score": 12.745960912776907 }, { "content": " if len > 32767 {\n\n Err(ProtocolError::TooLarge)\n\n } else {\n\n let mut bytes = vec![0u8; len];\n\n\n\n // copy bytes to vec\n\n src.copy_to_slice(&mut bytes);\n\n\n\n // convert bytes to string\n\n let s = String::from_utf8(bytes)\n\n .map_err(|_| ProtocolError::Invalid)?;\n\n\n\n // try convert json into boxed component\n\n Box::<dyn Component>::from_json(&s.into())\n\n .map_err(|_| ProtocolError::Invalid)\n\n }\n\n }\n\n}\n\n\n\nimpl ProtocolWrite for Box<dyn Component> {\n", "file_path": "blocky-net/src/protocol/mod.rs", "rank": 56, "score": 12.40820725235727 }, { "content": " }\n\n}\n\n\n\nimpl<T: Component> ProtocolWrite for T {\n\n fn write<U: BufMut>(&self, dst: &mut U) -> Result<(), ProtocolError> {\n\n let len = Self::len(self);\n\n let len_var: Var<i32> = (len as i32).into();\n\n\n\n if len > 32767 {\n\n Err(ProtocolError::TooLarge)\n\n } else {\n\n <Var<i32>>::write(&len_var, dst)?;\n\n\n\n // get json data\n\n let json = self.as_json().to_string();\n\n\n\n for &byte in json.as_bytes() {\n\n dst.put_u8(byte);\n\n }\n\n\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n\nprotocol_struct!(Test {\n\n text: crate::chat::TextComponent,\n\n});", "file_path": "blocky-net/src/protocol/mod.rs", "rank": 57, "score": 12.079151929352303 }, { "content": "use std::fmt;\n\nuse std::str::FromStr;\n\nuse thiserror::Error;\n\n\n\n#[derive(Error, Debug)]\n\npub enum ResourceLocationError {\n\n #[error(\"non [a-z0-9_.-] character in namespace of location\")]\n\n Parse,\n\n #[error(\"non [a-z0-9_.-] character in namespace of location\")]\n\n InvalidNamespace,\n\n #[error(\"non [a-z0-9/._-] character in path of location\")]\n\n InvalidPath,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct ResourceLocation {\n\n namespace: String,\n\n path: String,\n\n}\n\n\n", "file_path": "blocky-core/src/resource_location.rs", "rank": 58, "score": 11.88245577118299 }, { "content": "use std::fmt;\n\nuse std::collections::HashMap;\n\nuse blocky_core::ChatFormatting;\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct TextColor {\n\n value: u32,\n\n name: Option<String>,\n\n}\n\n\n\nlazy_static! {\n\n static ref NAMED_COLORS: HashMap<String, TextColor> = {\n\n let mut m = HashMap::new();\n\n\n\n for &format in &ChatFormatting::VALUES {\n\n if format.is_color() {\n\n m.insert(format.name().to_string(), format.into());\n\n }\n\n }\n\n\n", "file_path": "blocky-net/src/chat/text_color.rs", "rank": 59, "score": 11.670078370109696 }, { "content": "#[macro_use]\n\nextern crate clap;\n\n\n\nuse java_props::Properties;\n\nuse blocky::net::{AsJson, FromJson};\n\nuse blocky::net::protocol::{ProtocolWrite, Test};\n\nuse blocky::net::chat::{Style, ClickEvent, ClickAction, TextColor, TextComponent, Component};\n\n\n", "file_path": "blocky/src/main.rs", "rank": 60, "score": 11.451480433547218 }, { "content": "}\n\n\n\nimpl ops::IndexMut<&str> for Nbt {\n\n fn index_mut(&mut self, index: &str) -> &mut Self::Output {\n\n &mut self.tag[index]\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::{Kind, Nbt, Tag, tag};\n\n use std::io::Cursor;\n\n\n\n fn nbt(data: &[u8]) -> Nbt {\n\n let data = data.to_vec();\n\n let mut data = Cursor::new(data);\n\n\n\n // decode nbt from data\n\n let nbt = Nbt::decode(&mut data);\n\n\n", "file_path": "blocky-nbt/src/lib.rs", "rank": 61, "score": 11.422452296495898 }, { "content": " v.get(0)\n\n .map(|tag| tag.kind().id())\n\n .unwrap_or(Kind::End.id()),\n\n ),\n\n Self::Compound(_) => Kind::Compound,\n\n Self::IntArray(_) => Kind::IntArray,\n\n Self::LongArray(_) => Kind::LongArray,\n\n }\n\n }\n\n\n\n pub fn get<I: Index>(&self, index: I) -> Option<&Self> {\n\n index.index_into(self)\n\n }\n\n\n\n pub fn get_mut<I: Index>(&mut self, index: I) -> Option<&mut Self> {\n\n index.index_into_mut(self)\n\n }\n\n\n\n pub fn insert<I: Index>(&mut self, index: I, value: Self) {\n\n *index.index_or_insert(self) = value;\n\n }\n\n}\n\n\n", "file_path": "blocky-nbt/src/tag/mod.rs", "rank": 62, "score": 11.205846306452028 }, { "content": " fn index(&self, index: I) -> &Self::Output {\n\n match self.get(index) {\n\n Some(tag) => tag,\n\n None => &Tag::End,\n\n }\n\n }\n\n}\n\n\n\nimpl<I: Index> ops::IndexMut<I> for Tag {\n\n fn index_mut(&mut self, index: I) -> &mut Self::Output {\n\n index.index_or_insert(self)\n\n }\n\n}\n\n\n\nimpl ops::Index<&str> for Nbt {\n\n type Output = Tag;\n\n\n\n fn index<'a>(&self, index: &str) -> &Self::Output {\n\n &self.tag[index]\n\n }\n", "file_path": "blocky-nbt/src/lib.rs", "rank": 63, "score": 11.057820362031062 }, { "content": " Ok(())\n\n }\n\n }\n\n}\n\n\n\nimpl ProtocolLength for Box<dyn Component> {\n\n fn len(&self) -> usize {\n\n let json = self.as_json().to_string();\n\n let len = String::len(&json);\n\n let len_var: Var<i32> = (len as i32).into();\n\n\n\n len_var.len() + len\n\n }\n\n}\n\n\n\nimpl ProtocolRead for Box<dyn Component> {\n\n fn read<U: Buf>(src: &mut U) -> Result<Self, ProtocolError> {\n\n let len_var = <Var<i32>>::read(src)?;\n\n let len = len_var.0 as usize;\n\n\n", "file_path": "blocky-net/src/protocol/mod.rs", "rank": 64, "score": 10.718903871498679 }, { "content": " let action = map.get(\"action\").unwrap();\n\n let value = map.get(\"value\").unwrap();\n\n\n\n if action.is_string() && value.is_string() {\n\n let action = action.as_str().unwrap();\n\n let value = value.as_str()\n\n .unwrap()\n\n .into();\n\n\n\n if let Ok(action) = ClickAction::from_str(action) {\n\n return Ok(Self {\n\n action,\n\n value,\n\n });\n\n }\n\n }\n\n }\n\n }\n\n\n\n Err(ClickEventError::Parse)\n\n }\n\n}\n", "file_path": "blocky-net/src/chat/click_event.rs", "rank": 65, "score": 10.593862875557864 }, { "content": "}\n\n\n\nimpl ProtocolRead for Var<i64> {\n\n fn read<U: Buf>(src: &mut U) -> Result<Self, ProtocolError> {\n\n let mut value = 0i64;\n\n let mut reads = 0usize;\n\n\n\n loop {\n\n // do remaining check in u8\n\n let byte = <u8>::read(src)? as i64;\n\n\n\n value |= (byte & 0b01111111) << (7 * reads);\n\n reads += 1;\n\n\n\n if reads > 10 {\n\n // VarInt is bigger than 10 bytes\n\n return Err(ProtocolError::TooLarge);\n\n }\n\n\n\n if byte & 0b10000000 == 0 {\n", "file_path": "blocky-net/src/protocol/mod.rs", "rank": 66, "score": 10.193521965857702 }, { "content": " break;\n\n }\n\n }\n\n\n\n Ok(Var(value))\n\n }\n\n}\n\n\n\nimpl ProtocolWrite for Var<i64> {\n\n fn write<U: BufMut>(&self, dst: &mut U) -> Result<(), ProtocolError> {\n\n // do remaining check beforehand\n\n if dst.remaining_mut() < self.len() {\n\n Err(ProtocolError::NotEnoughBytes)\n\n } else {\n\n let mut value = self.0;\n\n\n\n loop {\n\n let mut byte = (value & 0b01111111) as u8;\n\n\n\n value >>= 7;\n", "file_path": "blocky-net/src/protocol/mod.rs", "rank": 67, "score": 10.1074342728853 }, { "content": " value >>= 7;\n\n\n\n if value == 0 {\n\n return i;\n\n }\n\n }\n\n\n\n 5\n\n }\n\n}\n\n\n\nimpl ProtocolRead for Var<i32> {\n\n fn read<U: Buf>(src: &mut U) -> Result<Self, ProtocolError> {\n\n let mut value = 0i32;\n\n let mut reads = 0usize;\n\n\n\n loop {\n\n // do remaining check in u8\n\n let byte = <u8>::read(src)? as i32;\n\n\n", "file_path": "blocky-net/src/protocol/mod.rs", "rank": 68, "score": 9.996290986340858 }, { "content": " let color = u32::from_str_radix(&s[1..], 16).ok()?;\n\n Some(TextColor::from_color(color))\n\n } else {\n\n // clone named color if found\n\n NAMED_COLORS.get(s).and_then(|color| Some(color.clone()))\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for TextColor {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self.name.as_ref() {\n\n Some(name) => write!(f, \"{}\", name),\n\n None => write!(f, \"{}\", self.format_value()),\n\n }\n\n }\n\n}\n\n\n\nimpl Into<TextColor> for &ChatFormatting {\n\n fn into(self) -> TextColor {\n", "file_path": "blocky-net/src/chat/text_color.rs", "rank": 69, "score": 9.896409200203491 }, { "content": " let quote_chr = quote_chr.unwrap_or('\"');\n\n\n\n builder.insert(0, quote_chr);\n\n builder.push(quote_chr);\n\n builder\n\n}\n\n\n\nimpl fmt::Display for Tag {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", match self {\n\n Self::End => panic!(\"cannot convert end to string\"),\n\n\n\n Self::Byte(value) => format!(\"{}b\", *value),\n\n Self::Short(value) => format!(\"{}s\", *value),\n\n Self::Int(value) => format!(\"{}\", *value),\n\n Self::Long(value) => format!(\"{}L\", *value),\n\n\n\n Self::Float(value) => format!(\"{}f\", *value),\n\n Self::Double(value) => format!(\"{}d\", *value),\n\n\n", "file_path": "blocky-nbt/src/tag/mod.rs", "rank": 70, "score": 9.871844141321207 }, { "content": " }\n\n\n\n v.into()\n\n }\n\n };\n\n}\n\n\n\npub struct Parser {\n\n reader: Reader,\n\n}\n\n\n\nimpl Parser {\n\n pub fn new<S: Into<String>>(s: S) -> Self {\n\n Self { reader: Reader::new(s.into()) }\n\n }\n\n\n\n fn expect(&mut self, chr: char) -> anyhow::Result<()> {\n\n self.reader.skip_whitespace()?;\n\n self.reader.expect(chr)?;\n\n\n", "file_path": "blocky-nbt/src/tag/parser/mod.rs", "rank": 71, "score": 9.675437735919516 }, { "content": " Self::String(s) => quote_and_escape(&s),\n\n\n\n Self::List(v) => {\n\n let mut items = vec![];\n\n\n\n for tag in v {\n\n items.push(tag.to_string());\n\n }\n\n\n\n format!(\"[{}]\", items.join(\",\"))\n\n },\n\n\n\n Self::Compound(m) => {\n\n let mut items = vec![];\n\n\n\n for (name, tag) in m {\n\n let value = tag.to_string();\n\n let key = if SIMPLE_PATTERN.is_match(&name) {\n\n name.clone()\n\n } else {\n", "file_path": "blocky-nbt/src/tag/mod.rs", "rank": 72, "score": 9.61108938926128 }, { "content": " impl ProtocolRead for $t {\n\n fn read<U: Buf>(src: &mut U) -> Result<Self, ProtocolError> {\n\n if src.remaining() < $len {\n\n Err(ProtocolError::NotEnoughBytes)\n\n } else {\n\n Ok(src.$r())\n\n }\n\n }\n\n }\n\n\n\n impl ProtocolWrite for $t {\n\n fn write<U: BufMut>(&self, dst: &mut U) -> Result<(), ProtocolError> {\n\n if dst.remaining_mut() < $len {\n\n Err(ProtocolError::NotEnoughBytes)\n\n } else {\n\n dst.$w(*self);\n\n Ok(())\n\n }\n\n }\n\n }\n", "file_path": "blocky-net/src/protocol/mod.rs", "rank": 73, "score": 9.510470483266001 }, { "content": " } else {\n\n let s = self.reader.read_unquoted_string()?;\n\n\n\n if s.is_empty() {\n\n self.reader.set_position(start);\n\n Err(anyhow::anyhow!(\"missing expected value\"))\n\n } else {\n\n Self::parse_type(&s).or_else(|_| Ok(Tag::String(s)))\n\n }\n\n }\n\n }\n\n\n\n fn parse_type(s: &str) -> anyhow::Result<Tag> {\n\n let len = s.len();\n\n\n\n Ok(if FLOAT_PATTERN.is_match(&s) {\n\n Tag::Float(s[..len - 1].parse()?)\n\n } else if BYTE_PATTERN.is_match(&s) {\n\n Tag::Byte(s[..len - 1].parse()?)\n\n } else if LONG_PATTERN.is_match(&s) {\n", "file_path": "blocky-nbt/src/tag/parser/mod.rs", "rank": 74, "score": 9.32665526589662 }, { "content": "impl ClickAction {\n\n pub fn name(&self) -> &str {\n\n match self {\n\n Self::OpenUrl => \"open_url\",\n\n Self::OpenFile => \"open_file\",\n\n Self::RunCommand => \"run_command\",\n\n Self::SuggestCommand => \"suggest_command\",\n\n Self::ChangePage => \"change_page\",\n\n Self::CopyToClipboard => \"copy_to_clipboard\",\n\n }\n\n }\n\n\n\n pub fn is_allowed_from_server(&self) -> bool {\n\n match self {\n\n Self::OpenFile => false,\n\n _ => true,\n\n }\n\n }\n\n}\n\n\n", "file_path": "blocky-net/src/chat/click_event.rs", "rank": 75, "score": 9.32368384041756 }, { "content": " m.insert(name, value);\n\n }\n\n m\n\n };\n\n}\n\n\n\nimpl ChatFormatting {\n\n pub const BLACK: Self = Self::new_color(\"black\", '0', 0, Some(0));\n\n pub const DARK_BLUE: Self = Self::new_color(\"dark_blue\", '1', 1, Some(170));\n\n pub const DARK_GREEN: Self = Self::new_color(\"dark_green\", '2', 2, Some(43520));\n\n pub const DARK_AQUA: Self = Self::new_color(\"dark_aqua\", '3', 3, Some(43690));\n\n pub const DARK_RED: Self = Self::new_color(\"dark_red\", '4', 4, Some(11141120));\n\n pub const DARK_PURPLE: Self = Self::new_color(\"dark_purple\", '5', 5, Some(11141290));\n\n pub const GOLD: Self = Self::new_color(\"gold\", '6', 6, Some(16755200));\n\n pub const GRAY: Self = Self::new_color(\"gray\", '7', 7, Some(11184810));\n\n pub const DARK_GRAY: Self = Self::new_color(\"dark_gray\", '8', 8, Some(5592405));\n\n pub const BLUE: Self = Self::new_color(\"blue\", '9', 9, Some(5592575));\n\n pub const GREEN: Self = Self::new_color(\"green\", 'a', 10, Some(5635925));\n\n pub const AQUA: Self = Self::new_color(\"aqua\", 'b', 11, Some(5636095));\n\n pub const RED: Self = Self::new_color(\"red\", 'c', 12, Some(16733525));\n", "file_path": "blocky-core/src/chat_formatting.rs", "rank": 76, "score": 9.311371659366495 }, { "content": " fn write<U: BufMut>(&self, dst: &mut U) -> Result<(), ProtocolError> {\n\n let len = Self::len(self);\n\n let len_var: Var<i32> = (len as i32).into();\n\n\n\n if len > 32767 {\n\n Err(ProtocolError::TooLarge)\n\n } else {\n\n <Var<i32>>::write(&len_var, dst)?;\n\n\n\n // get json data\n\n let json = self.as_json().to_string();\n\n\n\n for &byte in json.as_bytes() {\n\n dst.put_u8(byte);\n\n }\n\n\n\n Ok(())\n\n }\n\n }\n\n}\n", "file_path": "blocky-net/src/protocol/mod.rs", "rank": 77, "score": 9.217290255169786 }, { "content": " Self::new(name, code, false, id, color)\n\n }\n\n\n\n const fn new_format(\n\n name: &'static str,\n\n code: char,\n\n ) -> Self {\n\n Self::new(name, code, true, -1, None)\n\n }\n\n\n\n pub fn is_color(&self) -> bool {\n\n !self.is_format && *self != Self::RESET\n\n }\n\n\n\n pub fn is_format(&self) -> bool {\n\n self.is_format\n\n }\n\n\n\n pub fn name(&self) -> &str {\n\n &self.name\n", "file_path": "blocky-core/src/chat_formatting.rs", "rank": 78, "score": 9.181537477342303 }, { "content": " if dst.remaining_mut() < self.len() {\n\n Err(ProtocolError::NotEnoughBytes)\n\n } else {\n\n let mut value = self.0;\n\n\n\n loop {\n\n let mut byte = (value & 0b01111111) as u8;\n\n\n\n value >>= 7;\n\n\n\n if value != 0 {\n\n byte |= 0b10000000;\n\n }\n\n\n\n dst.put_u8(byte);\n\n\n\n if value == 0 {\n\n break;\n\n }\n\n }\n", "file_path": "blocky-net/src/protocol/mod.rs", "rank": 79, "score": 8.929610339899114 }, { "content": "#[derive(Debug, PartialEq, Clone)]\n\npub enum Tag {\n\n End,\n\n Byte(i8),\n\n Short(i16),\n\n Int(i32),\n\n Long(i64),\n\n Float(f32),\n\n Double(f64),\n\n ByteArray(Vec<i8>),\n\n String(String),\n\n List(Vec<Self>),\n\n Compound(Map<String, Self>),\n\n IntArray(Vec<i32>),\n\n LongArray(Vec<i64>),\n\n}\n\n\n\nmacro_rules! into_tag {\n\n ($input:ty, $output:ident) => {\n\n impl Into<Tag> for $input {\n", "file_path": "blocky-nbt/src/tag/mod.rs", "rank": 80, "score": 8.872840433536727 }, { "content": "\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n\nimpl ProtocolLength for Var<i64> {\n\n fn len(&self) -> usize {\n\n let mut value = self.0;\n\n\n\n for i in 1..10 {\n\n value >>= 7;\n\n\n\n if value == 0 {\n\n return i;\n\n }\n\n }\n\n\n\n 5\n\n }\n", "file_path": "blocky-net/src/protocol/mod.rs", "rank": 81, "score": 8.784550297026296 }, { "content": " value |= (byte & 0b01111111) << (7 * reads);\n\n reads += 1;\n\n\n\n if reads > 5 {\n\n // VarInt is bigger than 5 bytes\n\n return Err(ProtocolError::TooLarge);\n\n }\n\n\n\n if byte & 0b10000000 == 0 {\n\n break;\n\n }\n\n }\n\n\n\n Ok(Var(value))\n\n }\n\n}\n\n\n\nimpl ProtocolWrite for Var<i32> {\n\n fn write<U: BufMut>(&self, dst: &mut U) -> Result<(), ProtocolError> {\n\n // do remaining check beforehand\n", "file_path": "blocky-net/src/protocol/mod.rs", "rank": 82, "score": 8.774240839953752 }, { "content": "\n\n fn index_or_insert<'a>(&self, tag: &'a mut Tag) -> &'a mut Tag {\n\n match tag {\n\n Tag::Compound(m) => {\n\n let key = self.to_owned();\n\n\n\n m.entry(key.clone())\n\n .or_insert(Tag::End)\n\n },\n\n _ => panic!(\"cannot index tag type: {:?}\", tag.kind()),\n\n }\n\n }\n\n}\n\n\n\nimpl Index for String {\n\n fn index_into<'a>(&self, tag: &'a Tag) -> Option<&'a Tag> {\n\n self[..].index_into(tag)\n\n }\n\n\n\n fn index_into_mut<'a>(&self, tag: &'a mut Tag) -> Option<&'a mut Tag> {\n", "file_path": "blocky-nbt/src/tag/index.rs", "rank": 83, "score": 8.726375392032992 }, { "content": " pub const LIGHT_PURPLE: Self = Self::new_color(\"light_purple\", 'd', 13, Some(16733695));\n\n pub const YELLOW: Self = Self::new_color(\"yellow\", 'e', 14, Some(16777045));\n\n pub const WHITE: Self = Self::new_color(\"white\", 'f', 15, Some(16777215));\n\n\n\n pub const OBFUSCATED: Self = Self::new_format(\"obfuscated\", 'k');\n\n pub const BOLD: Self = Self::new_format(\"bold\", 'l');\n\n pub const STRIKETHROUGH: Self = Self::new_format(\"strikethrough\", 'm');\n\n pub const UNDERLINE: Self = Self::new_format(\"underline\", 'n');\n\n pub const ITALIC: Self = Self::new_format(\"italic\", 'o');\n\n\n\n pub const RESET: Self = Self::new_color(\"reset\", 'r', -1, None);\n\n\n\n pub const VALUES: [&'static Self; 21] = [\n\n &Self::BLACK,\n\n &Self::DARK_BLUE,\n\n &Self::DARK_GREEN,\n\n &Self::DARK_AQUA,\n\n &Self::DARK_RED,\n\n &Self::DARK_PURPLE,\n\n &Self::GOLD,\n", "file_path": "blocky-core/src/chat_formatting.rs", "rank": 84, "score": 8.51279205639738 }, { "content": " quote_and_escape(&name)\n\n };\n\n\n\n items.push(format!(\"{}:{}\", key, value));\n\n }\n\n\n\n format!(\"{{{}}}\", items.join(\",\"))\n\n },\n\n\n\n Self::ByteArray(v) => {\n\n let mut items = vec![];\n\n\n\n for value in v {\n\n items.push(Tag::Byte(*value).to_string());\n\n }\n\n\n\n format!(\"[B;{}]\", items.join(\",\"))\n\n },\n\n Self::IntArray(v) => {\n\n let mut items = vec![];\n", "file_path": "blocky-nbt/src/tag/mod.rs", "rank": 85, "score": 8.296190302863284 }, { "content": " .and_then(|value| Some(*value))\n\n }\n\n}\n\n\n\nimpl fmt::Display for ChatFormatting {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"§{}\", self.code)\n\n }\n\n}\n", "file_path": "blocky-core/src/chat_formatting.rs", "rank": 86, "score": 8.119642305231649 }, { "content": " } else {\n\n String::new()\n\n };\n\n\n\n // decode nbt tag\n\n let tag = decode_tag(buf, id)?;\n\n\n\n // return nbt with header\n\n Ok(Nbt::new(name, tag ))\n\n}\n\n\n\nimpl Nbt {\n\n pub fn decode<R: Read>(src: &mut R) -> anyhow::Result<Self> {\n\n // decode gz compression\n\n let mut buf = vec![];\n\n\n\n src.read_to_end(&mut buf)?;\n\n\n\n // try to decompress source\n\n {\n", "file_path": "blocky-nbt/src/decoder.rs", "rank": 87, "score": 7.972070171713575 }, { "content": "pub struct Reader {\n\n text: String,\n\n chars: Vec<char>,\n\n position: usize,\n\n}\n\n\n\nimpl Reader {\n\n pub fn new<S: Into<String>>(s: S) -> Self {\n\n let text = s.into();\n\n let chars = text.chars().collect();\n\n\n\n Self {\n\n text,\n\n chars,\n\n position: 0,\n\n }\n\n }\n\n\n\n pub fn position(&self) -> usize {\n\n self.position\n", "file_path": "blocky-nbt/src/tag/parser/reader.rs", "rank": 88, "score": 7.932805272963741 }, { "content": " }\n\n\n\n pub fn code(&self) -> char {\n\n self.code\n\n }\n\n\n\n pub fn color(&self) -> Option<u32> {\n\n self.color\n\n }\n\n\n\n pub fn clean_name(name: &str) -> String {\n\n let name = name.to_lowercase();\n\n\n\n // remove non a-z to make look ups easier\n\n NAME_REGEX.replace_all(&name, \"\")\n\n .to_string()\n\n }\n\n\n\n pub fn get_by_name(s: &str) -> Option<&'static ChatFormatting> {\n\n FORMATTING_BY_NAME.get(&Self::clean_name(s))\n", "file_path": "blocky-core/src/chat_formatting.rs", "rank": 89, "score": 7.575730679872962 }, { "content": " }\n\n\n\n pub fn read_unquoted_string(&mut self) -> anyhow::Result<String> {\n\n let start = self.position;\n\n\n\n while !self.done() && Self::is_allowed_in_unquoted_string(self.peek()?) {\n\n self.skip();\n\n }\n\n\n\n Ok(String::from(&self.text[start..self.position]))\n\n }\n\n\n\n pub fn read_quoted_string(&mut self) -> anyhow::Result<String> {\n\n if self.done() {\n\n return Ok(String::new());\n\n }\n\n\n\n let chr = self.peek()?;\n\n\n\n if !Self::is_quote(chr) {\n", "file_path": "blocky-nbt/src/tag/parser/reader.rs", "rank": 90, "score": 7.556337546692096 }, { "content": " if self.reader.done() {\n\n anyhow::bail!(\"expected array closure\")\n\n }\n\n }\n\n\n\n self.expect(']')?;\n\n Ok(Tag::List(v))\n\n }\n\n\n\n pub fn read_list(&mut self) -> anyhow::Result<Tag> {\n\n if self.reader.has_remaining(3) && !Reader::is_quote(self.reader.peek_nth(1)?) && self.reader.peek_nth(2)? == ';' {\n\n self.read_array_tag()\n\n } else {\n\n self.read_list_tag()\n\n }\n\n }\n\n}\n\n\n\nimpl Nbt {\n\n pub fn parse<S: Into<String>>(s: S) -> anyhow::Result<Tag> {\n\n let mut parser = Parser::new(s);\n\n let tag = parser.read_value()?;\n\n\n\n Ok(tag)\n\n }\n\n}\n", "file_path": "blocky-nbt/src/tag/parser/mod.rs", "rank": 91, "score": 7.458070837537898 }, { "content": " let len_var: Var<i32> = (len as i32).into();\n\n\n\n len_var.len() + len\n\n }\n\n}\n\n\n\nimpl ProtocolRead for String {\n\n fn read<U: Buf>(src: &mut U) -> Result<Self, ProtocolError> {\n\n let len_var = <Var<i32>>::read(src)?;\n\n let len = len_var.0 as usize;\n\n\n\n if len > 32767 {\n\n Err(ProtocolError::TooLarge)\n\n } else {\n\n let mut bytes = vec![0u8; len];\n\n\n\n // copy bytes to vec\n\n src.copy_to_slice(&mut bytes);\n\n\n\n // convert bytes to string\n", "file_path": "blocky-net/src/protocol/mod.rs", "rank": 92, "score": 7.1736322797263234 }, { "content": "#[derive(Debug, PartialEq)]\n\npub enum Kind {\n\n End,\n\n Byte,\n\n Short,\n\n Int,\n\n Long,\n\n Float,\n\n Double,\n\n ByteArray,\n\n String,\n\n List(u8),\n\n Compound,\n\n IntArray,\n\n LongArray,\n\n}\n\n\n\nimpl Kind {\n\n pub fn id(&self) -> u8 {\n\n match self {\n", "file_path": "blocky-nbt/src/tag/kind.rs", "rank": 93, "score": 7.049078677252673 }, { "content": "mod kind;\n\nmod index;\n\nmod parser;\n\n\n\npub use kind::*;\n\npub use index::*;\n\npub use parser::*;\n\n\n\n#[cfg(feature = \"preserve-order\")]\n\npub use indexmap::IndexMap as Map;\n\n#[cfg(not(feature = \"preserve-order\"))]\n\npub use std::collections::HashMap as Map;\n\n\n\nuse regex::Regex;\n\nuse std::fmt;\n\n\n\nlazy_static! {\n\n static ref SIMPLE_PATTERN: Regex = Regex::new(\"^[A-Za-z0-9._+-]+$\").unwrap();\n\n}\n\n\n", "file_path": "blocky-nbt/src/tag/mod.rs", "rank": 94, "score": 6.93160130909345 }, { "content": "use crate::{Nbt, Tag, Kind};\n\nuse flate2::write::GzEncoder;\n\nuse flate2::Compression;\n\nuse bytes::{BytesMut, BufMut};\n\nuse std::io::Write;\n\n\n", "file_path": "blocky-nbt/src/encoder.rs", "rank": 95, "score": 6.779811834122054 }, { "content": " anyhow::bail!(\"invalid escape in string\");\n\n }\n\n } else if read_chr == '\\\\' {\n\n escaped = true;\n\n } else if read_chr == chr {\n\n return Ok(builder);\n\n } else {\n\n builder.push(read_chr);\n\n }\n\n }\n\n\n\n Err(anyhow::anyhow!(\"unexpected end of string\"))\n\n }\n\n\n\n pub fn read_string(&mut self) -> anyhow::Result<String> {\n\n if self.done() {\n\n Ok(String::new())\n\n } else {\n\n let chr = self.peek()?;\n\n\n\n if Self::is_quote(chr) {\n\n self.read_quoted_string()\n\n } else {\n\n self.read_unquoted_string()\n\n }\n\n }\n\n }\n\n}\n", "file_path": "blocky-nbt/src/tag/parser/reader.rs", "rank": 96, "score": 6.755638035275187 }, { "content": "\n\n if self.reader.done() {\n\n anyhow::bail!(\"missing expected value\");\n\n }\n\n\n\n let chr = self.reader.peek()?;\n\n\n\n if chr == '{' {\n\n self.read_struct()\n\n } else if chr == '[' {\n\n self.read_list()\n\n } else {\n\n self.read_typed_value()\n\n }\n\n }\n\n\n\n fn has_separator(&mut self) -> anyhow::Result<bool> {\n\n self.reader.skip_whitespace()?;\n\n\n\n Ok(if !self.reader.done() && self.reader.peek()? == ',' {\n", "file_path": "blocky-nbt/src/tag/parser/mod.rs", "rank": 97, "score": 6.754979264473609 }, { "content": "#[macro_use]\n\nextern crate lazy_static;\n\n\n\nmod chat_formatting;\n\nmod resource_location;\n\n\n\npub use chat_formatting::*;\n\npub use resource_location::*;\n", "file_path": "blocky-core/src/lib.rs", "rank": 98, "score": 6.708158626309837 }, { "content": "\n\n for value in v {\n\n items.push(Tag::Int(*value).to_string());\n\n }\n\n\n\n format!(\"[I;{}]\", items.join(\",\"))\n\n },\n\n Self::LongArray(v) => {\n\n let mut items = vec![];\n\n\n\n for value in v {\n\n items.push(Tag::Long(*value).to_string());\n\n }\n\n\n\n format!(\"[L;{}]\", items.join(\",\"))\n\n },\n\n })\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! tag {\n\n ($s:expr) => {\n\n $crate::Nbt::parse(format!(\"{}\", $s))\n\n .expect(\"failed to parse tag\")\n\n };\n\n}\n", "file_path": "blocky-nbt/src/tag/mod.rs", "rank": 99, "score": 6.641308797367077 } ]
Rust
src/plugins.rs
ivmarkov/edge-frame
c3c8c270189d5f497180774c138de0717d919bb5
use std::collections; use enumset::*; use yew::prelude::Html; use yew::prelude::Properties; use embedded_svc::edge_config::role::Role; use crate::lambda::Lambda; #[derive(EnumSetType, Debug, PartialOrd)] #[cfg_attr(feature = "std", derive(Hash))] pub enum InsertionPoint { Drawer, Appbar, } #[derive(EnumSetType, Debug, PartialOrd)] #[cfg_attr(feature = "std", derive(Hash))] pub enum Category { Header, Regular, Settings, } #[derive(Debug, PartialEq, Clone)] pub struct APIEndpoint { pub uri: String, pub headers: collections::HashMap<String, String>, } #[derive(Properties, Clone, Default, Debug, PartialEq)] pub struct PluginProps<R> where R: PartialEq + Clone, { pub active_route: R, pub active_role: Role, pub app_bar_renderer: Option<Lambda<(), Html>>, pub api_endpoint: Option<APIEndpoint>, } impl<RAPP> PluginProps<RAPP> where RAPP: PartialEq + Clone, { pub fn map<F, R>(&self, mapper: F, api_uri_prefix: &str) -> PluginProps<R> where F: FnOnce(&RAPP) -> R, R: PartialEq + Clone, { PluginProps { active_route: mapper(&self.active_route), active_role: self.active_role, api_endpoint: self.api_endpoint.as_ref().map( |APIEndpoint { ref uri, ref headers, }| APIEndpoint { uri: crate::api::uri_utils::with_path_segment(uri, api_uri_prefix).unwrap(), headers: headers.clone(), }, ), app_bar_renderer: self.app_bar_renderer.clone(), } } } #[derive(PartialEq, Clone, Debug)] pub struct NavigationPlugin<R> where R: PartialEq + Clone, { pub category: Category, pub insertion_point: InsertionPoint, pub component: Lambda<PluginProps<R>, Html>, pub api_uri_prefix: String, } impl<R> NavigationPlugin<R> where R: PartialEq + Clone, { pub fn map<F, RAPP>(&self, mapper: F) -> NavigationPlugin<RAPP> where F: Fn(&RAPP) -> R + 'static, RAPP: PartialEq + Clone, R: 'static, { NavigationPlugin { category: self.category, insertion_point: self.insertion_point, component: map(&self.component, self.api_uri_prefix.as_str(), mapper), api_uri_prefix: "".into(), } } } #[derive(PartialEq, Clone, Debug)] pub struct ContentPlugin<R> where R: PartialEq + Clone, { pub component: Lambda<PluginProps<R>, Html>, pub api_uri_prefix: String, } impl<R> ContentPlugin<R> where R: PartialEq + Clone, { pub fn map<F, RAPP>(&self, mapper: F) -> ContentPlugin<RAPP> where F: Fn(&RAPP) -> R + 'static, RAPP: PartialEq + Clone, R: 'static, { ContentPlugin { component: map(&self.component, self.api_uri_prefix.as_str(), mapper), api_uri_prefix: "".into(), } } } fn map<F, R, RAPP>( component: &Lambda<PluginProps<R>, Html>, api_uri_prefix: &str, mapper: F, ) -> Lambda<PluginProps<RAPP>, Html> where F: Fn(&RAPP) -> R + 'static, R: PartialEq + Clone + 'static, RAPP: PartialEq + Clone, { let plugin_component = component.clone(); let plugin_api_uri_prefix: String = api_uri_prefix.into(); Lambda::from(move |props: PluginProps<RAPP>| { plugin_component.call(props.map(&mapper, plugin_api_uri_prefix.as_str())) }) }
use std::collections; use enumset::*; use yew::prelude::Html; use yew::prelude::Properties; use embedded_svc::edge_config::role::Role; use crate::lambda::Lambda; #[derive(EnumSetType, Debug, PartialOrd)] #[cfg_attr(feature = "std", derive(Hash))] pub enum InsertionPoint { Drawer, Appbar, } #[derive(EnumSetType, Debug, PartialOrd)] #[cfg_attr(feature = "std", derive(Hash))] pub enum Category { Header, Regular, Settings, } #[derive(Debug, PartialEq, Clone)] pub struct APIEndpoint { pub uri: String, pub headers: collections::HashMap<String, String>, } #[derive(Properties, Clone, Default, Debug, PartialEq)] pub struct PluginProps<R> where R: PartialEq + Clone, { pub active_route: R, pub active_role: Role, pub app_bar_renderer: Option<Lambda<(), Html>>, pub api_endpoint: Option<APIEndpoint>, } impl<RAPP> PluginProps<RAPP> where RAPP: PartialEq + Clone, { pub fn map<F, R>(&self, mapper: F, api_uri_prefix: &str) -> PluginProps<R> where F: FnOnce(&RAPP) -> R, R: PartialEq + Clone, { PluginProps { active_route: mapper(&self.active_route), active_role: self.active_role, api_endpoint: self.api_endpoint.as_ref().map( |APIEndpoint { ref uri, ref headers, }| APIEndpoint { uri: crate::api::uri_utils::with_path_segment(uri, api_uri_prefix).unwrap(), headers: headers.clone(), }, ), app_bar_renderer: self.app_bar_renderer.clone(), } } } #[derive(PartialEq, Clone, Debug)] pub struct NavigationPlugin<R> where R: PartialEq + Clone, { pub category: Category, pub insertion_point: InsertionPoint, pub component: Lambda<PluginProps<R>, Html>, pub api_uri_prefix: String, } impl<R> NavigationPlugin<R> where R: PartialEq + Clone, {
} #[derive(PartialEq, Clone, Debug)] pub struct ContentPlugin<R> where R: PartialEq + Clone, { pub component: Lambda<PluginProps<R>, Html>, pub api_uri_prefix: String, } impl<R> ContentPlugin<R> where R: PartialEq + Clone, { pub fn map<F, RAPP>(&self, mapper: F) -> ContentPlugin<RAPP> where F: Fn(&RAPP) -> R + 'static, RAPP: PartialEq + Clone, R: 'static, { ContentPlugin { component: map(&self.component, self.api_uri_prefix.as_str(), mapper), api_uri_prefix: "".into(), } } } fn map<F, R, RAPP>( component: &Lambda<PluginProps<R>, Html>, api_uri_prefix: &str, mapper: F, ) -> Lambda<PluginProps<RAPP>, Html> where F: Fn(&RAPP) -> R + 'static, R: PartialEq + Clone + 'static, RAPP: PartialEq + Clone, { let plugin_component = component.clone(); let plugin_api_uri_prefix: String = api_uri_prefix.into(); Lambda::from(move |props: PluginProps<RAPP>| { plugin_component.call(props.map(&mapper, plugin_api_uri_prefix.as_str())) }) }
pub fn map<F, RAPP>(&self, mapper: F) -> NavigationPlugin<RAPP> where F: Fn(&RAPP) -> R + 'static, RAPP: PartialEq + Clone, R: 'static, { NavigationPlugin { category: self.category, insertion_point: self.insertion_point, component: map(&self.component, self.api_uri_prefix.as_str(), mapper), api_uri_prefix: "".into(), } }
function_block-full_function
[ { "content": "pub fn with_path_segment(uri: &str, segment: &str) -> Result<String> {\n\n let mut url = url::Url::parse(uri)?;\n\n\n\n {\n\n let mut segments = url\n\n .path_segments_mut()\n\n .map_err(|_| anyhow!(\"url cannot be used as a base\"))?;\n\n segments.push(segment);\n\n }\n\n\n\n Ok(url.into())\n\n}\n", "file_path": "src/api/uri_utils.rs", "rank": 0, "score": 119079.10056639009 }, { "content": "fn as_list<T: Description + ToString + FromStr + IntoDomainIterator>(selected: Option<T>) -> Html {\n\n html! {\n\n <>\n\n {\n\n for T::iter().map(|v| {\n\n let selected = selected\n\n .as_ref()\n\n .map_or(false, |s| s.to_string() == v.to_string());\n\n\n\n as_list_item(v, selected)\n\n })\n\n }\n\n </>\n\n }\n\n}\n\n\n", "file_path": "src/components/wifi.rs", "rank": 2, "score": 79496.30425110656 }, { "content": "pub fn main() {\n\n App::<Main>::new().mount_to_body();\n\n}\n", "file_path": "src/main.rs", "rank": 3, "score": 68227.53652229809 }, { "content": "fn as_list_item<T: Description + ToString>(item: T, selected: bool) -> Html {\n\n html! {\n\n <MatListItem\n\n selected=selected\n\n tabindex=0\n\n value=item.to_string()\n\n >\n\n {item.get_description()}\n\n </MatListItem>\n\n }\n\n}\n\n\n\nimpl Component for WiFi {\n\n type Message = Msg;\n\n type Properties = Props;\n\n\n\n fn create(props: Self::Properties, link: ComponentLink<Self>) -> Self {\n\n let mut wifi = Self {\n\n props,\n\n conf: Model::new(),\n", "file_path": "src/components/wifi.rs", "rank": 4, "score": 67013.70315769734 }, { "content": "pub fn plugin(behavior: PluginBehavior) -> SimplePlugin<bool> {\n\n SimplePlugin {\n\n name: \"WiFi\".into(),\n\n description: Some(\n\n match behavior {\n\n PluginBehavior::STA => \"A settings user interface for configuring WiFi access\",\n\n PluginBehavior::AP => \"A settings user interface for configuring WiFi Access Point\",\n\n PluginBehavior::Mixed => {\n\n \"A settings user interface for configuring WiFi Access Point and STA\"\n\n }\n\n }\n\n .into(),\n\n ),\n\n icon: Some(\"wifi\".into()),\n\n min_role: Role::Admin,\n\n insertion_points: EnumSet::only(InsertionPoint::Drawer)\n\n .union(EnumSet::only(InsertionPoint::Appbar)),\n\n category: Category::Settings,\n\n route: true,\n\n component: Lambda::from(move |plugin_props: PluginProps<bool>| {\n", "file_path": "src/components/wifi.rs", "rank": 5, "score": 57372.496315640536 }, { "content": "#[derive(Default)]\n\nstruct Fields {\n\n ssid: Field<String>,\n\n auth_method: Field<wifi::AuthMethod>,\n\n password: Field<String>,\n\n\n\n subnet: Field<ipv4::Subnet>,\n\n ip: Field<Ipv4Addr>,\n\n dns: Field<Optional<Ipv4Addr>>,\n\n secondary_dns: Field<Optional<Ipv4Addr>>,\n\n}\n\n\n\nimpl Fields {\n\n fn load(&mut self) {\n\n self.ssid.load();\n\n self.auth_method.load();\n\n self.password.load();\n\n\n\n self.subnet.load();\n\n self.ip.load();\n\n self.dns.load();\n\n self.secondary_dns.load();\n\n }\n\n}\n\n\n", "file_path": "src/components/wifi.rs", "rank": 6, "score": 56211.05110665454 }, { "content": "struct SimplePluginIterator<'a, R, I>\n\nwhere\n\n R: PartialEq + Clone,\n\n{\n\n simple_plugin: &'a SimplePlugin<R>,\n\n insertion_point_iter: I,\n\n}\n\n\n\nimpl<'a, R, I> Iterator for SimplePluginIterator<'a, R, I>\n\nwhere\n\n R: Routed + PartialEq + Clone + 'static,\n\n I: Iterator<Item = InsertionPoint>,\n\n{\n\n type Item = NavigationPlugin<R>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if let Some(insertion_point) = self.insertion_point_iter.next() {\n\n Some(NavigationPlugin {\n\n category: self.simple_plugin.category,\n\n insertion_point,\n", "file_path": "src/simple_plugins.rs", "rank": 7, "score": 54693.402553579275 }, { "content": "#[derive(Default)]\n\nstruct ApFields {\n\n ssid: Field<String>,\n\n auth_method: Field<wifi::AuthMethod>,\n\n password: Field<String>,\n\n password_confirmed: Field<String>,\n\n\n\n subnet: Field<ipv4::Subnet>,\n\n dns: Field<Optional<Ipv4Addr>>,\n\n secondary_dns: Field<Optional<Ipv4Addr>>,\n\n}\n\n\n\nimpl ApFields {\n\n fn load(&mut self) {\n\n self.ssid.load();\n\n self.auth_method.load();\n\n self.password.load();\n\n self.password_confirmed.load();\n\n\n\n self.subnet.load();\n\n self.dns.load();\n", "file_path": "src/components/wifi.rs", "rank": 8, "score": 53707.57617849812 }, { "content": "pub trait Description {\n\n fn get_description(&self) -> String;\n\n}\n\n\n", "file_path": "src/components/common.rs", "rank": 9, "score": 50537.40114134208 }, { "content": "pub trait IntoDomainIterator: Sized {\n\n type Iterator: Iterator<Item = Self>;\n\n\n\n fn iter() -> Self::Iterator;\n\n}\n\n\n\nimpl<T: EnumMessage> Description for T {\n\n fn get_description(&self) -> String {\n\n self.get_message().map_or(\"\".into(), |v| v.into())\n\n }\n\n}\n\n\n\nimpl<I: Iterator<Item = T>, T: IntoEnumIterator<Iterator = I>> IntoDomainIterator for T {\n\n type Iterator = <T as IntoEnumIterator>::Iterator;\n\n\n\n fn iter() -> Self::Iterator {\n\n <T as IntoEnumIterator>::iter()\n\n }\n\n}\n\n\n", "file_path": "src/components/common.rs", "rank": 10, "score": 44451.63787517164 }, { "content": "#[derive(Debug, Switch, Copy, Clone, PartialEq, Eq, Hash)]\n\nenum Routes {\n\n #[to = \"/\"]\n\n Root,\n\n}\n\n\n\n#[derive(Properties, Clone, PartialEq, Default)]\n\npub struct Props {}\n\n\n\npub struct Main;\n\n\n\nimpl Component for Main {\n\n type Message = ();\n\n type Properties = Props;\n\n\n\n fn create(_props: Self::Properties, _link: ComponentLink<Self>) -> Self {\n\n Self {}\n\n }\n\n\n\n fn update(&mut self, _msg: Self::Message) -> ShouldRender {\n\n false\n", "file_path": "src/main.rs", "rank": 11, "score": 39073.235984199295 }, { "content": "type LambdaOnce<IN, OUT> = RefCell<Option<Box<dyn FnOnce(IN) -> OUT>>>;\n\n\n\nimpl<IN, OUT, F: Fn(IN) -> OUT + 'static> From<F> for Lambda<IN, OUT> {\n\n fn from(func: F) -> Self {\n\n Lambda::Lambda(Rc::new(func))\n\n }\n\n}\n\n\n\nimpl<IN, OUT> Clone for Lambda<IN, OUT> {\n\n fn clone(&self) -> Self {\n\n match self {\n\n Lambda::Lambda(cb) => Lambda::Lambda(cb.clone()),\n\n Lambda::LambdaOnce(cb) => Lambda::LambdaOnce(cb.clone()),\n\n }\n\n }\n\n}\n\n\n\n#[allow(clippy::vtable_address_comparisons)]\n\nimpl<IN, OUT> PartialEq for Lambda<IN, OUT> {\n\n fn eq(&self, other: &Lambda<IN, OUT>) -> bool {\n", "file_path": "src/lambda.rs", "rank": 12, "score": 37750.437945264166 }, { "content": "use anyhow::*;\n\n\n", "file_path": "src/api/uri_utils.rs", "rank": 13, "score": 24699.09696074672 }, { "content": "pub mod common;\n\npub mod frame;\n\n\n\npub mod router_icon_button;\n\npub mod router_list_item;\n\n\n\npub mod wifi;\n", "file_path": "src/components.rs", "rank": 14, "score": 21507.036070793398 }, { "content": "\n\npub enum Msg {\n\n NavIconClick,\n\n Opened,\n\n Closed,\n\n}\n\n\n\nimpl<R> Frame<R>\n\nwhere\n\n R: Routed + PartialEq + Clone + Copy + Debug + 'static,\n\n{\n\n fn view(props: &Props<R>, drawer_open: bool, link: &ComponentLink<Self>, routed: R) -> Html {\n\n let normal = Self::get_nav_plugins(props, |nav| {\n\n nav.insertion_point == InsertionPoint::Drawer && nav.category != Category::Settings\n\n });\n\n\n\n let settings = Self::get_nav_plugins(props, |nav| {\n\n nav.insertion_point == InsertionPoint::Drawer && nav.category == Category::Settings\n\n });\n\n\n", "file_path": "src/components/frame.rs", "rank": 15, "score": 20127.965470606934 }, { "content": "use std::borrow::Cow;\n\nuse std::fmt::Debug;\n\nuse std::vec;\n\n\n\nuse yew::prelude::*;\n\nuse yew_router::prelude::Switch as Routed;\n\nuse yew_router::prelude::*;\n\n\n\nuse material_yew::top_app_bar_fixed::*;\n\nuse material_yew::*;\n\n\n\nuse embedded_svc::edge_config::role::Role;\n\n\n\nuse crate::lambda;\n\nuse crate::plugins::*;\n\n\n\n#[derive(Properties, Clone, PartialEq)]\n\npub struct Props<R>\n\nwhere\n\n R: Routed + PartialEq + Clone,\n", "file_path": "src/components/frame.rs", "rank": 16, "score": 20125.2021756519 }, { "content": "use std::{cell::RefCell, iter::once, mem, rc::Rc, str::FromStr};\n\n\n\nuse anyhow::*;\n\n\n\nuse strum::{EnumMessage, IntoEnumIterator};\n\n\n\nuse yew::prelude::*;\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Optional<T>(pub Option<T>);\n\n\n\nimpl<T> Optional<T> {\n\n pub fn new(v: Option<T>) -> Self {\n\n Self(v)\n\n }\n\n}\n\n\n\nimpl<T> From<Option<T>> for Optional<T> {\n\n fn from(o: Option<T>) -> Self {\n\n Self(o)\n", "file_path": "src/components/common.rs", "rank": 17, "score": 20124.603637842534 }, { "content": "use std::{cell::RefCell, convert::TryFrom, net::Ipv4Addr, rc::Rc, str::FromStr, vec};\n\n\n\nuse anyhow::*;\n\n\n\nuse enumset::EnumSet;\n\n\n\nuse embedded_svc::wifi;\n\nuse embedded_svc::{\n\n ipv4,\n\n wifi::{AuthMethod, TransitionalState},\n\n};\n\n\n\nuse yew::prelude::*;\n\n\n\nuse yewtil::future::*;\n\n\n\nuse embedded_svc::edge_config::role::Role;\n\n\n\nuse material_yew::list::GraphicType;\n\nuse material_yew::list::RequestSelectedDetail;\n", "file_path": "src/components/wifi.rs", "rank": 18, "score": 20124.194796251202 }, { "content": " lambda::Lambda::from(move |_| Self::view_app_bar(&props, &link, routed))\n\n }\n\n\n\n fn get_nav_plugins<F: Fn(&NavigationPlugin<R>) -> bool>(\n\n props: &Props<R>,\n\n criteria: F,\n\n ) -> vec::Vec<lambda::Lambda<PluginProps<R>, Html>> {\n\n props\n\n .navigation\n\n .iter()\n\n .filter(|nav| criteria(&nav))\n\n .map(|nav| nav.component.clone())\n\n .collect()\n\n }\n\n}\n\n\n\nimpl<R> Component for Frame<R>\n\nwhere\n\n R: Routed + PartialEq + Clone + Copy + Debug + 'static,\n\n{\n", "file_path": "src/components/frame.rs", "rank": 19, "score": 20123.709258039995 }, { "content": "{\n\n fn default() -> Self {\n\n Props {\n\n app_title: \"\".into(),\n\n navigation: vec::Vec::new(),\n\n content: vec::Vec::new(),\n\n active_role: Role::Admin,\n\n api_endpoint: None,\n\n }\n\n }\n\n}\n\n\n\npub struct Frame<R>\n\nwhere\n\n R: Routed + PartialEq + Clone + Copy + Debug + 'static,\n\n{\n\n link: ComponentLink<Self>,\n\n drawer_open: bool,\n\n props: Props<R>,\n\n}\n", "file_path": "src/components/frame.rs", "rank": 20, "score": 20123.514312458392 }, { "content": "{\n\n #[prop_or_default]\n\n pub app_title: Cow<'static, str>,\n\n\n\n #[prop_or(vec::Vec::new())]\n\n pub navigation: vec::Vec<NavigationPlugin<R>>,\n\n\n\n #[prop_or(vec::Vec::new())]\n\n pub content: vec::Vec<ContentPlugin<R>>,\n\n\n\n // TODO: Most likely should be state\n\n #[prop_or_default]\n\n pub active_role: Role,\n\n\n\n pub api_endpoint: Option<APIEndpoint>,\n\n}\n\n\n\nimpl<R> Default for Props<R>\n\nwhere\n\n R: Routed + PartialEq + Clone,\n", "file_path": "src/components/frame.rs", "rank": 21, "score": 20123.156009271766 }, { "content": " ) -> Html {\n\n html! {\n\n for plugins.iter().map(|component|\n\n component.call(PluginProps {\n\n active_route: routed,\n\n active_role: props.active_role,\n\n api_endpoint: props.api_endpoint.clone(),\n\n app_bar_renderer: app_bar_renderer.clone(),\n\n }))\n\n }\n\n }\n\n\n\n fn get_app_bar_renderer(\n\n props: &Props<R>,\n\n link: &ComponentLink<Self>,\n\n routed: R,\n\n ) -> lambda::Lambda<(), Html> {\n\n let props = props.clone();\n\n let link = link.clone();\n\n\n", "file_path": "src/components/frame.rs", "rank": 22, "score": 20121.52375936547 }, { "content": " (self.parser)(self.string.as_ref()).ok()\n\n }\n\n\n\n pub fn is_valid(&self) -> bool {\n\n match &self.error {\n\n None => true,\n\n Some(_) => false,\n\n }\n\n }\n\n\n\n pub fn get_error(&self) -> Option<String> {\n\n self.error.clone()\n\n }\n\n\n\n pub fn get_error_str(&self) -> String {\n\n self.error.clone().unwrap_or(\"\".to_owned())\n\n }\n\n\n\n pub fn get_description(v: T) -> String\n\n where\n", "file_path": "src/components/common.rs", "rank": 23, "score": 20121.426033809974 }, { "content": " html! {\n\n <div class=classes!(classes) style=self.props.style.clone()>\n\n { self.props.children.clone() }\n\n </div>\n\n }\n\n }\n\n}\n\n\n\n#[derive(Properties, Clone, PartialEq, Debug)]\n\npub struct ChunkProps {\n\n #[prop_or(true)]\n\n pub visible: bool,\n\n\n\n #[prop_or_default]\n\n pub children: Children,\n\n}\n\n\n\npub enum ChunkMsg {}\n\n\n\n#[derive(Debug)]\n", "file_path": "src/components/common.rs", "rank": 24, "score": 20120.79221420664 }, { "content": "\n\n <MatList>\n\n { Self::view_drawer_plugins(props, Option::None, normal, routed) }\n\n { Self::view_drawer_plugins(props, Option::Some(\"Settings\"), settings, routed) }\n\n </MatList>\n\n </div>\n\n\n\n <drawer::MatDrawerAppContent>\n\n <div class=\"app-content\">\n\n { Self::view_content(props, link, routed.clone()) }\n\n </div>\n\n </drawer::MatDrawerAppContent>\n\n </MatDrawer>\n\n }\n\n }\n\n\n\n fn view_drawer_plugins(\n\n props: &Props<R>,\n\n title: Option<&str>,\n\n plugins: vec::Vec<lambda::Lambda<PluginProps<R>, Html>>,\n", "file_path": "src/components/frame.rs", "rank": 25, "score": 20120.723362525892 }, { "content": "pub struct Field<T> {\n\n string: String,\n\n error: Option<String>,\n\n parser: Box<dyn Fn(&str) -> Result<T>>,\n\n stringifier: Box<dyn Fn(T) -> String>,\n\n getter: Box<dyn Fn() -> Option<T>>,\n\n updater: Box<dyn Fn(T)>,\n\n}\n\n\n\nimpl<T: FromStr + ToString + 'static> Default for Field<T> {\n\n fn default() -> Self {\n\n Self::new(\n\n |s: &str| {\n\n s.trim()\n\n .parse::<T>()\n\n .map_err(|_| Error::msg(\"Invalid format\"))\n\n },\n\n |t| t.to_string(),\n\n )\n\n }\n", "file_path": "src/components/common.rs", "rank": 26, "score": 20120.638851258038 }, { "content": "\n\n fn change(&mut self, _props: Self::Properties) -> ShouldRender {\n\n // Should only return \"true\" if new properties are different to\n\n // previously received properties.\n\n // This component has no properties so we will always return \"false\".\n\n false\n\n }\n\n\n\n fn view(&self) -> Html {\n\n let props = self.props.clone();\n\n let drawer_open = self.drawer_open;\n\n let link = self.link.clone();\n\n\n\n html! {\n\n <Router<R>\n\n render = Router::render(move |routed: R| Self::view(&props, drawer_open, &link, routed.clone()))\n\n />\n\n }\n\n }\n\n}\n", "file_path": "src/components/frame.rs", "rank": 27, "score": 20120.442969671414 }, { "content": " Ok(data) => {\n\n self.error = None;\n\n (self.updater)(data);\n\n }\n\n Err(err) => {\n\n self.error = Some(err.to_string());\n\n }\n\n }\n\n }\n\n }\n\n\n\n pub fn get_value_str(&self) -> &str {\n\n self.string.as_ref()\n\n }\n\n\n\n pub fn update_value(&self) -> &str {\n\n self.string.as_ref()\n\n }\n\n\n\n pub fn get_value(&self) -> Option<T> {\n", "file_path": "src/components/common.rs", "rank": 28, "score": 20120.315348161756 }, { "content": " if normal.is_empty() && settings.is_empty() {\n\n return html! {\n\n <>\n\n { Self::view_content(props, link, routed.clone()) }\n\n </>\n\n };\n\n }\n\n\n\n html! {\n\n <MatDrawer\n\n open={drawer_open}\n\n drawer_type=\"modal\"\n\n onopened=link.callback(|_| Msg::Opened)\n\n onclosed=link.callback(|_| Msg::Closed)\n\n >\n\n <div class=\"drawer-content\">\n\n <drawer::MatDrawerHeader>\n\n <drawer::MatDrawerTitle>{\"WATER METER\"}</drawer::MatDrawerTitle>\n\n <drawer::MatDrawerSubtitle>{\"[Admin]\"}</drawer::MatDrawerSubtitle>\n\n </drawer::MatDrawerHeader>\n", "file_path": "src/components/frame.rs", "rank": 29, "score": 20119.591794772383 }, { "content": "use material_yew::select::ListIndex;\n\nuse material_yew::select::SelectedDetail;\n\nuse material_yew::top_app_bar::*;\n\nuse material_yew::*;\n\n\n\nuse lambda::Lambda;\n\n\n\nuse crate::{\n\n lambda,\n\n plugins::{Category, InsertionPoint},\n\n simple_plugins::SimplePlugin,\n\n};\n\n\n\nuse crate::api;\n\nuse crate::plugins::*;\n\n\n\nuse super::common::*;\n\n\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\n#[cfg_attr(feature = \"std\", derive(Hash))]\n\npub enum PluginBehavior {\n\n STA,\n\n AP,\n\n Mixed,\n\n}\n\n\n", "file_path": "src/components/wifi.rs", "rank": 30, "score": 20119.228663464437 }, { "content": "}\n\n\n\nimpl<T: 'static> Field<T> {\n\n pub fn new(\n\n parser: impl Fn(&str) -> Result<T> + 'static,\n\n stringifier: impl Fn(T) -> String + 'static,\n\n ) -> Self {\n\n Self {\n\n string: Default::default(),\n\n error: None,\n\n parser: Box::new(parser),\n\n stringifier: Box::new(stringifier),\n\n getter: Box::new(|| None),\n\n updater: Box::new(|_| {}),\n\n }\n\n }\n\n\n\n pub fn bind(\n\n &mut self,\n\n getter: impl Fn() -> Option<T> + 'static,\n", "file_path": "src/components/common.rs", "rank": 31, "score": 20119.07782807203 }, { "content": " html! {}\n\n }\n\n }\n\n\n\n fn view_app_bar(props: &Props<R>, link: &ComponentLink<Self>, routed: R) -> Html {\n\n let plugins =\n\n Self::get_nav_plugins(props, |nav| nav.insertion_point == InsertionPoint::Appbar);\n\n\n\n let has_drawer_plugins = props\n\n .navigation\n\n .iter()\n\n .any(|nav| nav.insertion_point == InsertionPoint::Drawer);\n\n\n\n html! {\n\n <MatTopAppBarFixed>\n\n {\n\n if has_drawer_plugins {\n\n html! {\n\n <MatTopAppBarNavigationIcon>\n\n <span onclick=link.callback(|_| Msg::NavIconClick)><MatIconButton icon=\"menu\"/></span>\n", "file_path": "src/components/frame.rs", "rank": 32, "score": 20118.653888837118 }, { "content": " Align::Right => Some(\"right\"),\n\n Align::Center => None,\n\n }\n\n .map(|align| format!(\"mdc-layout-grid--align-{}\", align))\n\n );\n\n\n\n html! {\n\n <div class=classes>\n\n { grid }\n\n </div>\n\n }\n\n } else {\n\n grid\n\n }\n\n }\n\n}\n\n\n\n#[derive(Properties, Clone, PartialEq, Debug)]\n\npub struct CellProps {\n\n #[prop_or_default]\n", "file_path": "src/components/common.rs", "rank": 33, "score": 20118.051081110276 }, { "content": " Top,\n\n Middle,\n\n Bottom,\n\n Stretch,\n\n}\n\n\n\nimpl Default for VAlign {\n\n fn default() -> Self {\n\n Self::Stretch\n\n }\n\n}\n\n\n\n#[derive(Properties, Clone, PartialEq, Debug)]\n\npub struct GridProps {\n\n #[prop_or_default]\n\n pub inner: bool,\n\n\n\n #[prop_or_default]\n\n pub align: Align,\n\n\n", "file_path": "src/components/common.rs", "rank": 34, "score": 20117.94732362719 }, { "content": " </div>\n\n }\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n\npub enum Align {\n\n Left,\n\n Right,\n\n Center,\n\n}\n\n\n\nimpl Default for Align {\n\n fn default() -> Self {\n\n Self::Center\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n\npub enum VAlign {\n", "file_path": "src/components/common.rs", "rank": 35, "score": 20117.825077314195 }, { "content": " }\n\n}\n\n\n\nimpl<T: ToString> ToString for Optional<T> {\n\n fn to_string(&self) -> String {\n\n match self.0.as_ref() {\n\n None => \"\".into(),\n\n Some(v) => v.to_string(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub enum Loadable<T> {\n\n Not,\n\n Loading(Option<T>),\n\n Loaded(T),\n\n}\n\n\n\nimpl<T> Loadable<T> {\n", "file_path": "src/components/common.rs", "rank": 36, "score": 20117.625873014324 }, { "content": " T: Description,\n\n {\n\n v.get_description()\n\n }\n\n\n\n pub fn get_domain(&self) -> Vec<T>\n\n where\n\n T: IntoDomainIterator,\n\n {\n\n T::iter().collect()\n\n }\n\n}\n\n\n\n#[derive(Properties, Clone, PartialEq, Debug)]\n\npub struct CenteredGridProps {\n\n #[prop_or_default]\n\n pub children: Children,\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "src/components/common.rs", "rank": 37, "score": 20117.46125264246 }, { "content": " fn view_content(props: &Props<R>, link: &ComponentLink<Self>, routed: R) -> Html {\n\n let plugins = props\n\n .content\n\n .iter()\n\n .map(|cnt| cnt.component.clone())\n\n .collect();\n\n\n\n Self::view_plugins(\n\n props,\n\n Some(Self::get_app_bar_renderer(props, link, routed)),\n\n plugins,\n\n routed,\n\n )\n\n }\n\n\n\n fn view_plugins(\n\n props: &Props<R>,\n\n app_bar_renderer: Option<lambda::Lambda<(), Html>>,\n\n plugins: vec::Vec<lambda::Lambda<PluginProps<R>, Html>>,\n\n routed: R,\n", "file_path": "src/components/frame.rs", "rank": 38, "score": 20117.31766073808 }, { "content": "impl<T> AsRef<T> for Editable<T> {\n\n fn as_ref(&self) -> &T {\n\n &self.data\n\n }\n\n}\n\n\n\nimpl<T> AsMut<T> for Editable<T> {\n\n fn as_mut(&mut self) -> &mut T {\n\n self.changed = true;\n\n &mut self.data\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Model<T>(pub Rc<RefCell<Loadable<T>>>);\n\n\n\nimpl<T> Default for Model<T> {\n\n fn default() -> Self {\n\n Self(Rc::new(RefCell::new(Loadable::Not)))\n\n }\n\n}\n\n\n\nimpl<T> Model<T> {\n\n pub fn new() -> Self {\n\n Default::default()\n\n }\n\n}\n\n\n", "file_path": "src/components/common.rs", "rank": 39, "score": 20117.14978537532 }, { "content": " #[prop_or_default]\n\n pub children: Children,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Grid {\n\n props: GridProps,\n\n}\n\n\n\npub enum GridMsg {}\n\n\n\nimpl Component for Grid {\n\n type Message = GridMsg;\n\n type Properties = GridProps;\n\n\n\n fn create(props: Self::Properties, _link: ComponentLink<Self>) -> Self {\n\n Grid { props }\n\n }\n\n\n\n fn update(&mut self, _msg: Self::Message) -> ShouldRender {\n", "file_path": "src/components/common.rs", "rank": 40, "score": 20116.830018729546 }, { "content": " #[prop_or_default]\n\n pub children: Children,\n\n}\n\n\n\npub enum CellMsg {}\n\n\n\n#[derive(Debug)]\n\npub struct Cell {\n\n props: CellProps,\n\n}\n\n\n\nimpl Component for Cell {\n\n type Message = CellMsg;\n\n type Properties = CellProps;\n\n\n\n fn create(props: Self::Properties, _link: ComponentLink<Self>) -> Self {\n\n Cell { props }\n\n }\n\n\n\n fn update(&mut self, _msg: Self::Message) -> ShouldRender {\n", "file_path": "src/components/common.rs", "rank": 41, "score": 20116.830018729546 }, { "content": " pub valign: VAlign,\n\n\n\n #[prop_or_default]\n\n pub order: Option<u32>,\n\n\n\n #[prop_or_default]\n\n pub span: Option<u32>,\n\n\n\n #[prop_or_default]\n\n pub span_desktop: Option<u32>,\n\n\n\n #[prop_or_default]\n\n pub span_tablet: Option<u32>,\n\n\n\n #[prop_or_default]\n\n pub span_phone: Option<u32>,\n\n\n\n #[prop_or_default]\n\n pub style: String,\n\n\n", "file_path": "src/components/common.rs", "rank": 42, "score": 20116.227136388752 }, { "content": " routed: R,\n\n ) -> Html {\n\n if !plugins.is_empty() {\n\n let list = html! {\n\n <MatList activatable=true>\n\n { Self::view_plugins(props, None, plugins, routed) }\n\n </MatList>\n\n };\n\n\n\n if let Some(title) = title {\n\n html! {\n\n <>\n\n <drawer::MatDrawerSubtitle>{title}</drawer::MatDrawerSubtitle>\n\n { list }\n\n </>\n\n }\n\n } else {\n\n list\n\n }\n\n } else {\n", "file_path": "src/components/frame.rs", "rank": 43, "score": 20115.949944491404 }, { "content": " self.secondary_dns.load();\n\n }\n\n}\n\n\n\n#[derive(Properties, Clone, Debug, PartialEq)]\n\npub struct Props {\n\n pub behavior: PluginBehavior,\n\n pub app_bar_renderer: Option<Lambda<(), Html>>,\n\n pub api_endpoint: Option<APIEndpoint>,\n\n}\n\n\n\npub struct WiFi {\n\n props: Props,\n\n conf: Model<Editable<wifi::Configuration>>,\n\n\n\n fields: Fields,\n\n ap_fields: ApFields,\n\n\n\n status: Model<wifi::Status>,\n\n\n", "file_path": "src/components/wifi.rs", "rank": 44, "score": 20115.753639327722 }, { "content": " updater: impl Fn(T) + 'static,\n\n ) {\n\n self.getter = Box::new(getter);\n\n self.updater = Box::new(updater);\n\n\n\n self.load();\n\n }\n\n\n\n pub fn load(&mut self) {\n\n self.error = None;\n\n self.string = (self.getter)().map_or(\"\".to_owned(), |d| (self.stringifier)(d));\n\n }\n\n\n\n pub fn update(&mut self, value: String) {\n\n let value = value.trim();\n\n\n\n if self.string != value {\n\n self.string = value.to_owned();\n\n\n\n match (self.parser)(value) {\n", "file_path": "src/components/common.rs", "rank": 45, "score": 20115.06101647655 }, { "content": " .borrow()\n\n .ap_ip_conf()\n\n .map(getter)\n\n .map(Clone::clone)\n\n .map(Into::into)\n\n },\n\n move |v| *updater(model_w.0.borrow_mut().ap_ip_conf_mut()) = v.into(),\n\n );\n\n }\n\n}\n\n\n\nimpl Loadable<wifi::Status> {\n\n fn client_ip_settings(&self) -> Option<&ipv4::ClientSettings> {\n\n self.data_ref()?\n\n .0\n\n .get_operating()?\n\n .get_operating()?\n\n .get_operating()\n\n }\n\n\n", "file_path": "src/components/wifi.rs", "rank": 46, "score": 20114.36065578204 }, { "content": " aps: Loadable<vec::Vec<wifi::AccessPointInfo>>,\n\n\n\n access_points_shown: bool,\n\n password_confirmed: Rc<RefCell<String>>,\n\n\n\n link: ComponentLink<Self>,\n\n}\n\n\n\npub enum Msg {\n\n GetConfiguration,\n\n GotConfiguration(Result<wifi::Configuration>),\n\n GetStatus,\n\n GotStatus(Result<wifi::Status>),\n\n\n\n GetAccessPoints,\n\n GotAccessPoints(Result<vec::Vec<wifi::AccessPointInfo>>),\n\n\n\n ShowAccessPoints,\n\n ShowConfiguration(Option<(String, AuthMethod)>),\n\n\n", "file_path": "src/components/wifi.rs", "rank": 47, "score": 20114.296215880935 }, { "content": " type Message = Msg;\n\n type Properties = Props<R>;\n\n\n\n fn create(props: Self::Properties, link: ComponentLink<Self>) -> Self {\n\n Self {\n\n link,\n\n props,\n\n drawer_open: false,\n\n }\n\n }\n\n\n\n fn update(&mut self, msg: Self::Message) -> ShouldRender {\n\n match msg {\n\n Msg::NavIconClick => self.drawer_open = !self.drawer_open,\n\n Msg::Closed => self.drawer_open = false,\n\n Msg::Opened => self.drawer_open = true,\n\n }\n\n\n\n true\n\n }\n", "file_path": "src/components/frame.rs", "rank": 48, "score": 20114.167349761763 }, { "content": " pub fn data_mut(&mut self) -> &mut T {\n\n self.try_data_mut().unwrap()\n\n }\n\n}\n\n\n\nimpl<T> Default for Loadable<T> {\n\n fn default() -> Self {\n\n Self::Not\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Editable<T> {\n\n changed: bool,\n\n data: T,\n\n}\n\n\n\nimpl<T> Editable<T> {\n\n pub fn new(data: T) -> Self {\n\n Self {\n", "file_path": "src/components/common.rs", "rank": 49, "score": 20113.87395471593 }, { "content": " }\n\n\n\n fn client_ip_conf(&self) -> Option<&ipv4::ClientConfiguration> {\n\n self.client_conf()?.as_ip_conf_ref()\n\n }\n\n\n\n fn client_ip_settings(&self) -> Option<&ipv4::ClientSettings> {\n\n self.client_ip_conf()?.as_fixed_settings_ref()\n\n }\n\n\n\n fn client_ip_settings_mut(&mut self) -> &mut ipv4::ClientSettings {\n\n self.client_conf_mut()\n\n .as_ip_conf_mut()\n\n .as_fixed_settings_mut()\n\n }\n\n}\n\n\n\nimpl Loadable<Editable<wifi::Configuration>> {\n\n fn ap_conf(&self) -> Option<&wifi::AccessPointConfiguration> {\n\n self.data_ref()?.as_ref().as_ap_conf_ref()\n", "file_path": "src/components/wifi.rs", "rank": 50, "score": 20113.57683512843 }, { "content": " status: &Model<wifi::Status>,\n\n f: &mut Field<Q>,\n\n getter: fn(&ipv4::ClientSettings) -> &T,\n\n updater: fn(&mut ipv4::ClientSettings) -> &mut T,\n\n ) where\n\n T: From<Q> + Clone + 'static,\n\n Q: From<T> + Clone + 'static,\n\n {\n\n let model_r = self.clone();\n\n let model_w = self.clone();\n\n let status_model = status.clone();\n\n\n\n f.bind(\n\n move || {\n\n model_r\n\n .0\n\n .borrow()\n\n .client_ip_settings()\n\n .or(status_model.0.borrow().client_ip_settings())\n\n .map(getter)\n", "file_path": "src/components/wifi.rs", "rank": 51, "score": 20113.19779712025 }, { "content": " changed: false,\n\n data,\n\n }\n\n }\n\n\n\n pub fn get(self) -> T {\n\n self.data\n\n }\n\n\n\n pub fn is_changed(&self) -> bool {\n\n self.changed\n\n }\n\n}\n\n\n\nimpl<T: Default> Default for Editable<T> {\n\n fn default() -> Self {\n\n Editable::new(Default::default())\n\n }\n\n}\n\n\n", "file_path": "src/components/common.rs", "rank": 52, "score": 20113.169205263395 }, { "content": "\n\n fn view(&self) -> Html {\n\n if self.props.visible {\n\n html! {\n\n <>\n\n { self.props.children.clone() }\n\n </>\n\n }\n\n } else {\n\n html! {}\n\n }\n\n }\n\n}\n", "file_path": "src/components/common.rs", "rank": 53, "score": 20113.163532194965 }, { "content": " {\n\n for self.aps.data_ref().or(Some(&vec![])).unwrap().iter().map(|item| {\n\n let ssid = item.ssid.clone();\n\n let auth_method = item.auth_method;\n\n\n\n let cb = self.link.callback(move |event: RequestSelectedDetail| {\n\n if event.selected {\n\n Msg::ShowConfiguration(Some((ssid.clone(), auth_method)))\n\n } else {\n\n Msg::None\n\n }\n\n });\n\n\n\n html! {\n\n <MatListItem\n\n selected=false\n\n tabindex=-1\n\n value=item.ssid.clone()\n\n graphic={GraphicType::Icon}\n\n on_request_selected=cb\n", "file_path": "src/components/wifi.rs", "rank": 54, "score": 20112.995968878924 }, { "content": " fn view_configuration_cells(\n\n &self,\n\n ap: bool,\n\n sta: bool,\n\n lspan: u32,\n\n mspan: u32,\n\n rspan: u32,\n\n ) -> Html {\n\n // TODO validity_transform={Some(MatTextField::validity_transform(|_, _| *ValidityState::new().set_bad_input(self.fields.ssid.is_valid())))}\n\n\n\n let aspan = lspan + mspan + rspan;\n\n\n\n html! {\n\n <>\n\n // Status\n\n <Chunk visible=ap>\n\n <Cell span=lspan/>\n\n <Cell span=mspan style=\"text-align: center;\">\n\n { self.status.0.borrow().ap_status_str() }\n\n </Cell>\n", "file_path": "src/components/wifi.rs", "rank": 55, "score": 20112.66713504891 }, { "content": " move || Some(password_confirmed_g.borrow().clone()),\n\n move |value| *password_confirmed_u.borrow_mut() = value,\n\n );\n\n\n\n self.conf.bind_model_ip_ap(\n\n &mut self.ap_fields.subnet,\n\n |settings| &settings.subnet,\n\n |settings| &mut settings.subnet,\n\n );\n\n\n\n self.conf.bind_model_ip_ap(\n\n &mut self.ap_fields.dns,\n\n |settings| &settings.dns,\n\n |settings| &mut settings.dns,\n\n );\n\n\n\n self.conf.bind_model_ip_ap(\n\n &mut self.ap_fields.secondary_dns,\n\n |settings| &settings.secondary_dns,\n\n |settings| &mut settings.secondary_dns,\n\n );\n\n }\n\n}\n\n\n", "file_path": "src/components/wifi.rs", "rank": 56, "score": 20112.029105853017 }, { "content": "\n\n None,\n\n}\n\n\n\nimpl WiFi {\n\n fn create_api(\n\n api_endpoint: Option<&APIEndpoint>,\n\n ) -> Box<dyn wifi::WifiAsync<Error = anyhow::Error>> {\n\n match api_endpoint {\n\n None => Box::new(api::wifi::Dummy),\n\n Some(ep) => Box::new(api::wifi::Rest::new(ep.uri.clone(), &ep.headers)),\n\n }\n\n }\n\n\n\n fn is_loaded(&self) -> bool {\n\n self.conf.0.borrow().is_loaded() && self.status.0.borrow().is_loaded()\n\n }\n\n\n\n fn is_dhcp(&self) -> bool {\n\n match self.conf.0.borrow().client_ip_conf() {\n", "file_path": "src/components/wifi.rs", "rank": 57, "score": 20111.831434311 }, { "content": " pub fn data_ref(&self) -> Option<&T> {\n\n if let Loadable::Loaded(data) = self {\n\n Some(data)\n\n } else if let Loadable::Loading(Some(data)) = self {\n\n Some(data)\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n pub fn try_data_mut(&mut self) -> Option<&mut T> {\n\n if let Loadable::Loaded(data) = self {\n\n Some(data)\n\n } else if let Loadable::Loading(Some(data)) = self {\n\n Some(data)\n\n } else {\n\n None\n\n }\n\n }\n\n\n", "file_path": "src/components/common.rs", "rank": 58, "score": 20111.67831231116 }, { "content": " .map(Clone::clone)\n\n .map(Into::into)\n\n },\n\n move |v| *updater(model_w.0.borrow_mut().client_ip_settings_mut()) = v.into(),\n\n );\n\n }\n\n}\n\n\n\nimpl Model<Editable<wifi::Configuration>> {\n\n fn bind_model_wifi_ap<T: Clone + 'static>(\n\n &self,\n\n f: &mut Field<T>,\n\n getter: fn(&wifi::AccessPointConfiguration) -> &T,\n\n updater: fn(&mut wifi::AccessPointConfiguration) -> &mut T,\n\n ) {\n\n let model_r = self.clone();\n\n let model_w = self.clone();\n\n\n\n f.bind(\n\n move || model_r.0.borrow().ap_conf().map(getter).map(Clone::clone),\n", "file_path": "src/components/wifi.rs", "rank": 59, "score": 20111.665403749197 }, { "content": "pub struct CenteredGrid {\n\n props: CenteredGridProps,\n\n}\n\n\n\npub enum CenteredGridMsg {}\n\n\n\nimpl Component for CenteredGrid {\n\n type Message = CenteredGridMsg;\n\n type Properties = CenteredGridProps;\n\n\n\n fn create(props: Self::Properties, _link: ComponentLink<Self>) -> Self {\n\n CenteredGrid { props }\n\n }\n\n\n\n fn update(&mut self, _msg: Self::Message) -> ShouldRender {\n\n false\n\n }\n\n\n\n fn change(&mut self, props: Self::Properties) -> ShouldRender {\n\n self.props = props;\n", "file_path": "src/components/common.rs", "rank": 60, "score": 20111.630278288278 }, { "content": " }\n\n}\n\n\n\nimpl Loadable<wifi::Status> {\n\n fn ap_status_str(&self) -> &'static str {\n\n if !self.is_loaded() {\n\n return \"Waiting for status info...\";\n\n }\n\n\n\n let status = self.data_ref().unwrap();\n\n\n\n match &status.1 {\n\n wifi::ApStatus::Stopped => \"Stopped\",\n\n wifi::ApStatus::Starting => \"Starting...\",\n\n wifi::ApStatus::Started(ref ss) => match ss {\n\n wifi::ApIpStatus::Disabled => \"Disabled\",\n\n wifi::ApIpStatus::Waiting => \"Waiting for IP...\",\n\n wifi::ApIpStatus::Done => \"Connected\",\n\n },\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/components/wifi.rs", "rank": 61, "score": 20111.58162951059 }, { "content": " fields: Default::default(),\n\n ap_fields: Default::default(),\n\n aps: Default::default(),\n\n status: Default::default(),\n\n password_confirmed: Rc::new(RefCell::new(\"\".into())),\n\n access_points_shown: false,\n\n link,\n\n };\n\n\n\n wifi.bind_model();\n\n wifi.bind_model_ap();\n\n\n\n wifi.link.send_message(Msg::GetConfiguration);\n\n wifi.link.send_message(Msg::GetStatus);\n\n\n\n wifi\n\n }\n\n\n\n fn update(&mut self, msg: Self::Message) -> ShouldRender {\n\n match msg {\n", "file_path": "src/components/wifi.rs", "rank": 62, "score": 20111.5461848751 }, { "content": " twoline=true\n\n >\n\n <MatIcon>{if item.auth_method == wifi::AuthMethod::None {\"signal_wifi_4_bar\"} else {\"signal_wifi_4_bar_lock\"}}</MatIcon>\n\n <span>{item.ssid.clone()}</span>\n\n <span slot=\"secondary\">{strum::EnumMessage::get_message(&item.auth_method).unwrap()}</span>\n\n </MatListItem>\n\n }\n\n })\n\n }\n\n </MatList>\n\n </CenteredGrid>\n\n </>\n\n }\n\n }\n\n\n\n fn view_configuration(&self) -> Html {\n\n let (lspan, mut mspan, rspan) = (1, 10, 1);\n\n let (mut ap, mut sta) = (false, false);\n\n\n\n match self.props.behavior {\n", "file_path": "src/components/wifi.rs", "rank": 63, "score": 20111.526813587618 }, { "content": " fn client_status_str(&self) -> &'static str {\n\n if !self.is_loaded() {\n\n return \"Waiting for status info...\";\n\n }\n\n\n\n let status = self.data_ref().unwrap();\n\n\n\n match &status.0 {\n\n wifi::ClientStatus::Stopped => \"Stopped\",\n\n wifi::ClientStatus::Starting => \"Starting...\",\n\n wifi::ClientStatus::Started(ref ss) => match ss {\n\n wifi::ClientConnectionStatus::Disconnected => \"Disconnected\",\n\n wifi::ClientConnectionStatus::Connecting => \"Connecting...\",\n\n wifi::ClientConnectionStatus::Connected(ref cc) => match cc {\n\n wifi::ClientIpStatus::Disabled => \"Connected (IP disabled)\",\n\n wifi::ClientIpStatus::Waiting => \"Waiting for IP...\",\n\n wifi::ClientIpStatus::Done(_) => \"Connected\",\n\n },\n\n },\n\n }\n", "file_path": "src/components/wifi.rs", "rank": 64, "score": 20111.48928181117 }, { "content": " let props = Props {\n\n behavior,\n\n api_endpoint: plugin_props.api_endpoint,\n\n app_bar_renderer: plugin_props.app_bar_renderer,\n\n };\n\n\n\n html! {\n\n <WiFi with props/>\n\n }\n\n }),\n\n }\n\n}\n\n\n\nimpl Loadable<Editable<wifi::Configuration>> {\n\n fn client_conf(&self) -> Option<&wifi::ClientConfiguration> {\n\n self.data_ref()?.as_ref().as_client_conf_ref()\n\n }\n\n\n\n fn client_conf_mut(&mut self) -> &mut wifi::ClientConfiguration {\n\n self.data_mut().as_mut().as_client_conf_mut()\n", "file_path": "src/components/wifi.rs", "rank": 65, "score": 20111.3402666484 }, { "content": " false\n\n }\n\n\n\n fn change(&mut self, props: Self::Properties) -> ShouldRender {\n\n self.props = props;\n\n true\n\n }\n\n\n\n fn view(&self) -> Html {\n\n let grid = html! {\n\n <div class=\"mdc-layout-grid__inner\">\n\n { self.props.children.clone() }\n\n </div>\n\n };\n\n\n\n if !self.props.inner {\n\n let classes = classes!(\n\n \"mdc-layout-grid\",\n\n match self.props.align {\n\n Align::Left => Some(\"left\"),\n", "file_path": "src/components/common.rs", "rank": 66, "score": 20111.333076630148 }, { "content": "pub struct Chunk {\n\n props: ChunkProps,\n\n}\n\n\n\nimpl Component for Chunk {\n\n type Message = ChunkMsg;\n\n type Properties = ChunkProps;\n\n\n\n fn create(props: Self::Properties, _link: ComponentLink<Self>) -> Self {\n\n Chunk { props }\n\n }\n\n\n\n fn update(&mut self, _msg: Self::Message) -> ShouldRender {\n\n false\n\n }\n\n\n\n fn change(&mut self, props: Self::Properties) -> ShouldRender {\n\n self.props = props;\n\n true\n\n }\n", "file_path": "src/components/common.rs", "rank": 67, "score": 20111.186340363834 }, { "content": " fn change(&mut self, props: Self::Properties) -> ShouldRender {\n\n if self.props.api_endpoint != props.api_endpoint {\n\n self.conf = Model::new();\n\n self.aps = Default::default();\n\n self.props = props;\n\n self.bind_model();\n\n\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n fn view(&self) -> Html {\n\n if self.access_points_shown {\n\n self.view_access_points()\n\n } else {\n\n self.view_configuration()\n\n }\n\n }\n", "file_path": "src/components/wifi.rs", "rank": 68, "score": 20110.798503169553 }, { "content": " PluginBehavior::STA => sta = true,\n\n PluginBehavior::AP => ap = true,\n\n PluginBehavior::Mixed => {\n\n ap = true;\n\n sta = true;\n\n mspan = 4;\n\n }\n\n }\n\n\n\n html! {\n\n <>\n\n {self.props.app_bar_renderer.as_ref().unwrap().call(())}\n\n\n\n <Grid>\n\n {self.view_configuration_cells(ap, sta, lspan, mspan, rspan)}\n\n </Grid>\n\n </>\n\n }\n\n }\n\n\n", "file_path": "src/components/wifi.rs", "rank": 69, "score": 20110.71176849069 }, { "content": " SSIDChanged(String),\n\n AuthMethodChanged(AuthMethod),\n\n PasswordChanged(String),\n\n\n\n DHCPChanged(bool),\n\n SubnetChanged(String),\n\n IpChanged(String),\n\n DnsChanged(String),\n\n SecondaryDnsChanged(String),\n\n\n\n ApSSIDChanged(String),\n\n ApSSIDHiddenChanged(bool),\n\n ApAuthMethodChanged(AuthMethod),\n\n ApPasswordChanged(String),\n\n ApPasswordConfirmedChanged(String),\n\n\n\n ApSubnetChanged(String),\n\n ApDHCPEnabledChanged(bool),\n\n ApDnsChanged(String),\n\n ApSecondaryDnsChanged(String),\n", "file_path": "src/components/wifi.rs", "rank": 70, "score": 20110.304611369877 }, { "content": " self.conf.bind_model_wifi(\n\n &mut self.fields.password,\n\n |conf| &conf.password,\n\n |conf| &mut conf.password,\n\n );\n\n\n\n self.conf.bind_model_ip(\n\n &self.status,\n\n &mut self.fields.subnet,\n\n |settings| &settings.subnet,\n\n |settings| &mut settings.subnet,\n\n );\n\n\n\n self.conf.bind_model_ip(\n\n &self.status,\n\n &mut self.fields.ip,\n\n |settings| &settings.ip,\n\n |settings| &mut settings.ip,\n\n );\n\n\n", "file_path": "src/components/wifi.rs", "rank": 71, "score": 20110.271685144296 }, { "content": " self.conf.bind_model_ip(\n\n &self.status,\n\n &mut self.fields.dns,\n\n |settings| &settings.dns,\n\n |settings| &mut settings.dns,\n\n );\n\n\n\n self.conf.bind_model_ip(\n\n &self.status,\n\n &mut self.fields.secondary_dns,\n\n |settings| &settings.secondary_dns,\n\n |settings| &mut settings.secondary_dns,\n\n );\n\n }\n\n}\n\n\n\nimpl WiFi {\n\n fn bind_model_ap(&mut self) {\n\n self.conf.bind_model_wifi_ap(\n\n &mut self.ap_fields.ssid,\n", "file_path": "src/components/wifi.rs", "rank": 72, "score": 20110.223400524726 }, { "content": " </MatTopAppBarNavigationIcon>\n\n }\n\n } else {\n\n html! {}\n\n }\n\n }\n\n <div slot=\"title\">{\"WM1 (SHELLY WATER METER)\"}</div>\n\n <MatTopAppBarActionItems>\n\n <span class=\"mdc-typography--body2\">\n\n {\"Sat 16:11\"}\n\n </span>\n\n\n\n { Self::view_plugins(props, None, plugins, routed) }\n\n\n\n <MatIconButton icon=\"power_settings_new\"/>\n\n </MatTopAppBarActionItems>\n\n </MatTopAppBarFixed>\n\n }\n\n }\n\n\n", "file_path": "src/components/frame.rs", "rank": 73, "score": 20110.056941906216 }, { "content": " }\n\n Msg::AuthMethodChanged(value) => {\n\n self.fields.auth_method.update(value.to_string());\n\n true\n\n }\n\n Msg::PasswordChanged(value) => {\n\n self.fields.password.update(value);\n\n true\n\n }\n\n Msg::DHCPChanged(dhcp) => {\n\n *self.conf.0.borrow_mut().client_conf_mut().as_ip_conf_mut() = if dhcp {\n\n ipv4::ClientConfiguration::DHCP\n\n } else {\n\n ipv4::ClientConfiguration::Fixed(Default::default())\n\n };\n\n\n\n true\n\n }\n\n Msg::SubnetChanged(value) => {\n\n self.fields.subnet.update(value);\n", "file_path": "src/components/wifi.rs", "rank": 74, "score": 20110.01012179281 }, { "content": " }\n\n}\n\n\n\nimpl<T> From<Optional<T>> for Option<T> {\n\n fn from(o: Optional<T>) -> Self {\n\n o.0\n\n }\n\n}\n\n\n\nimpl<T: FromStr> FromStr for Optional<T> {\n\n type Err = T::Err;\n\n\n\n fn from_str(mut s: &str) -> Result<Self, Self::Err> {\n\n s = s.trim();\n\n\n\n if s.len() == 0 {\n\n Ok(Optional(None))\n\n } else {\n\n s.parse::<T>().map(|op| Optional(Some(op)))\n\n }\n", "file_path": "src/components/common.rs", "rank": 75, "score": 20109.84780156241 }, { "content": " true\n\n }\n\n\n\n fn view(&self) -> Html {\n\n html! {\n\n <div class=\"page\">\n\n <div class=\"mdc-layout-grid\">\n\n <div class=\"mdc-layout-grid__inner\">\n\n // Spacer\n\n <div class=\"mdc-layout-grid__cell mdc-layout-grid__cell--span-3 mdc-layout-grid__cell--span-2-tablet mdc-layout-grid__cell--span-1-phone\"></div>\n\n\n\n // Content\n\n <div class=\"mdc-layout-grid__cell mdc-layout-grid__cell--span-6 mdc-layout-grid__cell--span-4-tablet mdc-layout-grid__cell--span-2-phone\">\n\n { self.props.children.clone() }\n\n </div>\n\n\n\n // Spacer\n\n <div class=\"mdc-layout-grid__cell mdc-layout-grid__cell--span-3 mdc-layout-grid__cell--span-2-tablet mdc-layout-grid__cell--span-1-phone\"></div>\n\n </div>\n\n </div>\n", "file_path": "src/components/common.rs", "rank": 76, "score": 20109.664888263535 }, { "content": " }\n\n\n\n fn ap_conf_mut(&mut self) -> &mut wifi::AccessPointConfiguration {\n\n self.data_mut().as_mut().as_ap_conf_mut()\n\n }\n\n\n\n fn ap_ip_conf(&self) -> Option<&ipv4::RouterConfiguration> {\n\n self.ap_conf()?.as_ip_conf_ref()\n\n }\n\n\n\n fn ap_ip_conf_mut(&mut self) -> &mut ipv4::RouterConfiguration {\n\n self.ap_conf_mut().as_ip_conf_mut()\n\n }\n\n}\n\n\n\nimpl Model<Editable<wifi::Configuration>> {\n\n fn bind_model_wifi<T: Clone + 'static>(\n\n &self,\n\n f: &mut Field<T>,\n\n getter: fn(&wifi::ClientConfiguration) -> &T,\n", "file_path": "src/components/wifi.rs", "rank": 77, "score": 20109.6358967632 }, { "content": " updater: fn(&mut wifi::ClientConfiguration) -> &mut T,\n\n ) {\n\n let model_r = self.clone();\n\n let model_w = self.clone();\n\n\n\n f.bind(\n\n move || {\n\n model_r\n\n .0\n\n .borrow()\n\n .client_conf()\n\n .map(getter)\n\n .map(Clone::clone)\n\n },\n\n move |v| *updater(model_w.0.borrow_mut().client_conf_mut()) = v,\n\n );\n\n }\n\n\n\n fn bind_model_ip<Q, T>(\n\n &self,\n", "file_path": "src/components/wifi.rs", "rank": 78, "score": 20109.59855544923 }, { "content": " move |v| *updater(model_w.0.borrow_mut().ap_conf_mut()) = v,\n\n );\n\n }\n\n\n\n fn bind_model_ip_ap<Q, T>(\n\n &self,\n\n f: &mut Field<Q>,\n\n getter: fn(&ipv4::RouterConfiguration) -> &T,\n\n updater: fn(&mut ipv4::RouterConfiguration) -> &mut T,\n\n ) where\n\n T: From<Q> + Clone + 'static,\n\n Q: From<T> + Clone + 'static,\n\n {\n\n let model_r = self.clone();\n\n let model_w = self.clone();\n\n\n\n f.bind(\n\n move || {\n\n model_r\n\n .0\n", "file_path": "src/components/wifi.rs", "rank": 79, "score": 20109.468261763323 }, { "content": " pub fn is_loaded(&self) -> bool {\n\n match self {\n\n Self::Loaded(_) | Self::Loading(Some(_)) => true,\n\n _ => false,\n\n }\n\n }\n\n\n\n pub fn is_loading(&self) -> bool {\n\n match self {\n\n Self::Loading(_) => true,\n\n _ => false,\n\n }\n\n }\n\n\n\n pub fn loading(&mut self) {\n\n let old_self = mem::replace(self, Loadable::Not);\n\n match old_self {\n\n Self::Not => *self = Self::Loading(None),\n\n Self::Loaded(data) => *self = Self::Loading(Some(data)),\n\n _ => (),\n", "file_path": "src/components/common.rs", "rank": 80, "score": 20109.38566795476 }, { "content": " validate_on_initial_render=true\n\n auto_validate=true\n\n helper={ self.ap_fields.ssid.get_error_str() }\n\n />\n\n </Cell>\n\n <Cell span=rspan/>\n\n </Chunk>\n\n <Chunk visible=sta>\n\n <Cell span=lspan/>\n\n <Cell span=mspan style=\"text-align: center;\">\n\n <MatTextField\n\n outlined=true\n\n label=\"SSID\"\n\n disabled=!self.is_loaded()\n\n value=self.fields.ssid.get_value_str().to_owned()\n\n oninput=self.link.callback(|id: InputData| Msg::SSIDChanged(id.value))\n\n validate_on_initial_render=true\n\n auto_validate=true\n\n helper={ self.fields.ssid.get_error_str() }\n\n />\n", "file_path": "src/components/wifi.rs", "rank": 81, "score": 20109.033944156312 }, { "content": " }\n\n }\n\n\n\n pub fn loaded(&mut self, data: T) {\n\n *self = Self::Loaded(data)\n\n }\n\n\n\n pub fn loaded_result(&mut self, result: Result<T>) {\n\n match result {\n\n Ok(data) => *self = Self::Loaded(data),\n\n Err(_) => {\n\n let old_self = mem::replace(self, Loadable::Not);\n\n match old_self {\n\n Self::Loading(Some(data)) => *self = Self::Loaded(data),\n\n other => *self = other,\n\n }\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/components/common.rs", "rank": 82, "score": 20109.02401693627 }, { "content": " outlined=true\n\n label=\"DNS\"\n\n disabled=!self.is_loaded()\n\n value=self.ap_fields.dns.get_value_str().to_owned()\n\n oninput=self.link.callback(|id: InputData| Msg::ApDnsChanged(id.value))\n\n validate_on_initial_render=true\n\n auto_validate=true\n\n helper={ self.ap_fields.dns.get_error_str() }\n\n />\n\n </Cell>\n\n <Cell span=rspan/>\n\n </Chunk>\n\n <Chunk visible=sta>\n\n <Cell span=lspan/>\n\n <Cell span=mspan style=\"text-align: center;\">\n\n <MatTextField\n\n outlined=true\n\n label=\"DNS\"\n\n disabled=!self.is_loaded() || self.is_dhcp()\n\n value=self.fields.dns.get_value_str().to_owned()\n", "file_path": "src/components/wifi.rs", "rank": 83, "score": 20108.99491639006 }, { "content": " auto_validate=true\n\n helper={ self.ap_fields.secondary_dns.get_error_str() }\n\n />\n\n </Cell>\n\n <Cell span=rspan/>\n\n </Chunk>\n\n <Chunk visible=sta>\n\n <Cell span=lspan/>\n\n <Cell span=mspan style=\"text-align: center;\">\n\n <MatTextField\n\n outlined=true\n\n label=\"Secondary DNS\"\n\n disabled=!self.is_loaded() || self.is_dhcp()\n\n value=self.fields.secondary_dns.get_value_str().to_owned()\n\n oninput=self.link.callback(|id: InputData| Msg::SecondaryDnsChanged(id.value))\n\n validate_on_initial_render=true\n\n auto_validate=true\n\n helper={ self.fields.secondary_dns.get_error_str() }\n\n />\n\n </Cell>\n\n <Cell span=rspan/>\n\n </Chunk>\n\n </>\n\n }\n\n }\n\n}\n", "file_path": "src/components/wifi.rs", "rank": 84, "score": 20108.944440797695 }, { "content": " <MatTextField\n\n outlined=true\n\n label=\"Subnet/Gateway\"\n\n disabled=!self.is_loaded() || self.is_dhcp()\n\n value=self.fields.subnet.get_value_str().to_owned()\n\n oninput=self.link.callback(|id: InputData| Msg::SubnetChanged(id.value))\n\n validate_on_initial_render=true\n\n auto_validate=true\n\n helper={ self.fields.subnet.get_error_str() }\n\n />\n\n </Cell>\n\n <Cell span=rspan/>\n\n </Chunk>\n\n\n\n // IP (STA only)\n\n <Chunk visible={sta && ap}>\n\n <Cell span=aspan/>\n\n </Chunk>\n\n <Chunk visible=sta>\n\n <Cell span=lspan/>\n", "file_path": "src/components/wifi.rs", "rank": 85, "score": 20108.689796933886 }, { "content": " let api = Self::create_api(self.props.api_endpoint.as_ref());\n\n\n\n self.status.0.borrow_mut().loading();\n\n self.link\n\n .send_future(async move { Msg::GotStatus(api.get_status().await) });\n\n\n\n true\n\n }\n\n Msg::GotStatus(result) => {\n\n self.status.0.borrow_mut().loaded_result(result);\n\n self.fields.load();\n\n self.ap_fields.load();\n\n true\n\n }\n\n Msg::GetAccessPoints => {\n\n let mut api = Self::create_api(self.props.api_endpoint.as_ref());\n\n\n\n self.aps.loading();\n\n self.link\n\n .send_future(async move { Msg::GotAccessPoints(api.scan().await) });\n", "file_path": "src/components/wifi.rs", "rank": 86, "score": 20108.6400173483 }, { "content": " |conf| &conf.ssid,\n\n |conf| &mut conf.ssid,\n\n );\n\n\n\n self.conf.bind_model_wifi_ap(\n\n &mut self.ap_fields.auth_method,\n\n |conf| &conf.auth_method,\n\n |conf| &mut conf.auth_method,\n\n );\n\n\n\n self.conf.bind_model_wifi_ap(\n\n &mut self.ap_fields.password,\n\n |conf| &conf.password,\n\n |conf| &mut conf.password,\n\n );\n\n\n\n let password_confirmed_g = self.password_confirmed.clone();\n\n let password_confirmed_u = self.password_confirmed.clone();\n\n\n\n self.ap_fields.password_confirmed.bind(\n", "file_path": "src/components/wifi.rs", "rank": 87, "score": 20108.595699344798 }, { "content": " <Cell span=mspan style=\"text-align: center;\">\n\n <MatTextField\n\n outlined=true\n\n label=\"IP\"\n\n disabled=!self.is_loaded() || self.is_dhcp()\n\n value=self.fields.ip.get_value_str().to_owned()\n\n oninput=self.link.callback(|id: InputData| Msg::IpChanged(id.value))\n\n validate_on_initial_render=true\n\n auto_validate=true\n\n helper={ self.fields.ip.get_error_str() }\n\n />\n\n </Cell>\n\n <Cell span=rspan/>\n\n </Chunk>\n\n\n\n // DNS\n\n <Chunk visible=ap>\n\n <Cell span=lspan/>\n\n <Cell span=mspan style=\"text-align: center;\">\n\n <MatTextField\n", "file_path": "src/components/wifi.rs", "rank": 88, "score": 20108.585754982316 }, { "content": " <Cell span=rspan/>\n\n </Chunk>\n\n <Chunk visible=sta>\n\n <Cell span=lspan/>\n\n <Cell span=mspan style=\"text-align: center;\">\n\n { self.status.0.borrow().client_status_str() }\n\n </Cell>\n\n <Cell span=rspan/>\n\n </Chunk>\n\n\n\n // SSID\n\n <Chunk visible=ap>\n\n <Cell span=lspan/>\n\n <Cell span=mspan style=\"text-align: center;\">\n\n <MatTextField\n\n outlined=true\n\n label=\"SSID\"\n\n disabled=!self.is_loaded()\n\n value=self.ap_fields.ssid.get_value_str().to_owned()\n\n oninput=self.link.callback(|id: InputData| Msg::ApSSIDChanged(id.value))\n", "file_path": "src/components/wifi.rs", "rank": 89, "score": 20108.585754982316 }, { "content": " // Subnet\n\n <Chunk visible=ap>\n\n <Cell span=lspan/>\n\n <Cell span=mspan style=\"text-align: center;\">\n\n <MatTextField\n\n outlined=true\n\n label=\"Subnet/Gateway\"\n\n disabled=!self.is_loaded()\n\n value=self.ap_fields.subnet.get_value_str().to_owned()\n\n oninput=self.link.callback(|id: InputData| Msg::ApSubnetChanged(id.value))\n\n validate_on_initial_render=true\n\n auto_validate=true\n\n helper={ self.ap_fields.subnet.get_error_str() }\n\n />\n\n </Cell>\n\n <Cell span=rspan/>\n\n </Chunk>\n\n <Chunk visible=sta>\n\n <Cell span=lspan/>\n\n <Cell span=mspan style=\"text-align: center;\">\n", "file_path": "src/components/wifi.rs", "rank": 90, "score": 20108.502892894285 }, { "content": "\n\n // Password\n\n <Chunk visible={ap && Some(wifi::AuthMethod::None) != self.ap_fields.auth_method.get_value()}>\n\n <Cell span=lspan/>\n\n <Cell span=mspan style=\"text-align: center;\">\n\n <MatTextField\n\n outlined=true\n\n label={\n\n if Some(wifi::AuthMethod::WEP) == self.ap_fields.auth_method.get_value() {\n\n \"Key\"\n\n } else {\n\n \"Password\"\n\n }\n\n }\n\n disabled=!self.is_loaded()\n\n value=self.ap_fields.password.get_value_str().to_owned()\n\n oninput=self.link.callback(|id: InputData| Msg::ApPasswordChanged(id.value))\n\n validate_on_initial_render=true\n\n auto_validate=true\n\n helper={ self.ap_fields.password.get_error_str() }\n", "file_path": "src/components/wifi.rs", "rank": 91, "score": 20108.476463178868 }, { "content": " oninput=self.link.callback(|id: InputData| Msg::DnsChanged(id.value))\n\n validate_on_initial_render=true\n\n auto_validate=true\n\n helper={ self.fields.dns.get_error_str() }\n\n />\n\n </Cell>\n\n <Cell span=rspan/>\n\n </Chunk>\n\n\n\n // Secondary DNS\n\n <Chunk visible=ap>\n\n <Cell span=lspan/>\n\n <Cell span=mspan style=\"text-align: center;\">\n\n <MatTextField\n\n outlined=true\n\n label=\"Secondary DNS\"\n\n disabled=!self.is_loaded()\n\n value=self.ap_fields.secondary_dns.get_value_str().to_owned()\n\n oninput=self.link.callback(|id: InputData| Msg::ApSecondaryDnsChanged(id.value))\n\n validate_on_initial_render=true\n", "file_path": "src/components/wifi.rs", "rank": 92, "score": 20108.45059719976 }, { "content": " disabled=!self.is_loaded()\n\n value=self.ap_fields.password_confirmed.get_value_str().to_owned()\n\n oninput=self.link.callback(|id: InputData| Msg::ApPasswordConfirmedChanged(id.value))\n\n validate_on_initial_render=true\n\n auto_validate=true\n\n helper={ self.ap_fields.password_confirmed.get_error_str() }\n\n />\n\n </Cell>\n\n <Cell span=rspan/>\n\n </Chunk>\n\n <Chunk visible={sta && ap && Some(wifi::AuthMethod::None) != self.ap_fields.auth_method.get_value()}>\n\n <Cell span=aspan/>\n\n </Chunk>\n\n\n\n // DHCP\n\n <Chunk visible=ap>\n\n <Cell span=lspan/>\n\n <Cell span=mspan style=\"text-align: center;\">\n\n <span>{\"DHCP Server\"}</span>\n\n <MatSwitch\n", "file_path": "src/components/wifi.rs", "rank": 93, "score": 20108.42527711085 }, { "content": "}\n\n\n\nimpl WiFi {\n\n fn view_access_points(&self) -> Html {\n\n html! {\n\n <>\n\n <MatTopAppBar>\n\n <MatTopAppBarNavigationIcon>\n\n <span onclick=self.link.callback(move |_| Msg::ShowConfiguration(None))><MatIconButton icon=\"close\"/></span>\n\n </MatTopAppBarNavigationIcon>\n\n\n\n <div slot=\"title\">{\"Select WiFi Network\"}</div>\n\n <MatTopAppBarActionItems>\n\n <span onclick=self.link.callback(move |_| Msg::GetAccessPoints)><MatIconButton icon=\"refresh\"/></span>\n\n </MatTopAppBarActionItems>\n\n </MatTopAppBar>\n\n\n\n <CenteredGrid>\n\n <MatLinearProgress closed=!self.aps.is_loading()/>\n\n <MatList>\n", "file_path": "src/components/wifi.rs", "rank": 94, "score": 20108.34061262176 }, { "content": " false\n\n }\n\n\n\n fn change(&mut self, props: Self::Properties) -> ShouldRender {\n\n self.props = props;\n\n true\n\n }\n\n\n\n fn view(&self) -> Html {\n\n let classes = once(\"mdc-layout-grid__cell\".into())\n\n .chain(\n\n [\n\n (\"\", self.props.span),\n\n (\"-desktop\", self.props.span_desktop),\n\n (\"-tablet\", self.props.span_tablet),\n\n (\"-phone\", self.props.span_phone),\n\n ]\n\n .iter()\n\n .filter_map(|(prefix, value)| {\n\n value.map(|span| format!(\"mdc-layout-grid__cell--span-{}{}\", span, prefix))\n", "file_path": "src/components/common.rs", "rank": 95, "score": 20108.108819385892 }, { "content": " Msg::GetConfiguration => {\n\n let api = Self::create_api(self.props.api_endpoint.as_ref());\n\n\n\n self.conf.0.borrow_mut().loading();\n\n self.link.send_future(async move {\n\n Msg::GotConfiguration(api.get_configuration().await)\n\n });\n\n\n\n true\n\n }\n\n Msg::GotConfiguration(result) => {\n\n self.conf\n\n .0\n\n .borrow_mut()\n\n .loaded_result(result.map(|data| Editable::new(data)));\n\n self.fields.load();\n\n self.ap_fields.load();\n\n true\n\n }\n\n Msg::GetStatus => {\n", "file_path": "src/components/wifi.rs", "rank": 96, "score": 20108.06583034756 }, { "content": " true\n\n }\n\n Msg::ApAuthMethodChanged(value) => {\n\n self.ap_fields.auth_method.update(value.to_string());\n\n true\n\n }\n\n Msg::ApPasswordChanged(value) => {\n\n self.ap_fields.password.update(value);\n\n true\n\n }\n\n Msg::ApPasswordConfirmedChanged(value) => {\n\n self.ap_fields.password_confirmed.update(value);\n\n true\n\n }\n\n Msg::ApSubnetChanged(value) => {\n\n self.ap_fields.subnet.update(value);\n\n true\n\n }\n\n Msg::ApDHCPEnabledChanged(value) => {\n\n self.conf\n", "file_path": "src/components/wifi.rs", "rank": 97, "score": 20107.989432484042 }, { "content": " auto_validate=true\n\n helper={ self.fields.password.get_error_str() }\n\n />\n\n </Cell>\n\n <Cell span=rspan/>\n\n </Chunk>\n\n\n\n // Confirm password (AP only)\n\n <Chunk visible={ap && Some(wifi::AuthMethod::None) != self.ap_fields.auth_method.get_value()}>\n\n <Cell span=lspan/>\n\n <Cell span=mspan style=\"text-align: center;\">\n\n <MatTextField\n\n outlined=true\n\n label={\n\n if Some(wifi::AuthMethod::WEP) == self.ap_fields.auth_method.get_value() {\n\n \"Confirm Key\"\n\n } else {\n\n \"Confirm Password\"\n\n }\n\n }\n", "file_path": "src/components/wifi.rs", "rank": 98, "score": 20107.91393719053 }, { "content": " disabled=!self.is_loaded()\n\n onchange=self.link.callback(|state| Msg::ApDHCPEnabledChanged(state))\n\n checked=self.conf.0.borrow().ap_ip_conf().map(|i| i.dhcp_enabled).unwrap_or(false)\n\n />\n\n </Cell>\n\n <Cell span=rspan/>\n\n </Chunk>\n\n <Chunk visible=sta>\n\n <Cell span=lspan/>\n\n <Cell span=mspan style=\"text-align: center;\">\n\n <span>{\"Use DHCP\"}</span>\n\n <MatSwitch\n\n disabled=!self.is_loaded()\n\n onchange=self.link.callback(|state| Msg::DHCPChanged(state))\n\n checked=self.is_dhcp()\n\n />\n\n </Cell>\n\n <Cell span=rspan/>\n\n </Chunk>\n\n\n", "file_path": "src/components/wifi.rs", "rank": 99, "score": 20107.89189002701 } ]
Rust
src/didcomm/mod.rs
chriamue/identity-cloud-agent
810d4498b26547b28e474cdaf5d44c2481d32109
use crate::connection::{invitation::Invitation, Connections, Termination, TerminationResponse}; use crate::credential::{issue::Issuance, Credentials}; use crate::message::MessageRequest; use crate::ping::{PingRequest, PingResponse}; use crate::webhook::Webhook; use async_trait::async_trait; use reqwest::RequestBuilder; use rocket::State; use rocket::{post, serde::json::Json}; use rocket_okapi::openapi; use serde_json::{json, Value}; use uuid::Uuid; pub mod client; #[cfg(test)] pub mod test_client; pub use client::Client; #[async_trait] pub trait DidComm: Send + Sync { fn request(&self, endpoint: &str, body: &Value) -> RequestBuilder; async fn post(&self, endpoint: &str, body: &Value) -> Result<reqwest::Response, reqwest::Error>; } #[openapi(tag = "didcomm")] #[post("/", format = "application/json", data = "<data>")] pub async fn post_endpoint( webhook: &State<Box<dyn Webhook>>, connections: &State<Connections>, credentials: &State<Credentials>, data: Json<Value>, ) -> Json<Value> { match data["type"].as_str().unwrap() { "https://didcomm.org/out-of-band/2.0/invitation" => { let invitation: Invitation = serde_json::from_value(data.into_inner()).unwrap(); info!("invitation = {:?}", invitation.id); Json(json!({})) } "https://didcomm.org/trust-ping/2.0/ping" => { let ping_request: PingRequest = serde_json::from_value(data.into_inner()).unwrap(); let ping_response: PingResponse = PingResponse { type_: "https://didcomm.org/trust-ping/2.0/ping-response".to_string(), id: Uuid::new_v4().to_string(), thid: ping_request.id, }; Json(json!(ping_response)) } "iota/post/0.1/post" => { let message_request: MessageRequest = serde_json::from_value(data.into_inner()).unwrap(); info!("message: {:?}", message_request.payload); webhook .post("message", &message_request.payload) .await .unwrap(); Json(json!({})) } "iota/termination/0.1/termination" => { let termination: Termination = serde_json::from_value(data.into_inner()).unwrap(); let mut lock = connections.connections.lock().await; lock.remove(&termination.id).unwrap(); std::mem::drop(lock); let termination: TerminationResponse = TerminationResponse { typ: "application/didcomm-plain+json".to_string(), type_: "iota/termination/0.1/termination-response".to_string(), id: termination.id, body: Value::default(), }; Json(json!(termination)) } "iota/issuance/0.1/issuance" => { let issuance: Issuance = serde_json::from_value(data.into_inner()).unwrap(); let credential = issuance.signed_credential; info!("issuance: {:?}", credential); let mut lock = credentials.credentials.lock().await; lock.insert(credential.id.clone().unwrap().to_string(), credential); Json(json!({})) } _ => Json(json!({})), } } #[cfg(test)] mod tests { use crate::connection::Connection; use crate::ping::{PingRequest, PingResponse}; use crate::test_rocket; use rocket::http::{ContentType, Status}; use rocket::local::blocking::Client; use serde_json::{from_value, json, Value}; #[test] fn test_send_ping() { let client = Client::tracked(test_rocket()).expect("valid rocket instance"); let response = client.get("/connections").dispatch(); assert_eq!(response.status(), Status::Ok); let response = response.into_json::<Value>().unwrap(); let connections = response.as_array().unwrap(); assert_eq!(connections.len(), 0); let response = client.post("/out-of-band/create-invitation").dispatch(); assert_eq!(response.status(), Status::Ok); let invitation: Value = response.into_json::<Value>().unwrap(); let invitation: String = serde_json::to_string(&invitation).unwrap(); let response = client .post("/out-of-band/receive-invitation") .header(ContentType::JSON) .body(invitation) .dispatch(); assert_eq!(response.status(), Status::Ok); let response = client.get("/connections").dispatch(); assert_eq!(response.status(), Status::Ok); let response = response.into_json::<Value>().unwrap(); let _connections: Vec<Connection> = from_value(response).unwrap(); let body: Value = json!( { "response_requested": true }); let ping_request: PingRequest = PingRequest { type_: "https://didcomm.org/trust-ping/2.0/ping".to_string(), id: "foo".to_string(), from: "bar".to_string(), body, }; let ping_request: String = serde_json::to_string(&ping_request).unwrap(); let response = client .post("/") .header(ContentType::JSON) .body(ping_request) .dispatch(); assert_eq!(response.status(), Status::Ok); let response = response.into_json::<PingResponse>().unwrap(); assert_eq!(response.thid, "foo".to_string()); } }
use crate::connection::{invitation::Invitation, Connections, Termination, TerminationResponse}; use crate::credential::{issue::Issuance, Credentials}; use crate::message::MessageRequest; use crate::ping::{PingRequest, PingResponse}; use crate::webhook::Webhook; use async_trait::async_trait; use reqwest::RequestBuilder; use rocket::State; use rocket::{post, serde::json::Json}; use rocket_okapi::openapi; use serde_json::{json, Value}; use uuid::Uuid; pub mod client; #[cfg(test)] pub mod test_client; pub use client::Client; #[async_trait] pub trait DidComm: Send + Sync { fn request(&self, endpoint: &str, body: &Value) -> RequestBuilder; async fn post(&self, endpoint: &str, body: &Value) -> Result<reqwest::Response, reqwest::Error>; } #[openapi(tag = "didcomm")] #[post("/", format = "application/json", data = "<data>")] pub async fn post_endpoint( webhook: &State<Box<dyn Webhook>>, connections: &State<Connections>, credentials: &State<Credentials>, data: Json<Value>, ) -> Json<Value> { match data["type"].as_str().unwrap() { "https://didcomm.org/out-of-band/2.0/invitation" => { let invitation: Invitation = serde_json::from_value(data.into_inner()).unwrap(); info!("invitation = {:?}", invitation.id); Json(json!({})) } "https://didcomm.org/trust-ping/2.0/ping" => { let ping_request: PingRequest = serde_json::from_value(data.into_inner()).unwrap(); let ping_response: PingResponse = PingResponse { type_: "https://didcomm.org/trust-ping/2.0/ping-response".to_string(), id: Uuid::new_v4().to_string(), thid: ping_request.id, }; Json(json!(ping_response)) } "iota/post/0.1/post" => { let message_request: MessageRequest = serde_json::from_value(data.into_inner()).unwrap(); info!("message: {:?}", message_request.payload); webhook .post("message", &message_request.payload) .await .unwrap(); Json(json!({})) } "iota/termination/0.1/termination" => { let termination: Termination = serde_json::from_value(data.into_inner()).unwrap(); let mut lock = connections.connections.lock().await; lock.remove(&termination.id).unwrap(); std::mem::drop(lock); let termination: TerminationResponse = TerminationResponse { typ: "application/didcomm-plain+json".to_string(), type_: "iota/termination/0.1/termination-response".to_string(), id: termination.id, body: Value::default(), }; Json(json!(termination)) } "iota/issuance/0.1/issuance" => { let issuance: Issuance = serde_json::from_value(data.into_inner()).unwrap(); let credential = issuance.signed_credential; info!("issuance: {:?}", credential); let mut lock = credentials.credentials.lock().await; lock.insert(credential.id.clone().unwrap().to_string(), credential); Json(json!({})) } _ => Json(json!({})), } } #[cfg(test)] mod tests { use crate::connection::Connection; use crate::ping::{PingRequest, PingResponse}; use crate::test_rocket; use rocket::http::{ContentType, Status}; use rocket::local::blocking::Client; use serde_json::{from_value, json, Value}; #[test] fn test_send_ping() { let client = Client::tracked(test_rocket()).expect("valid rocket instance"); let response = client.get("/connections").dispatch(); assert_eq!(response.status(), Status::Ok); let response = response.into_json::<Value>().unwrap(); let connections = response.as_array().unwrap(); assert_eq!(connections.len(), 0); let response = client.post("/out-of-band/create-invitation").dispatch(); assert_eq!(response.status(), Status::Ok); let invitation: Value = response.into_json::<Value>().unwrap(); let invitation: String = serde_json::to_string(&invitation).unwrap(); let response = client .post("/out-of-band/receive-invitation") .header(ContentType::JSON) .body(invitation) .dispatch(); assert_eq!(response.status(), Status::Ok); let response = client.get("/connections").dispatch(); assert_eq!(response.status(), Status::Ok); let response = response.into_json::<Value>().unwrap(); let _connections: Vec<Connection> = from_value(response).unwrap(); let body: Value = json!( { "response_requested": true });
}
let ping_request: PingRequest = PingRequest { type_: "https://didcomm.org/trust-ping/2.0/ping".to_string(), id: "foo".to_string(), from: "bar".to_string(), body, }; let ping_request: String = serde_json::to_string(&ping_request).unwrap(); let response = client .post("/") .header(ContentType::JSON) .body(ping_request) .dispatch(); assert_eq!(response.status(), Status::Ok); let response = response.into_json::<PingResponse>().unwrap(); assert_eq!(response.thid, "foo".to_string()); }
function_block-function_prefix_line
[ { "content": "#[async_trait]\n\npub trait Webhook: Send + Sync {\n\n fn request(&self, topic: &str, body: &Value) -> RequestBuilder;\n\n async fn post(&self, topic: &str, body: &Value) -> Result<reqwest::Response, reqwest::Error>;\n\n}\n", "file_path": "src/webhook/mod.rs", "rank": 0, "score": 189896.72663041885 }, { "content": "pub fn build_issue_vc_invitation(endpoint: String) -> Invitation {\n\n let body: Value = json!({\n\n \"goal_code\": \"issue-vc\",\n\n \"goal\": \"To issue a credential\",\n\n \"accept\": [\n\n \"didcomm/v2\"\n\n ]\n\n });\n\n\n\n let json: Value = json!({\n\n \"service\": {\n\n \"serviceEndpoint\": endpoint\n\n }\n\n });\n\n\n\n let attachment: Attachment = serde_json::from_value(json!(\n\n {\n\n \"@id\": \"request-0\",\n\n \"mime-type\": \"application/json\",\n\n \"data\": {\n", "file_path": "src/connection/invitation.rs", "rank": 2, "score": 155098.2134728755 }, { "content": "#[openapi(tag = \"wallet\")]\n\n#[get(\"/wallet/get-did-endpoint?<did>\")]\n\npub fn get_did_endpoint(did: String) -> Json<String> {\n\n let client: ClientMap = ClientMap::new();\n\n let input: InputMetadata = Default::default();\n\n\n\n let runtime = tokio::runtime::Runtime::new().unwrap();\n\n\n\n let output = thread::spawn(move || {\n\n let out = runtime.block_on(resolution::resolve(did.as_str(), input, &client));\n\n out\n\n })\n\n .join()\n\n .expect(\"Thread panicked\")\n\n .unwrap();\n\n\n\n let document = output.document.unwrap();\n\n let services = document.service();\n\n let service = services.get(0).unwrap();\n\n let endpoint = service.service_endpoint().to_string();\n\n let endpoint = endpoint.replace(\"\\\"\", \"\");\n\n Json(endpoint)\n", "file_path": "src/wallet.rs", "rank": 3, "score": 147301.73467630945 }, { "content": "#[openapi(tag = \"ledger\")]\n\n#[get(\"/ledger/did-endpoint?<did>\")]\n\npub fn get_did_endpoint(did: String) -> Json<EndpointResponse> {\n\n let client: ClientMap = ClientMap::new();\n\n let input: InputMetadata = Default::default();\n\n\n\n let runtime = tokio::runtime::Runtime::new().unwrap();\n\n\n\n let output = thread::spawn(move || {\n\n let out = runtime.block_on(resolution::resolve(did.as_str(), input, &client));\n\n out\n\n })\n\n .join()\n\n .expect(\"Thread panicked\")\n\n .unwrap();\n\n\n\n let document = output.document.unwrap();\n\n let services = document.service();\n\n let service = services.get(0).unwrap();\n\n Json(EndpointResponse {\n\n endpoint: service.service_endpoint().to_string(),\n\n })\n", "file_path": "src/ledger.rs", "rank": 4, "score": 137712.1762065666 }, { "content": "#[openapi(tag = \"resolver\")]\n\n#[get(\"/resolver/resolve/<did>\")]\n\npub fn get_resolve(did: String) -> Json<String> {\n\n let iota_did: IotaDID = IotaDID::try_from(did).unwrap();\n\n let explorer: &ExplorerUrl = ExplorerUrl::mainnet();\n\n Json(explorer.resolver_url(&iota_did).unwrap().to_string())\n\n}\n", "file_path": "src/resolver.rs", "rank": 5, "score": 128320.93983220178 }, { "content": "#[launch]\n\npub fn rocket() -> _ {\n\n let rocket = rocket::build();\n\n let figment = rocket.figment();\n\n let config: Config = figment.extract().expect(\"config\");\n\n\n\n let webhook =\n\n Box::new(webhook::Client::new(config.webhook_url.to_string())) as Box<dyn webhook::Webhook>;\n\n let didcomm = Box::new(didcomm::Client::new()) as Box<dyn didcomm::DidComm>;\n\n\n\n identity_cloud_agent::rocket(rocket, config, webhook, didcomm)\n\n}\n", "file_path": "src/bin.rs", "rank": 6, "score": 122772.0925003287 }, { "content": "#[cfg(test)]\n\npub fn test_rocket() -> Rocket<Build> {\n\n let rocket = rocket::build();\n\n let figment = rocket.figment();\n\n let config: Config = figment.extract().expect(\"config\");\n\n\n\n let webhook = Box::new(webhook::test_client::TestClient::new(\n\n config.webhook_url.to_string(),\n\n )) as Box<dyn webhook::Webhook>;\n\n let didcomm = Box::new(didcomm::test_client::TestClient::new()) as Box<dyn didcomm::DidComm>;\n\n self::rocket(rocket, config, webhook, didcomm)\n\n}\n", "file_path": "src/lib.rs", "rank": 7, "score": 121625.97620066845 }, { "content": "fn example_connection_id() -> &'static str {\n\n \"2fecc993-b92c-4152-8c81-35adde124382\"\n\n}\n\n\n", "file_path": "src/credential/issue.rs", "rank": 8, "score": 119175.23629798344 }, { "content": "pub fn rocket(\n\n rocket: Rocket<Build>,\n\n config: Config,\n\n webhook: Box<dyn webhook::Webhook>,\n\n didcomm: Box<dyn didcomm::DidComm>,\n\n) -> Rocket<Build> {\n\n let connections: Connections = Connections::default();\n\n let credentials: Credentials = Credentials::default();\n\n let schemas: Schemas = Schemas::default();\n\n\n\n let runtime = tokio::runtime::Runtime::new().unwrap();\n\n\n\n let stronghold_path = config.stronghold_path.to_string();\n\n let password = config.password.to_string();\n\n let endpoint = config.endpoint.to_string();\n\n let did = config.did.to_string();\n\n\n\n let wallet = thread::spawn(move || {\n\n let wallet = runtime.block_on(Wallet::load(\n\n stronghold_path.into(),\n", "file_path": "src/lib.rs", "rank": 9, "score": 112845.74001093186 }, { "content": "fn example_credential_id() -> &'static str {\n\n \"https://example.edu/credentials/3732\"\n\n}\n\n\n\n#[derive(Serialize, Deserialize, JsonSchema)]\n\npub struct ProofRequest {\n\n #[schemars(example = \"example_connection_id\")]\n\n pub connection_id: String,\n\n #[schemars(example = \"example_credential_id\")]\n\n pub credential_id: String,\n\n}\n\n\n\n#[openapi(tag = \"present-proof\")]\n\n#[post(\"/present-proof/send-proposal\", data = \"<proof_request>\")]\n\npub async fn post_send_proposal(\n\n wallet: &State<Wallet>,\n\n credentials: &State<Credentials>,\n\n proof_request: Json<ProofRequest>,\n\n) -> Json<Value> {\n\n let account = wallet.account.lock().await;\n", "file_path": "src/presentation/proposal.rs", "rank": 10, "score": 100938.56136004854 }, { "content": "fn example_connection_id() -> &'static str {\n\n \"2fecc993-b92c-4152-8c81-35adde124382\"\n\n}\n\n\n", "file_path": "src/presentation/proposal.rs", "rank": 11, "score": 100578.15462263019 }, { "content": "#[openapi(tag = \"server\")]\n\n#[get(\"/server/live\")]\n\npub fn get_live() -> Json<LiveResponse> {\n\n Json(LiveResponse { alive: true })\n\n}\n\n\n", "file_path": "src/server.rs", "rank": 12, "score": 91176.86736904911 }, { "content": "#[openapi(tag = \"server\")]\n\n#[get(\"/server/ready\")]\n\npub fn get_ready() -> Json<ReadyResponse> {\n\n Json(ReadyResponse { ready: true })\n\n}\n", "file_path": "src/server.rs", "rank": 13, "score": 91176.86736904911 }, { "content": "fn example_type() -> &'static str {\n\n \"UniversityDegreeCredential\"\n\n}\n\n\n", "file_path": "src/credential/issue.rs", "rank": 14, "score": 84627.18880667098 }, { "content": "fn example_schema() -> &'static str {\n\n include_str!(\"../assets/degree_schema.json\")\n\n}\n\n\n\n#[derive(Clone, Serialize, Deserialize, JsonSchema)]\n\n#[schemars(example = \"example_schema\")]\n\npub struct SchemaRequest(Value);\n\n\n\n#[openapi(tag = \"schema\")]\n\n#[post(\"/schemas\", data = \"<schema_request>\")]\n\npub async fn post_schemas(\n\n schemas: &State<Schemas>,\n\n schema_request: Json<SchemaRequest>,\n\n) -> Json<SchemaRequest> {\n\n let schema = schema_request.clone();\n\n let schema = serde_json::to_string(&schema).unwrap();\n\n let schema: Schema = Schema::from_json(&schema).unwrap();\n\n let mut schemas = schemas.schemas.lock().await;\n\n schemas.insert(schema.id.to_string(), schema);\n\n schema_request\n", "file_path": "src/schema/mod.rs", "rank": 15, "score": 83582.4938136542 }, { "content": "fn example_attributes() -> Value {\n\n json!({\n\n \"degree\": {\n\n \"type\": \"BachelorDegree\",\n\n \"name\": \"Bachelor of Science and Arts\"\n\n }\n\n })\n\n}\n\n\n\n#[derive(Serialize, Deserialize, JsonSchema)]\n\npub struct IssueRequest {\n\n #[serde(rename = \"type\")]\n\n #[schemars(example = \"example_type\")]\n\n pub type_: String,\n\n #[schemars(example = \"example_connection_id\")]\n\n pub connection_id: String,\n\n #[schemars(example = \"example_attributes\")]\n\n pub attributes: Value,\n\n}\n\n\n", "file_path": "src/credential/issue.rs", "rank": 16, "score": 75051.54389865395 }, { "content": " pub fn response(&mut self, response: Value) {\n\n self.response = response;\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl Webhook for TestClient {\n\n fn request(&self, topic: &str, body: &Value) -> RequestBuilder {\n\n let client = reqwest::Client::new();\n\n client\n\n .post(format!(\"{}/topic/{}\", self.url, topic))\n\n .json(body)\n\n }\n\n async fn post(&self, topic: &str, _body: &Value) -> Result<reqwest::Response, reqwest::Error> {\n\n let url = Url::parse(&format!(\"{}/topic/{}\", self.url, topic)).unwrap();\n\n let response = Builder::new()\n\n .status(200)\n\n .url(url.clone())\n\n .body(serde_json::to_string(&self.response).unwrap())\n\n .unwrap();\n", "file_path": "src/webhook/test_client.rs", "rank": 17, "score": 68736.83696458262 }, { "content": " let response = Response::from(response);\n\n Ok(response)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use serde_json::json;\n\n\n\n #[test]\n\n fn test_topic() {\n\n let response = json!({ \"hello\": \"world!\"});\n\n let mut client = TestClient::new(\"http://example.com\".to_string());\n\n client.response(response);\n\n\n\n let post = client.request(\"foo\", &json!({}));\n\n assert_eq!(\n\n post.build().unwrap().url().as_str(),\n\n \"http://example.com/topic/foo\"\n\n );\n\n }\n\n}\n", "file_path": "src/webhook/test_client.rs", "rank": 18, "score": 68730.29543338892 }, { "content": "use super::Webhook;\n\nuse http::response::Builder;\n\nuse reqwest::RequestBuilder;\n\nuse reqwest::{Response, ResponseBuilderExt};\n\nuse serde_json::Value;\n\nuse url::Url;\n\n\n\npub struct TestClient {\n\n url: String,\n\n response: Value,\n\n}\n\n\n\nimpl TestClient {\n\n pub fn new(url: String) -> Self {\n\n TestClient {\n\n url,\n\n response: Value::default(),\n\n }\n\n }\n\n\n", "file_path": "src/webhook/test_client.rs", "rank": 19, "score": 68728.33838979487 }, { "content": " }\n\n}\n\n\n\n#[async_trait]\n\nimpl DidComm for TestClient {\n\n fn request(&self, endpoint: &str, body: &Value) -> RequestBuilder {\n\n let client = reqwest::Client::new();\n\n client.post(format!(\"{}/\", endpoint)).json(body)\n\n }\n\n async fn post(\n\n &self,\n\n endpoint: &str,\n\n _body: &Value,\n\n ) -> Result<reqwest::Response, reqwest::Error> {\n\n let url = Url::parse(&format!(\"{}/\", endpoint)).unwrap();\n\n let response = Builder::new()\n\n .status(200)\n\n .url(url.clone())\n\n .body(serde_json::to_string(&self.response).unwrap())\n\n .unwrap();\n", "file_path": "src/didcomm/test_client.rs", "rank": 20, "score": 68436.01616882425 }, { "content": " let response = Response::from(response);\n\n Ok(response)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use serde_json::json;\n\n\n\n #[test]\n\n fn test_topic() {\n\n let response = json!({ \"hello\": \"world!\"});\n\n let mut client = TestClient::new();\n\n client.response(response);\n\n\n\n let post = client.request(\"http://example.com\", &json!({}));\n\n assert_eq!(post.build().unwrap().url().as_str(), \"http://example.com/\");\n\n }\n\n}\n", "file_path": "src/didcomm/test_client.rs", "rank": 21, "score": 68431.8952784053 }, { "content": "use super::DidComm;\n\nuse http::response::Builder;\n\nuse reqwest::RequestBuilder;\n\nuse reqwest::{Response, ResponseBuilderExt};\n\nuse serde_json::Value;\n\nuse url::Url;\n\n\n\npub struct TestClient {\n\n response: Value,\n\n}\n\n\n\nimpl TestClient {\n\n pub fn new() -> Self {\n\n TestClient {\n\n response: Value::default(),\n\n }\n\n }\n\n\n\n pub fn response(&mut self, response: Value) {\n\n self.response = response;\n", "file_path": "src/didcomm/test_client.rs", "rank": 22, "score": 68429.0823532438 }, { "content": " .json(body)\n\n }\n\n\n\n async fn post(&self, topic: &str, body: &Value) -> Result<reqwest::Response, reqwest::Error> {\n\n self.request(topic, body).send().await\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use serde_json::json;\n\n\n\n #[test]\n\n fn test_topic() {\n\n let client = Client::new(\"http://example.com\".to_string());\n\n let post = client.request(\"foo\", &json!({}));\n\n assert_eq!(\n\n post.build().unwrap().url().as_str(),\n\n \"http://example.com/topic/foo\"\n\n );\n\n }\n\n}\n", "file_path": "src/webhook/client.rs", "rank": 23, "score": 48518.60676751429 }, { "content": "use super::Webhook;\n\nuse reqwest::RequestBuilder;\n\nuse serde_json::Value;\n\n\n\npub struct Client {\n\n url: String,\n\n}\n\n\n\nimpl Client {\n\n pub fn new(url: String) -> Self {\n\n Client { url }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl Webhook for Client {\n\n fn request(&self, topic: &str, body: &Value) -> RequestBuilder {\n\n let client = reqwest::Client::new();\n\n client\n\n .post(format!(\"{}/topic/{}\", self.url, topic))\n", "file_path": "src/webhook/client.rs", "rank": 24, "score": 48515.44077028747 }, { "content": " fn request(&self, endpoint: &str, body: &Value) -> RequestBuilder {\n\n let client = reqwest::Client::new();\n\n client.post(format!(\"{}/\", endpoint)).json(body)\n\n }\n\n\n\n async fn post(\n\n &self,\n\n endpoint: &str,\n\n body: &Value,\n\n ) -> Result<reqwest::Response, reqwest::Error> {\n\n self.request(endpoint, body).send().await\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use serde_json::json;\n\n\n\n #[test]\n\n fn test_topic() {\n\n let client = Client::new();\n\n let post = client.request(\"http://example.com\", &json!({}));\n\n assert_eq!(post.build().unwrap().url().as_str(), \"http://example.com/\");\n\n }\n\n}\n", "file_path": "src/didcomm/client.rs", "rank": 25, "score": 48202.549500861 }, { "content": "use super::DidComm;\n\nuse reqwest::RequestBuilder;\n\nuse serde_json::Value;\n\n\n\npub struct Client {}\n\n\n\nimpl Client {\n\n pub fn new() -> Self {\n\n Client {}\n\n }\n\n}\n\n\n\nimpl Default for Client {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl DidComm for Client {\n", "file_path": "src/didcomm/client.rs", "rank": 26, "score": 48191.223141463764 }, { "content": " \"json\": json\n\n }\n\n }\n\n ))\n\n .unwrap();\n\n\n\n let invitation: Invitation = Invitation {\n\n typ: \"application/didcomm-plain+json\".to_string(),\n\n type_: \"https://didcomm.org/out-of-band/2.0/invitation\".to_string(),\n\n id: Uuid::new_v4().to_string(),\n\n body,\n\n attachments: vec![attachment],\n\n };\n\n invitation\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "src/connection/invitation.rs", "rank": 27, "score": 48167.422856084086 }, { "content": "use rocket_okapi::okapi::schemars;\n\nuse rocket_okapi::okapi::schemars::JsonSchema;\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json::{json, Value};\n\nuse uuid::Uuid;\n\n\n\n#[derive(Serialize, Deserialize, JsonSchema)]\n\npub struct Attachment {\n\n #[serde(rename = \"@id\")]\n\n pub id: String,\n\n #[serde(rename = \"mime-type\")]\n\n pub mime_type: String,\n\n pub data: Value,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, JsonSchema)]\n\npub struct Invitation {\n\n pub typ: String,\n\n #[serde(rename = \"type\")]\n\n pub type_: String,\n\n pub id: String,\n\n pub body: Value,\n\n pub attachments: Vec<Attachment>,\n\n}\n\n\n", "file_path": "src/connection/invitation.rs", "rank": 28, "score": 48165.42939361561 }, { "content": " #[test]\n\n fn test_build_issue_vc_invitation() {\n\n let endpoint = \"https://example.com\";\n\n let invitation = build_issue_vc_invitation(endpoint.to_string());\n\n assert_eq!(invitation.body[\"goal_code\"].as_str().unwrap(), \"issue-vc\");\n\n assert_eq!(\n\n invitation.attachments[0].data[\"json\"][\"service\"][\"serviceEndpoint\"]\n\n .as_str()\n\n .unwrap(),\n\n endpoint\n\n )\n\n }\n\n}\n", "file_path": "src/connection/invitation.rs", "rank": 29, "score": 48161.593564407034 }, { "content": "use async_trait::async_trait;\n\nuse reqwest::RequestBuilder;\n\nuse serde_json::Value;\n\n\n\npub mod client;\n\n\n\n#[cfg(test)]\n\npub mod test_client;\n\n\n\npub use client::Client;\n\n\n\n#[async_trait]\n", "file_path": "src/webhook/mod.rs", "rank": 30, "score": 47159.67991595856 }, { "content": " let credentials: Vec<Credential> = lock.values().cloned().collect();\n\n let credentials = serde_json::to_value(credentials).unwrap();\n\n Json(credentials)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::test_rocket;\n\n use rocket::http::Status;\n\n use rocket::local::blocking::Client;\n\n use serde_json::Value;\n\n\n\n #[test]\n\n fn test_credentials() {\n\n let client = Client::tracked(test_rocket()).expect(\"valid rocket instance\");\n\n let response = client.get(\"/credentials\").dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n let response = response.into_json::<Value>().unwrap();\n\n let connections = response.as_array().unwrap();\n\n assert_eq!(connections.len(), 0);\n\n }\n\n}\n", "file_path": "src/credential/mod.rs", "rank": 31, "score": 46913.62134278607 }, { "content": "use identity::credential::Credential;\n\nuse rocket::State;\n\nuse rocket::{get, serde::json::Json};\n\nuse rocket_okapi::openapi;\n\nuse serde_json::Value;\n\nuse std::collections::HashMap;\n\nuse std::sync::Arc;\n\nuse tokio::sync::Mutex;\n\n\n\npub mod issue;\n\n\n\n#[derive(Default)]\n\npub struct Credentials {\n\n pub credentials: Arc<Mutex<HashMap<String, Credential>>>,\n\n}\n\n\n\n#[openapi(tag = \"credentials\")]\n\n#[get(\"/credentials\")]\n\npub async fn get_all_credentials(credentials: &State<Credentials>) -> Json<Value> {\n\n let lock = credentials.credentials.lock().await;\n", "file_path": "src/credential/mod.rs", "rank": 32, "score": 46910.48418688338 }, { "content": " let connection = lock.get(&conn_id).unwrap().clone();\n\n std::mem::drop(lock);\n\n let endpoint = connection.endpoint.to_string();\n\n let termination: Termination = Termination {\n\n typ: \"application/didcomm-plain+json\".to_string(),\n\n type_: \"iota/termination/0.1/termination\".to_string(),\n\n id: connection.id.clone(),\n\n body: Value::default(),\n\n };\n\n match didcomm.post(&endpoint, &json!(termination)).await {\n\n Ok(_) => (),\n\n Err(err) => error!(\"{:?}\", err),\n\n };\n\n let mut lock = connections.connections.lock().await;\n\n lock.remove(&conn_id).unwrap();\n\n Status::Ok\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/connection/mod.rs", "rank": 39, "score": 46453.08746430067 }, { "content": " use super::*;\n\n use crate::test_rocket;\n\n use rocket::http::{ContentType, Status};\n\n use rocket::local::blocking::Client;\n\n use serde_json::Value;\n\n\n\n #[test]\n\n fn test_connections() {\n\n let client = Client::tracked(test_rocket()).expect(\"valid rocket instance\");\n\n let response = client.get(\"/connections\").dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n let response = response.into_json::<Value>().unwrap();\n\n let connections = response.as_array().unwrap();\n\n assert_eq!(connections.len(), 0);\n\n\n\n let response = client.post(\"/out-of-band/create-invitation\").dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n let invitation: Value = response.into_json::<Value>().unwrap();\n\n let invitation: String = serde_json::to_string(&invitation).unwrap();\n\n\n", "file_path": "src/connection/mod.rs", "rank": 40, "score": 46446.99157979672 }, { "content": " #[serde(rename = \"type\")]\n\n pub type_: String,\n\n pub id: String,\n\n pub body: Value,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, JsonSchema)]\n\npub struct TerminationResponse {\n\n pub typ: String,\n\n #[serde(rename = \"type\")]\n\n pub type_: String,\n\n pub id: String,\n\n pub body: Value,\n\n}\n\n\n\n#[openapi(tag = \"out-of-band\")]\n\n#[post(\"/out-of-band/create-invitation\")]\n\npub async fn post_create_invitation(wallet: &State<Wallet>) -> Json<Invitation> {\n\n let lock = wallet.account.lock().await;\n\n let did: &IotaDID = lock.did();\n", "file_path": "src/connection/mod.rs", "rank": 41, "score": 46446.62451359945 }, { "content": " .to_string();\n\n\n\n let client = reqwest::Client::new();\n\n match client\n\n .post(endpoint.to_string())\n\n .json(&invitation)\n\n .send()\n\n .await\n\n {\n\n Ok(_) => (),\n\n Err(err) => error!(\"{:?}\", err),\n\n };\n\n let connection = Connection { id, endpoint };\n\n let mut lock = connections.connections.lock().await;\n\n lock.insert(connection.id.to_string(), connection);\n\n\n\n Json(invitation)\n\n}\n\n\n\n#[openapi(tag = \"connection\")]\n", "file_path": "src/connection/mod.rs", "rank": 42, "score": 46446.5587224405 }, { "content": " let endpoint = get_did_endpoint(did.to_string()).as_str().to_string();\n\n let invitation: Invitation = build_issue_vc_invitation(endpoint);\n\n Json(invitation)\n\n}\n\n\n\n#[openapi(tag = \"out-of-band\")]\n\n#[post(\n\n \"/out-of-band/receive-invitation\",\n\n format = \"application/json\",\n\n data = \"<invitation>\"\n\n)]\n\npub async fn post_receive_invitation(\n\n connections: &State<Connections>,\n\n invitation: Json<Invitation>,\n\n) -> Json<Invitation> {\n\n let invitation: Invitation = invitation.into_inner();\n\n let id = invitation.id.to_string();\n\n let endpoint: String = invitation.attachments[0].data[\"json\"][\"service\"][\"serviceEndpoint\"]\n\n .as_str()\n\n .unwrap()\n", "file_path": "src/connection/mod.rs", "rank": 43, "score": 46444.37983107705 }, { "content": " let response = client\n\n .post(\"/out-of-band/receive-invitation\")\n\n .header(ContentType::JSON)\n\n .body(invitation)\n\n .dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n\n\n let response = client.get(\"/connections\").dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n let response = response.into_json::<Value>().unwrap();\n\n let connections = response.as_array().unwrap();\n\n assert_eq!(connections.len(), 1);\n\n }\n\n\n\n #[test]\n\n fn test_termination() {\n\n let client = Client::tracked(test_rocket()).expect(\"valid rocket instance\");\n\n let response = client.get(\"/connections\").dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n let response = response.into_json::<Value>().unwrap();\n", "file_path": "src/connection/mod.rs", "rank": 44, "score": 46444.2244817601 }, { "content": " conn_id: String,\n\n) -> Json<ConnectionEndpoints> {\n\n let lock = connections.connections.lock().await;\n\n let endpoint = config.endpoint.to_string();\n\n let connection = lock.get(&conn_id).unwrap().clone();\n\n let their_endpoint = connection.endpoint;\n\n Json(ConnectionEndpoints {\n\n my_endpoint: endpoint,\n\n their_endpoint,\n\n })\n\n}\n\n\n\n#[openapi(tag = \"connection\")]\n\n#[delete(\"/connections/<conn_id>\")]\n\npub async fn delete_connection(\n\n didcomm: &State<Box<dyn DidComm>>,\n\n connections: &State<Connections>,\n\n conn_id: String,\n\n) -> Status {\n\n let lock = connections.connections.lock().await;\n", "file_path": "src/connection/mod.rs", "rank": 45, "score": 46442.669483048616 }, { "content": "#[get(\"/connections\")]\n\npub async fn get_all_connections(connections: &State<Connections>) -> Json<Vec<Connection>> {\n\n let lock = connections.connections.lock().await;\n\n let connections = lock.values().cloned().collect();\n\n Json(connections)\n\n}\n\n\n\n#[openapi(tag = \"connection\")]\n\n#[get(\"/connections/<conn_id>\")]\n\npub async fn get_connection(connections: &State<Connections>, conn_id: String) -> Json<Connection> {\n\n let lock = connections.connections.lock().await;\n\n let connection = lock.get(&conn_id).unwrap().clone();\n\n Json(connection)\n\n}\n\n\n\n#[openapi(tag = \"connection\")]\n\n#[get(\"/connections/<conn_id>/endpoints\")]\n\npub async fn get_connection_endpoints(\n\n config: &State<Config>,\n\n connections: &State<Connections>,\n", "file_path": "src/connection/mod.rs", "rank": 46, "score": 46440.984114708415 }, { "content": " let connections = response.as_array().unwrap();\n\n assert_eq!(connections.len(), 0);\n\n\n\n let response = client.post(\"/out-of-band/create-invitation\").dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n let invitation: Value = response.into_json::<Value>().unwrap();\n\n let invitation: String = serde_json::to_string(&invitation).unwrap();\n\n\n\n let response = client\n\n .post(\"/out-of-band/receive-invitation\")\n\n .header(ContentType::JSON)\n\n .body(invitation)\n\n .dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n\n\n let response = client.get(\"/connections\").dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n let response = response.into_json::<Value>().unwrap();\n\n let connections = response.as_array().unwrap();\n\n assert_eq!(connections.len(), 1);\n", "file_path": "src/connection/mod.rs", "rank": 47, "score": 46440.37959067888 }, { "content": "use crate::config::Config;\n\nuse crate::didcomm::DidComm;\n\nuse crate::wallet::get_did_endpoint;\n\nuse crate::wallet::Wallet;\n\nuse identity::iota::IotaDID;\n\nuse rocket::http::Status;\n\nuse rocket::State;\n\nuse rocket::{delete, get, post, serde::json::Json};\n\nuse rocket_okapi::okapi::schemars;\n\nuse rocket_okapi::okapi::schemars::JsonSchema;\n\nuse rocket_okapi::openapi;\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json::{json, Value};\n\nuse std::collections::HashMap;\n\nuse std::sync::Arc;\n\nuse tokio::sync::Mutex;\n\n\n\npub mod invitation;\n\nuse invitation::{build_issue_vc_invitation, Invitation};\n\n\n", "file_path": "src/connection/mod.rs", "rank": 48, "score": 46438.269718995354 }, { "content": "#[derive(Default, Debug, PartialEq, Serialize, Deserialize, Clone, JsonSchema)]\n\npub struct Connection {\n\n pub id: String,\n\n pub endpoint: String,\n\n}\n\n\n\n#[derive(Default)]\n\npub struct Connections {\n\n pub connections: Arc<Mutex<HashMap<String, Connection>>>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, JsonSchema)]\n\npub struct ConnectionEndpoints {\n\n pub my_endpoint: String,\n\n pub their_endpoint: String,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, JsonSchema)]\n\npub struct Termination {\n\n pub typ: String,\n", "file_path": "src/connection/mod.rs", "rank": 49, "score": 46436.670610130575 }, { "content": "\n\n let connection: Connection = serde_json::from_value(connections[0].clone()).unwrap();\n\n let response = client\n\n .delete(format!(\"/connections/{}\", connection.id))\n\n .dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n\n\n let response = client.get(\"/connections\").dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n let response = response.into_json::<Value>().unwrap();\n\n let connections = response.as_array().unwrap();\n\n assert_eq!(connections.len(), 0);\n\n }\n\n}\n", "file_path": "src/connection/mod.rs", "rank": 50, "score": 46434.06512767341 }, { "content": "#[openapi(skip)]\n\n#[get(\"/\")]\n\nfn index() -> Redirect {\n\n Redirect::to(\"/swagger-ui\")\n\n}\n\n\n\nasync fn log_wallet(wallet: &Wallet) {\n\n let lock = wallet.account.lock().await;\n\n let iota_did: &IotaDID = lock.did();\n\n info!(\"Local Document from {} = {:#?}\", iota_did, lock.document());\n\n let explorer: &ExplorerUrl = ExplorerUrl::mainnet();\n\n println!(\n\n \"Explore the DID Document = {}\",\n\n explorer.resolver_url(iota_did).unwrap()\n\n );\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 51, "score": 35850.53762561946 }, { "content": "#[cfg(test)]\n\nmod client_test {\n\n use crate::test_rocket;\n\n use rocket::http::Status;\n\n use rocket::local::blocking::Client;\n\n\n\n #[test]\n\n fn hello_world() {\n\n let client = Client::tracked(test_rocket()).expect(\"valid rocket instance\");\n\n let response = client.get(\"/\").dispatch();\n\n assert_eq!(response.status(), Status::SeeOther);\n\n }\n\n}\n", "file_path": "src/tests.rs", "rank": 52, "score": 25818.338804986684 }, { "content": " let issue = Issuance {\n\n type_: \"iota/issuance/0.1/issuance\".to_string(),\n\n signed_credential: credential.clone(),\n\n };\n\n\n\n let client = reqwest::Client::new();\n\n let _res = client\n\n .post(connection.endpoint.to_string())\n\n .json(&issue)\n\n .send()\n\n .await\n\n .unwrap();\n\n Json(json!(credential))\n\n}\n", "file_path": "src/credential/issue.rs", "rank": 53, "score": 24112.855585312336 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\npub struct Issuance {\n\n #[serde(rename = \"type\")]\n\n pub type_: String,\n\n #[serde(rename = \"signedCredential\")]\n\n pub signed_credential: Credential,\n\n}\n\n\n\n#[openapi(tag = \"issue-credential\")]\n\n#[post(\"/issue-credential/send-offer\", data = \"<issue_request>\")]\n\npub async fn post_send_offer(\n\n wallet: &State<Wallet>,\n\n connections: &State<Connections>,\n\n issue_request: Json<IssueRequest>,\n\n) -> Json<Value> {\n\n let account = wallet.account.lock().await;\n\n let iota_did: &IotaDID = account.did();\n\n let did = iota_did.clone();\n\n std::mem::drop(account);\n\n\n", "file_path": "src/credential/issue.rs", "rank": 54, "score": 24112.799927520384 }, { "content": " let subject_key: KeyPair = KeyPair::new_ed25519().unwrap();\n\n let subject_did: IotaDID = IotaDID::new(subject_key.public().as_ref()).unwrap();\n\n\n\n let mut subject: Subject = Subject::from_json_value(issue_request.attributes).unwrap();\n\n subject.id = Some(Url::parse(subject_did.as_str()).unwrap());\n\n\n\n let mut credential: Credential = CredentialBuilder::default()\n\n .id(Url::parse(\"https://example.edu/credentials/3732\").unwrap())\n\n .issuer(Url::parse(document.id().as_str()).unwrap())\n\n .type_(issue_request.type_)\n\n .subject(subject)\n\n .build()\n\n .unwrap();\n\n\n\n let account = wallet.account.lock().await;\n\n account\n\n .sign(\"sign-0\", &mut credential, SignatureOptions::default())\n\n .await\n\n .unwrap();\n\n\n", "file_path": "src/credential/issue.rs", "rank": 55, "score": 24106.877079063597 }, { "content": "use crate::connection::Connections;\n\nuse crate::wallet::Wallet;\n\nuse identity::core::FromJson;\n\nuse identity::core::Url;\n\nuse identity::credential::Credential;\n\nuse identity::credential::CredentialBuilder;\n\nuse identity::credential::Subject;\n\nuse identity::crypto::SignatureOptions;\n\nuse identity::did::resolution;\n\nuse identity::did::resolution::InputMetadata;\n\nuse identity::did::DID;\n\nuse identity::iota::ClientMap;\n\nuse identity::iota::IotaDID;\n\nuse identity::prelude::KeyPair;\n\nuse rocket::State;\n\nuse rocket::{post, serde::json::Json};\n\nuse rocket_okapi::okapi::schemars;\n\nuse rocket_okapi::okapi::schemars::JsonSchema;\n\nuse rocket_okapi::openapi;\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json::{json, Value};\n\nuse std::thread;\n\n\n", "file_path": "src/credential/issue.rs", "rank": 56, "score": 24104.276959133116 }, { "content": " let client: ClientMap = ClientMap::new();\n\n let input: InputMetadata = Default::default();\n\n let runtime = tokio::runtime::Runtime::new().unwrap();\n\n\n\n let output = thread::spawn(move || {\n\n runtime.block_on(resolution::resolve(did.to_string(), input, &client))\n\n })\n\n .join()\n\n .expect(\"Thread panicked\")\n\n .unwrap();\n\n\n\n let document = output.document.unwrap();\n\n\n\n let issue_request = issue_request.into_inner();\n\n let conn_id = issue_request.connection_id;\n\n\n\n let connections = connections.connections.lock().await;\n\n let connection = connections.get(&conn_id).unwrap().clone();\n\n std::mem::drop(connections);\n\n\n", "file_path": "src/credential/issue.rs", "rank": 57, "score": 24103.34943599433 }, { "content": "}\n\n\n\n#[openapi(tag = \"schema\")]\n\n#[get(\"/schemas\")]\n\npub async fn get_all_schemas(schemas: &State<Schemas>) -> Json<Value> {\n\n let lock = schemas.schemas.lock().await;\n\n let schemas: Vec<Schema> = lock.values().cloned().collect();\n\n let schemas = serde_json::to_value(schemas).unwrap();\n\n Json(schemas)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::test_rocket;\n\n use rocket::http::{ContentType, Status};\n\n use rocket::local::blocking::Client;\n\n use serde_json::Value;\n\n\n\n #[test]\n\n fn test_schema() {\n", "file_path": "src/schema/mod.rs", "rank": 58, "score": 22821.677411258574 }, { "content": " let client = Client::tracked(test_rocket()).expect(\"valid rocket instance\");\n\n\n\n let schema = include_str!(\"../assets/degree_schema.json\");\n\n\n\n let response = client\n\n .post(\"/schemas\")\n\n .header(ContentType::JSON)\n\n .body(schema)\n\n .dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n\n\n let response = client.get(\"/schemas\").dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n let response = response.into_json::<Value>().unwrap();\n\n let schemas = response.as_array().unwrap();\n\n assert_eq!(schemas.len(), 1);\n\n }\n\n}\n", "file_path": "src/schema/mod.rs", "rank": 59, "score": 22817.624431536224 }, { "content": "use rocket::http::Status;\n\nuse rocket::{post, serde::json::Json};\n\nuse rocket_okapi::openapi;\n\nuse serde_json::Value;\n\n\n\n#[openapi(tag = \"topic\")]\n\n#[post(\"/topic/<name>\")]\n\npub async fn post_topic(name: String) -> Status {\n\n info!(\"name: {}\", name);\n\n Status::Ok\n\n}\n\n\n\n#[openapi(tag = \"topic\")]\n\n#[post(\"/topic/message\", data = \"<payload>\")]\n\npub async fn post_message_topic(payload: Json<Value>) -> Status {\n\n debug!(\"payload: {}\", payload.into_inner());\n\n Status::Ok\n\n}\n", "file_path": "src/topic/mod.rs", "rank": 60, "score": 22816.599826813883 }, { "content": "use identity::core::FromJson;\n\nuse identity::credential::Schema;\n\nuse rocket::State;\n\nuse rocket::{post, serde::json::Json};\n\nuse rocket_okapi::okapi::schemars;\n\nuse rocket_okapi::okapi::schemars::JsonSchema;\n\nuse rocket_okapi::openapi;\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json::Value;\n\nuse std::collections::HashMap;\n\nuse std::sync::Arc;\n\nuse tokio::sync::Mutex;\n\n\n\n#[derive(Default)]\n\npub struct Schemas {\n\n pub schemas: Arc<Mutex<HashMap<String, Schema>>>,\n\n}\n\n\n", "file_path": "src/schema/mod.rs", "rank": 61, "score": 22815.11277606998 }, { "content": "pub mod proposal;\n", "file_path": "src/presentation/mod.rs", "rank": 62, "score": 22802.155856207704 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::test_rocket;\n\n use rocket::http::{ContentType, Status};\n\n use rocket::local::blocking::Client;\n\n\n\n #[test]\n\n fn test_send_proposal() {\n\n let client = Client::tracked(test_rocket()).expect(\"valid rocket instance\");\n\n\n\n let proof_request = ProofRequest {\n\n connection_id: \"foo\".to_string(),\n\n credential_id: \"bar\".to_string(),\n\n };\n\n let invitation: String = serde_json::to_string(&proof_request).unwrap();\n\n\n\n let response = client\n\n .post(\"/present-proof/send-proposal\")\n\n .header(ContentType::JSON)\n\n .body(invitation)\n\n .dispatch();\n\n assert_eq!(response.status(), Status::InternalServerError);\n\n }\n\n}\n", "file_path": "src/presentation/proposal.rs", "rank": 63, "score": 32.80162315233434 }, { "content": " let lock = connections.connections.lock().await;\n\n let connection = lock.get(&conn_id).unwrap().clone();\n\n\n\n let body: Value = json!( {\n\n \"response_requested\": true\n\n });\n\n\n\n let ping_request: PingRequest = PingRequest {\n\n type_: \"https://didcomm.org/trust-ping/2.0/ping\".to_string(),\n\n id: Uuid::new_v4().to_string(),\n\n from: did.to_string(),\n\n body,\n\n };\n\n\n\n let client = reqwest::Client::new();\n\n let res = client\n\n .post(connection.endpoint.to_string())\n\n .json(&ping_request)\n\n .send()\n\n .await\n", "file_path": "src/ping.rs", "rank": 64, "score": 30.142181738038083 }, { "content": " .unwrap();\n\n let json = res.json();\n\n let ping_response: PingResponse = json.await.unwrap();\n\n Json(ping_response)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::connection::Connection;\n\n use crate::test_rocket;\n\n use rocket::http::{ContentType, Status};\n\n use rocket::local::blocking::Client;\n\n use serde_json::{from_value, Value};\n\n\n\n #[test]\n\n fn test_send_ping() {\n\n let client = Client::tracked(test_rocket()).expect(\"valid rocket instance\");\n\n let response = client.get(\"/connections\").dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n let response = response.into_json::<Value>().unwrap();\n", "file_path": "src/ping.rs", "rank": 65, "score": 28.696314052591706 }, { "content": "}\n\n\n\n#[openapi(tag = \"basicmessage\")]\n\n#[post(\"/connections/<conn_id>/send-message\", data = \"<payload>\")]\n\npub async fn post_send_message(\n\n wallet: &State<Wallet>,\n\n connections: &State<Connections>,\n\n conn_id: String,\n\n payload: Json<Value>,\n\n) -> Status {\n\n let lock = wallet.account.lock().await;\n\n let did: &IotaDID = lock.did();\n\n\n\n let lock = connections.connections.lock().await;\n\n let connection = lock.get(&conn_id).unwrap().clone();\n\n\n\n let payload = payload.into_inner();\n\n\n\n let message_request: MessageRequest = MessageRequest {\n\n type_: \"iota/post/0.1/post\".to_string(),\n", "file_path": "src/message.rs", "rank": 66, "score": 27.390963803100004 }, { "content": " let connections = response.as_array().unwrap();\n\n assert_eq!(connections.len(), 0);\n\n\n\n let response = client.post(\"/out-of-band/create-invitation\").dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n let invitation: Value = response.into_json::<Value>().unwrap();\n\n let invitation: String = serde_json::to_string(&invitation).unwrap();\n\n\n\n let response = client\n\n .post(\"/out-of-band/receive-invitation\")\n\n .header(ContentType::JSON)\n\n .body(invitation)\n\n .dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n\n\n let response = client.get(\"/connections\").dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n let response = response.into_json::<Value>().unwrap();\n\n let connections: Vec<Connection> = from_value(response).unwrap();\n\n\n\n let connection_id = connections[0].id.to_string();\n\n\n\n let response = client\n\n .post(format!(\"/connections/{}/send-ping\", connection_id))\n\n .dispatch();\n\n assert_eq!(response.status(), Status::InternalServerError);\n\n }\n\n}\n", "file_path": "src/ping.rs", "rank": 67, "score": 27.12748923687655 }, { "content": "}\n\n\n\n#[derive(Serialize, Deserialize, JsonSchema)]\n\npub struct PingResponse {\n\n #[serde(rename = \"type\")]\n\n pub type_: String,\n\n pub id: String,\n\n pub thid: String,\n\n}\n\n\n\n#[openapi(tag = \"trustping\")]\n\n#[post(\"/connections/<conn_id>/send-ping\")]\n\npub async fn post_send_ping(\n\n wallet: &State<Wallet>,\n\n connections: &State<Connections>,\n\n conn_id: String,\n\n) -> Json<PingResponse> {\n\n let lock = wallet.account.lock().await;\n\n let did: &IotaDID = lock.did();\n\n\n", "file_path": "src/ping.rs", "rank": 68, "score": 26.138465069456924 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::test_rocket;\n\n use rocket::http::Status;\n\n use rocket::local::blocking::Client;\n\n use serde_json::Value;\n\n\n\n #[test]\n\n fn test_public_did() {\n\n let client = Client::tracked(test_rocket()).expect(\"valid rocket instance\");\n\n let response = client.get(\"/wallet/did/public\").dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n let response = response.into_json::<Value>().unwrap();\n\n assert!(response.get(\"id\").is_some());\n\n }\n\n}\n", "file_path": "src/wallet.rs", "rank": 69, "score": 24.65774623943377 }, { "content": "}\n\n\n\n#[openapi(tag = \"wallet\")]\n\n#[post(\"/wallet/set-did-endpoint\", data = \"<post_data>\")]\n\npub async fn post_did_endpoint(\n\n wallet: &State<Wallet>,\n\n post_data: Json<DidEndpoint>,\n\n) -> Result<(), NotFound<String>> {\n\n let mut account = wallet.account.lock().await;\n\n account\n\n .update_identity()\n\n .create_service()\n\n .fragment(\"endpoint\")\n\n .type_(\"Endpoint\")\n\n .endpoint(Url::parse(post_data.endpoint.to_string()).unwrap())\n\n .apply()\n\n .await\n\n .unwrap();\n\n\n\n Ok(())\n", "file_path": "src/wallet.rs", "rank": 70, "score": 24.398410285588174 }, { "content": "use crate::connection::Connections;\n\nuse crate::wallet::Wallet;\n\nuse identity::iota::IotaDID;\n\n\n\nuse rocket::State;\n\nuse rocket::{post, serde::json::Json};\n\nuse rocket_okapi::okapi::schemars;\n\nuse rocket_okapi::okapi::schemars::JsonSchema;\n\nuse rocket_okapi::openapi;\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json::{json, Value};\n\nuse uuid::Uuid;\n\n\n\n#[derive(Serialize, Deserialize, JsonSchema)]\n\npub struct PingRequest {\n\n #[serde(rename = \"type\")]\n\n pub type_: String,\n\n pub id: String,\n\n pub from: String,\n\n pub body: Value,\n", "file_path": "src/ping.rs", "rank": 71, "score": 22.465226518650454 }, { "content": "pub mod schema;\n\npub mod server;\n\nmod tests;\n\npub mod topic;\n\npub mod wallet;\n\npub mod webhook;\n\n\n\npub use config::Config;\n\nuse connection::Connections;\n\nuse credential::Credentials;\n\npub use didcomm::DidComm;\n\nuse schema::Schemas;\n\nuse wallet::Wallet;\n\npub use webhook::Webhook;\n\n\n\n#[openapi(skip)]\n\n#[get(\"/\")]\n", "file_path": "src/lib.rs", "rank": 72, "score": 22.004416884013505 }, { "content": " id: Uuid::new_v4().to_string(),\n\n from: did.to_string(),\n\n payload,\n\n };\n\n\n\n let client = reqwest::Client::new();\n\n let _res = client\n\n .post(connection.endpoint.to_string())\n\n .json(&message_request)\n\n .send()\n\n .await\n\n .unwrap();\n\n Status::Ok\n\n}\n", "file_path": "src/message.rs", "rank": 73, "score": 20.87289377958138 }, { "content": "use crate::connection::Connections;\n\nuse crate::wallet::Wallet;\n\nuse identity::iota::IotaDID;\n\nuse rocket::http::Status;\n\nuse rocket::State;\n\nuse rocket::{post, serde::json::Json};\n\nuse rocket_okapi::okapi::schemars;\n\nuse rocket_okapi::okapi::schemars::JsonSchema;\n\nuse rocket_okapi::openapi;\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json::Value;\n\nuse uuid::Uuid;\n\n\n\n#[derive(Serialize, Deserialize, JsonSchema)]\n\npub struct MessageRequest {\n\n #[serde(rename = \"type\")]\n\n pub type_: String,\n\n pub id: String,\n\n pub from: String,\n\n pub payload: Value,\n", "file_path": "src/message.rs", "rank": 74, "score": 20.787512974469305 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::test_rocket;\n\n use crate::Config;\n\n use rocket::http::Status;\n\n use rocket::local::blocking::Client;\n\n use rocket::State;\n\n\n\n #[test]\n\n fn test_get_endpoint() {\n\n let rocket = test_rocket();\n\n let config: &State<Config> = State::get(&rocket).expect(\"managed `ConfigState`\");\n\n let did = config.did.to_string();\n\n let client = Client::tracked(rocket).expect(\"valid rocket instance\");\n\n let response = client\n\n .get(format!(\"/ledger/did-endpoint?did={}\", did))\n\n .dispatch();\n\n assert_eq!(response.status(), Status::Ok);\n\n }\n\n}\n", "file_path": "src/ledger.rs", "rank": 75, "score": 20.03129309030416 }, { "content": "#[macro_use]\n\nextern crate rocket;\n\nuse rocket::{Build, Rocket};\n\n\n\nuse identity::iota::ExplorerUrl;\n\nuse identity::iota::IotaDID;\n\nuse rocket::get;\n\nuse rocket::response::Redirect;\n\nuse rocket_okapi::{openapi, openapi_get_routes, swagger_ui::*};\n\nuse std::thread;\n\n\n\npub mod config;\n\npub mod connection;\n\npub mod credential;\n\npub mod didcomm;\n\npub mod ledger;\n\npub mod message;\n\npub mod ping;\n\npub mod presentation;\n\npub mod resolver;\n", "file_path": "src/lib.rs", "rank": 76, "score": 17.869719482205284 }, { "content": "}\n\n\n\n#[openapi(tag = \"wallet\")]\n\n#[get(\"/wallet/did\")]\n\npub async fn get_all_dids(wallet: &State<Wallet>) -> Json<Vec<Did>> {\n\n let lock = wallet.account.lock().await;\n\n let did: &IotaDID = lock.did();\n\n let key_type = \"Ed25519VerificationKey2018\".to_string();\n\n Json(vec![Did {\n\n id: did.to_string(),\n\n key_type,\n\n }])\n\n}\n\n\n\n#[openapi(tag = \"wallet\")]\n\n#[get(\"/wallet/did/public\")]\n\npub async fn get_public_did(wallet: &State<Wallet>) -> Json<Did> {\n\n let lock = wallet.account.lock().await;\n\n let did: &IotaDID = lock.did();\n\n let key_type = \"Ed25519VerificationKey2018\".to_string();\n\n Json(Did {\n\n id: did.to_string(),\n\n key_type,\n\n })\n\n}\n\n\n\n#[openapi(tag = \"wallet\")]\n\n#[get(\"/wallet/get-did-endpoint?<did>\")]\n", "file_path": "src/wallet.rs", "rank": 77, "score": 16.460944084974834 }, { "content": " connection::get_connection_endpoints,\n\n credential::issue::post_send_offer,\n\n credential::get_all_credentials,\n\n didcomm::post_endpoint,\n\n ledger::get_did_endpoint,\n\n message::post_send_message,\n\n ping::post_send_ping,\n\n presentation::proposal::post_send_proposal,\n\n resolver::get_resolve,\n\n schema::post_schemas,\n\n schema::get_all_schemas,\n\n server::get_live,\n\n server::get_ready,\n\n topic::post_topic,\n\n topic::post_message_topic,\n\n wallet::get_all_dids,\n\n wallet::get_public_did,\n\n wallet::get_did_endpoint,\n\n wallet::post_did_endpoint\n\n ],\n", "file_path": "src/lib.rs", "rank": 78, "score": 15.84972411693909 }, { "content": " let iota_did: &IotaDID = account.did();\n\n let did = iota_did.clone();\n\n std::mem::drop(account);\n\n\n\n let proof_request = proof_request.into_inner();\n\n\n\n let presentation_key: KeyPair = KeyPair::new_ed25519().unwrap();\n\n let presentation_did: IotaDID = IotaDID::new(presentation_key.public().as_ref()).unwrap();\n\n\n\n let credentials = credentials.credentials.lock().await;\n\n let credential = credentials.get(&proof_request.credential_id).unwrap();\n\n\n\n let presentation: Presentation = PresentationBuilder::default()\n\n .id(Url::parse(presentation_did.as_str()).unwrap())\n\n .holder(Url::parse(did.as_str()).unwrap())\n\n .credential(credential.clone())\n\n .build()\n\n .unwrap();\n\n\n\n Json(json!(presentation))\n", "file_path": "src/presentation/proposal.rs", "rank": 79, "score": 15.582911118744443 }, { "content": "use identity::did::resolution;\n\nuse identity::did::resolution::InputMetadata;\n\nuse identity::iota::ClientMap;\n\nuse rocket::{get, serde::json::Json};\n\nuse rocket_okapi::okapi::schemars;\n\nuse rocket_okapi::okapi::schemars::JsonSchema;\n\nuse rocket_okapi::openapi;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::thread;\n\n\n\n#[derive(Serialize, Deserialize, JsonSchema)]\n\npub struct EndpointResponse {\n\n pub endpoint: String,\n\n}\n\n\n\n#[openapi(tag = \"ledger\")]\n\n#[get(\"/ledger/did-endpoint?<did>\")]\n", "file_path": "src/ledger.rs", "rank": 80, "score": 15.321396059136916 }, { "content": "use crate::credential::Credentials;\n\nuse crate::wallet::Wallet;\n\nuse identity::core::Url;\n\nuse identity::credential::Presentation;\n\nuse identity::credential::PresentationBuilder;\n\nuse identity::did::DID;\n\nuse identity::iota::IotaDID;\n\nuse identity::prelude::KeyPair;\n\nuse rocket::State;\n\nuse rocket::{post, serde::json::Json};\n\nuse rocket_okapi::okapi::schemars;\n\nuse rocket_okapi::okapi::schemars::JsonSchema;\n\nuse rocket_okapi::openapi;\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json::{json, Value};\n\n\n", "file_path": "src/presentation/proposal.rs", "rank": 81, "score": 14.654721978205359 }, { "content": "use std::thread;\n\nuse tokio::sync::Mutex;\n\n\n\npub struct Wallet {\n\n pub account: Arc<Mutex<identity::account::Account>>,\n\n}\n\n\n\nimpl Wallet {\n\n pub async fn load(\n\n stronghold_path: PathBuf,\n\n password: String,\n\n endpoint: String,\n\n did: String,\n\n ) -> Self {\n\n let iota_did: IotaDID = IotaDID::try_from(did).unwrap();\n\n info!(\"account: {:?}\", iota_did);\n\n let account: Account = match Account::builder()\n\n .autosave(AutoSave::Every)\n\n .storage(AccountStorage::Stronghold(\n\n stronghold_path.clone(),\n", "file_path": "src/wallet.rs", "rank": 82, "score": 14.15394918414516 }, { "content": " password.to_string(),\n\n endpoint.to_string(),\n\n did.to_string(),\n\n ));\n\n runtime.block_on(log_wallet(&wallet));\n\n wallet\n\n })\n\n .join()\n\n .expect(\"Thread panicked\");\n\n\n\n rocket\n\n .mount(\n\n \"/\",\n\n openapi_get_routes![\n\n index,\n\n connection::post_create_invitation,\n\n connection::post_receive_invitation,\n\n connection::get_all_connections,\n\n connection::get_connection,\n\n connection::delete_connection,\n", "file_path": "src/lib.rs", "rank": 83, "score": 13.331135047957625 }, { "content": "use identity::account::Account;\n\nuse identity::account::AccountStorage;\n\nuse identity::account::AutoSave;\n\nuse identity::account::IdentitySetup;\n\nuse identity::account::Result;\n\nuse identity::core::Url;\n\nuse identity::did::resolution;\n\nuse identity::did::resolution::InputMetadata;\n\nuse identity::iota::ClientMap;\n\nuse identity::iota::IotaDID;\n\nuse rocket::response::status::NotFound;\n\nuse rocket::State;\n\nuse rocket::{get, post, serde::json::Json};\n\nuse rocket_okapi::okapi::schemars;\n\nuse rocket_okapi::okapi::schemars::JsonSchema;\n\nuse rocket_okapi::openapi;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::path::PathBuf;\n\nuse std::str;\n\nuse std::sync::Arc;\n", "file_path": "src/wallet.rs", "rank": 84, "score": 13.234506945581938 }, { "content": " account\n\n }\n\n };\n\n\n\n Wallet {\n\n account: Arc::new(Mutex::new(account)),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, JsonSchema)]\n\npub struct Did {\n\n id: String,\n\n key_type: String,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, JsonSchema)]\n\npub struct DidEndpoint {\n\n did: String,\n\n endpoint: String,\n", "file_path": "src/wallet.rs", "rank": 85, "score": 12.28327880546746 }, { "content": "use rocket::{get, serde::json::Json};\n\nuse rocket_okapi::okapi::schemars;\n\nuse rocket_okapi::okapi::schemars::JsonSchema;\n\nuse rocket_okapi::openapi;\n\nuse serde::{Deserialize, Serialize};\n\n#[derive(Serialize, Deserialize, JsonSchema)]\n\npub struct LiveResponse {\n\n pub alive: bool,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, JsonSchema)]\n\npub struct ReadyResponse {\n\n pub ready: bool,\n\n}\n\n\n\n#[openapi(tag = \"server\")]\n\n#[get(\"/server/live\")]\n", "file_path": "src/server.rs", "rank": 86, "score": 11.367907046587092 }, { "content": "#[macro_use]\n\nextern crate rocket;\n\nuse identity_cloud_agent::{didcomm, webhook, Config};\n\n\n\n#[launch]\n", "file_path": "src/bin.rs", "rank": 87, "score": 11.023219492583596 }, { "content": "use serde::Deserialize;\n\n\n\n#[derive(Debug, PartialEq, Deserialize)]\n\npub struct Config {\n\n pub ext_hostname: String,\n\n pub stronghold_path: String,\n\n pub password: String,\n\n pub endpoint: String,\n\n pub webhook_url: String,\n\n pub did: String,\n\n}\n", "file_path": "src/config.rs", "rank": 88, "score": 10.944204886317237 }, { "content": " )\n\n .mount(\n\n \"/swagger-ui/\",\n\n make_swagger_ui(&SwaggerUIConfig {\n\n url: \"../openapi.json\".to_owned(),\n\n ..Default::default()\n\n }),\n\n )\n\n .manage(config)\n\n .manage(wallet)\n\n .manage(connections)\n\n .manage(credentials)\n\n .manage(schemas)\n\n .manage(webhook)\n\n .manage(didcomm)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 89, "score": 10.49932740125765 }, { "content": " Some(password.to_string()),\n\n None,\n\n ))\n\n .autopublish(true)\n\n .load_identity(iota_did)\n\n .await\n\n {\n\n Ok(account) => account,\n\n Err(err) => {\n\n error!(\"{:?}\", err);\n\n let mut account = Account::builder()\n\n .autosave(AutoSave::Every)\n\n .storage(AccountStorage::Stronghold(\n\n stronghold_path,\n\n Some(password),\n\n None,\n\n ))\n\n .autopublish(true)\n\n .create_identity(IdentitySetup::default())\n\n .await\n", "file_path": "src/wallet.rs", "rank": 90, "score": 9.078441315018978 }, { "content": "use identity::iota::ExplorerUrl;\n\nuse identity::iota::IotaDID;\n\nuse rocket::{get, serde::json::Json};\n\nuse rocket_okapi::openapi;\n\n\n\n#[openapi(tag = \"resolver\")]\n\n#[get(\"/resolver/resolve/<did>\")]\n", "file_path": "src/resolver.rs", "rank": 91, "score": 8.336906390918244 }, { "content": " .unwrap();\n\n\n\n account\n\n .update_identity()\n\n .create_method()\n\n .fragment(\"key-1\")\n\n .apply()\n\n .await\n\n .unwrap();\n\n\n\n account\n\n .update_identity()\n\n .create_service()\n\n .fragment(\"endpoint\")\n\n .type_(\"Endpoint\")\n\n .endpoint(Url::parse(endpoint).unwrap())\n\n .apply()\n\n .await\n\n .unwrap();\n\n info!(\"created new identity: {:?}\", account.did());\n", "file_path": "src/wallet.rs", "rank": 92, "score": 7.9983213609947255 }, { "content": "# identity-cloud-agent\n\n\n\n[![Build Status](https://github.com/chriamue/identity-cloud-agent/actions/workflows/coverage.yml/badge.svg)](https://github.com/chriamue/identity-cloud-agent/actions)\n\n[![codecov](https://codecov.io/gh/chriamue/identity-cloud-agent/branch/main/graph/badge.svg?token=QEH2EW6LX4)](https://codecov.io/gh/chriamue/identity-cloud-agent)\n\n[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)\n\n\n\nCloud Agent for IOTA Idendity\n\n\n\n## quickstart\n\n\n\nConfigure the config file. Set the stronghold_path.\n\n\n\n```toml\n\n# Rocket.toml\n\n[default]\n\nident = \"identity-cloud-agent\"\n\n\n\nstronghold_path = \"account-stronghold.hodl\"\n\npassword = \"changeme\"\n\nendpoint = \"http://localhost:8000\"\n\nwebhook_url = \"http://localhost:8000\"\n\ndid = \"did:iota:6HnYPKwSAzf3yRLtkWN7uAUHEf8cCAfdyRSK1EJXSaUU\"\n\n\n\n[debug]\n\nport = 8000\n\next_hostname = \"http://localhost:8000\"\n\n\n\n[release]\n\naddress = \"0.0.0.0\"\n\nport = 8080\n\n```\n\n\n\nNow start using cargo.\n\n\n\n```sh\n\ncargo run\n\n```\n\n\n\nA new identity will be created and the did will be printed.\n\nStop the agent and change the did in the config file.\n\nStart the agent again.\n\n\n\nVisit http://localhost:8000 which redirects to the swagger-ui.\n", "file_path": "README.md", "rank": 93, "score": 3.572342993541706 } ]
Rust
src/kernel/x86/x86avxbitreversal.rs
yvt/yfft-rs
2ba9934e9a3213a528e7ba4573fe08b96609d20b
use super::super::Num; use super::utils::{if_compatible, AlignInfo, AlignReqKernel, AlignReqKernelWrapper}; use super::{Kernel, KernelParams, SliceAccessor}; use packed_simd::{u32x4, u64x2, u64x4}; use std::{mem, ptr}; pub unsafe fn new_x86_avx_bit_reversal_kernel<T>(indices: &Vec<usize>) -> Option<Box<Kernel<T>>> where T: Num, { if indices.len() < 8 { return None; } if_compatible(|| { Some( Box::new(AlignReqKernelWrapper::new(AvxDWordBitReversalKernel { indices: indices.clone(), })) as Box<Kernel<f32>>, ) }) } #[derive(Debug)] struct AvxDWordBitReversalKernel { indices: Vec<usize>, } impl<T: Num> AlignReqKernel<T> for AvxDWordBitReversalKernel { fn transform<I: AlignInfo>(&self, params: &mut KernelParams<T>) { assert_eq!(mem::size_of::<T>(), 4); let indices = unsafe { SliceAccessor::new(&self.indices) }; let size = self.indices.len(); let mut data = unsafe { SliceAccessor::new(&mut params.coefs[0..size * 2]) }; let mut wa = unsafe { SliceAccessor::new(&mut params.work_area[0..size * 2]) }; wa.copy_from_slice(*data); let mut i = 0; while i + 7 < size { let index1 = indices[i]; let index2 = indices[i + 1]; let index3 = indices[i + 2]; let index4 = indices[i + 3]; let index5 = indices[i + 4]; let index6 = indices[i + 5]; let index7 = indices[i + 6]; let index8 = indices[i + 7]; let src1: *const u64 = &wa[index1 * 2] as *const T as *const u64; let src2: *const u64 = &wa[index2 * 2] as *const T as *const u64; let src3: *const u64 = &wa[index3 * 2] as *const T as *const u64; let src4: *const u64 = &wa[index4 * 2] as *const T as *const u64; let src5: *const u64 = &wa[index5 * 2] as *const T as *const u64; let src6: *const u64 = &wa[index6 * 2] as *const T as *const u64; let src7: *const u64 = &wa[index7 * 2] as *const T as *const u64; let src8: *const u64 = &wa[index8 * 2] as *const T as *const u64; let dest: *mut u64x4 = &mut data[i * 2] as *mut T as *mut u64x4; unsafe { I::write(dest, u64x4::new(*src1, *src2, *src3, *src4)); I::write(dest.offset(1), u64x4::new(*src5, *src6, *src7, *src8)); } i += 8; } while i < size { let index = indices[i]; let src: *const u64 = &wa[index * 2] as *const T as *const u64; let dest: *mut u64 = &mut data[i * 2] as *mut T as *mut u64; unsafe { *dest = *src; } i += 1; } } fn required_work_area_size(&self) -> usize { self.indices.len() * 2 } fn alignment_requirement(&self) -> usize { 32 } } pub unsafe fn new_x86_avx_radix2_bit_reversal_kernel<T>( indices: &Vec<usize>, ) -> Option<Box<Kernel<T>>> where T: Num, { if indices.len() < 8 || indices.len() % 8 != 0 { return None; } for i in 0..indices.len() / 2 { if indices[i] > (<u32>::max_value() / 2) as usize { return None; } } let (f1, f2, f3) = ( indices[1] - indices[0], indices[2] - indices[0], indices[3] - indices[0], ); for i in 0..indices.len() / 8 { let (b0, b1, b2, b3) = ( indices[i * 4], indices[i * 4 + 1], indices[i * 4 + 2], indices[i * 4 + 3], ); if b1 != b0 + f1 || b2 != b0 + f2 || b3 != b0 + f3 { return None; } } if_compatible(|| { Some(Box::new(AlignReqKernelWrapper::new( AvxDWordRadix2BitReversalKernel { indices: (0..indices.len() / 8) .map(|i| (indices[i * 4] as u32) * 2) .collect(), offs: u32x4::new(0, f1 as u32 * 2, f2 as u32 * 2, f3 as u32 * 2), }, )) as Box<Kernel<f32>>) }) } #[derive(Debug)] struct AvxDWordRadix2BitReversalKernel { indices: Vec<u32>, offs: u32x4, } impl<T: Num> AlignReqKernel<T> for AvxDWordRadix2BitReversalKernel { fn transform<I: AlignInfo>(&self, params: &mut KernelParams<T>) { assert_eq!(mem::size_of::<T>(), 4); let indices = unsafe { SliceAccessor::new(&self.indices) }; let size = self.indices.len(); let mut data = unsafe { SliceAccessor::new(&mut params.coefs[0..size * 16]) }; let mut wa = unsafe { SliceAccessor::new(&mut params.work_area[0..size * 16]) }; wa.copy_from_slice(*data); let offs = self.offs; let mut i = 0; while i < size { let index1234 = offs + u32x4::splat(indices[i]); let index1 = index1234.extract(0) as usize; let index2 = index1234.extract(1) as usize; let index3 = index1234.extract(2) as usize; let index4 = index1234.extract(3) as usize; let src1 = unsafe { ptr::read_unaligned(&wa[index1] as *const T as *const u64x2) }; let src2 = unsafe { ptr::read_unaligned(&wa[index2] as *const T as *const u64x2) }; let src3 = unsafe { ptr::read_unaligned(&wa[index3] as *const T as *const u64x2) }; let src4 = unsafe { ptr::read_unaligned(&wa[index4] as *const T as *const u64x2) }; let t1a: u64x2 = shuffle!(src1, src2, [0, 2]); let t2a: u64x2 = shuffle!(src3, src4, [0, 2]); let t1b: u64x2 = shuffle!(src1, src2, [1, 3]); let t2b: u64x2 = shuffle!(src3, src4, [1, 3]); let out1: u64x4 = shuffle!(t1a, t2a, [0, 1, 2, 3]); let out2: u64x4 = shuffle!(t1b, t2b, [0, 1, 2, 3]); let dest1: *mut u64x4 = &mut data[i * 8] as *mut T as *mut u64x4; let dest2: *mut u64x4 = &mut data[(i + size) * 8] as *mut T as *mut u64x4; unsafe { I::write(dest1, out1); I::write(dest2, out2); } i += 1; } } fn required_work_area_size(&self) -> usize { self.indices.len() * 16 } fn alignment_requirement(&self) -> usize { 32 } } pub unsafe fn new_x86_avx_radix4_bit_reversal_kernel<T>( indices: &Vec<usize>, ) -> Option<Box<Kernel<T>>> where T: Num, { if indices.len() < 32 || indices.len() % 32 != 0 { return None; } for i in 0..indices.len() / 4 { if indices[i] > (<u32>::max_value() / 2) as usize { return None; } } let (f1, f2, f3) = ( indices[1] - indices[0], indices[2] - indices[0], indices[3] - indices[0], ); for i in 0..indices.len() / 16 { let (b0, b1, b2, b3) = ( indices[i * 4], indices[i * 4 + 1], indices[i * 4 + 2], indices[i * 4 + 3], ); if b1 != b0 + f1 || b2 != b0 + f2 || b3 != b0 + f3 { return None; } } if_compatible(|| { Some(Box::new(AlignReqKernelWrapper::new( AvxDWordRadix4BitReversalKernel { indices: (0..indices.len() / 16) .map(|i| (indices[i * 4] as u32) * 2) .collect(), offs: u32x4::new(0, f1 as u32 * 2, f2 as u32 * 2, f3 as u32 * 2), }, )) as Box<Kernel<f32>>) }) } #[derive(Debug)] struct AvxDWordRadix4BitReversalKernel { indices: Vec<u32>, offs: u32x4, } impl<T: Num> AlignReqKernel<T> for AvxDWordRadix4BitReversalKernel { fn transform<I: AlignInfo>(&self, params: &mut KernelParams<T>) { assert_eq!(mem::size_of::<T>(), 4); let indices = unsafe { SliceAccessor::new(&self.indices) }; let size = self.indices.len(); let mut data = unsafe { SliceAccessor::new(&mut params.coefs[0..size * 32]) }; let mut wa = unsafe { SliceAccessor::new(&mut params.work_area[0..size * 32]) }; wa.copy_from_slice(*data); let offs = self.offs; let mut i = 0; while i + 1 < size { for _ in 0..2 { let index1234 = offs + u32x4::splat(indices[i]); let index1 = index1234.extract(0) as usize; let index2 = index1234.extract(1) as usize; let index3 = index1234.extract(2) as usize; let index4 = index1234.extract(3) as usize; let src1 = unsafe { ptr::read_unaligned(&wa[index1] as *const T as *const u64x4) }; let src2 = unsafe { ptr::read_unaligned(&wa[index2] as *const T as *const u64x4) }; let src3 = unsafe { ptr::read_unaligned(&wa[index3] as *const T as *const u64x4) }; let src4 = unsafe { ptr::read_unaligned(&wa[index4] as *const T as *const u64x4) }; let t1a: u64x4 = shuffle!(src1, src2, [0, 4, 2, 6]); let t2a: u64x4 = shuffle!(src3, src4, [0, 4, 2, 6]); let t1b: u64x4 = shuffle!(src1, src2, [1, 5, 3, 7]); let t2b: u64x4 = shuffle!(src3, src4, [1, 5, 3, 7]); let out1: u64x4 = shuffle!(t1a, t2a, [0, 1, 4, 5]); let out2: u64x4 = shuffle!(t1b, t2b, [0, 1, 4, 5]); let out3: u64x4 = shuffle!(t1a, t2a, [2, 3, 6, 7]); let out4: u64x4 = shuffle!(t1b, t2b, [2, 3, 6, 7]); let dest1: *mut u64x4 = &mut data[i * 8] as *mut T as *mut u64x4; let dest2: *mut u64x4 = &mut data[(i + size) * 8] as *mut T as *mut u64x4; let dest3: *mut u64x4 = &mut data[(i + size * 2) * 8] as *mut T as *mut u64x4; let dest4: *mut u64x4 = &mut data[(i + size * 3) * 8] as *mut T as *mut u64x4; unsafe { I::write(dest1, out1); I::write(dest2, out2); I::write(dest3, out3); I::write(dest4, out4); } i += 1; } } assert_eq!(i, size); } fn required_work_area_size(&self) -> usize { self.indices.len() * 32 } fn alignment_requirement(&self) -> usize { 32 } }
use super::super::Num; use super::utils::{if_compatible, AlignInfo, AlignReqKernel, AlignReqKernelWrapper}; use super::{Kernel, KernelParams, SliceAccessor}; use packed_simd::{u32x4, u64x2, u64x4}; use std::{mem, ptr}; pub unsafe fn new_x86_avx_bit_reversal_kernel<T>(indices: &Vec<usize>) -> Option<Box<Kernel<T>>> where T: Num, { if indices.len() < 8 { return None; } if_compatible(|| { Some( Box::new(AlignReqKernelWrapper::new(AvxD
#[derive(Debug)] struct AvxDWordBitReversalKernel { indices: Vec<usize>, } impl<T: Num> AlignReqKernel<T> for AvxDWordBitReversalKernel { fn transform<I: AlignInfo>(&self, params: &mut KernelParams<T>) { assert_eq!(mem::size_of::<T>(), 4); let indices = unsafe { SliceAccessor::new(&self.indices) }; let size = self.indices.len(); let mut data = unsafe { SliceAccessor::new(&mut params.coefs[0..size * 2]) }; let mut wa = unsafe { SliceAccessor::new(&mut params.work_area[0..size * 2]) }; wa.copy_from_slice(*data); let mut i = 0; while i + 7 < size { let index1 = indices[i]; let index2 = indices[i + 1]; let index3 = indices[i + 2]; let index4 = indices[i + 3]; let index5 = indices[i + 4]; let index6 = indices[i + 5]; let index7 = indices[i + 6]; let index8 = indices[i + 7]; let src1: *const u64 = &wa[index1 * 2] as *const T as *const u64; let src2: *const u64 = &wa[index2 * 2] as *const T as *const u64; let src3: *const u64 = &wa[index3 * 2] as *const T as *const u64; let src4: *const u64 = &wa[index4 * 2] as *const T as *const u64; let src5: *const u64 = &wa[index5 * 2] as *const T as *const u64; let src6: *const u64 = &wa[index6 * 2] as *const T as *const u64; let src7: *const u64 = &wa[index7 * 2] as *const T as *const u64; let src8: *const u64 = &wa[index8 * 2] as *const T as *const u64; let dest: *mut u64x4 = &mut data[i * 2] as *mut T as *mut u64x4; unsafe { I::write(dest, u64x4::new(*src1, *src2, *src3, *src4)); I::write(dest.offset(1), u64x4::new(*src5, *src6, *src7, *src8)); } i += 8; } while i < size { let index = indices[i]; let src: *const u64 = &wa[index * 2] as *const T as *const u64; let dest: *mut u64 = &mut data[i * 2] as *mut T as *mut u64; unsafe { *dest = *src; } i += 1; } } fn required_work_area_size(&self) -> usize { self.indices.len() * 2 } fn alignment_requirement(&self) -> usize { 32 } } pub unsafe fn new_x86_avx_radix2_bit_reversal_kernel<T>( indices: &Vec<usize>, ) -> Option<Box<Kernel<T>>> where T: Num, { if indices.len() < 8 || indices.len() % 8 != 0 { return None; } for i in 0..indices.len() / 2 { if indices[i] > (<u32>::max_value() / 2) as usize { return None; } } let (f1, f2, f3) = ( indices[1] - indices[0], indices[2] - indices[0], indices[3] - indices[0], ); for i in 0..indices.len() / 8 { let (b0, b1, b2, b3) = ( indices[i * 4], indices[i * 4 + 1], indices[i * 4 + 2], indices[i * 4 + 3], ); if b1 != b0 + f1 || b2 != b0 + f2 || b3 != b0 + f3 { return None; } } if_compatible(|| { Some(Box::new(AlignReqKernelWrapper::new( AvxDWordRadix2BitReversalKernel { indices: (0..indices.len() / 8) .map(|i| (indices[i * 4] as u32) * 2) .collect(), offs: u32x4::new(0, f1 as u32 * 2, f2 as u32 * 2, f3 as u32 * 2), }, )) as Box<Kernel<f32>>) }) } #[derive(Debug)] struct AvxDWordRadix2BitReversalKernel { indices: Vec<u32>, offs: u32x4, } impl<T: Num> AlignReqKernel<T> for AvxDWordRadix2BitReversalKernel { fn transform<I: AlignInfo>(&self, params: &mut KernelParams<T>) { assert_eq!(mem::size_of::<T>(), 4); let indices = unsafe { SliceAccessor::new(&self.indices) }; let size = self.indices.len(); let mut data = unsafe { SliceAccessor::new(&mut params.coefs[0..size * 16]) }; let mut wa = unsafe { SliceAccessor::new(&mut params.work_area[0..size * 16]) }; wa.copy_from_slice(*data); let offs = self.offs; let mut i = 0; while i < size { let index1234 = offs + u32x4::splat(indices[i]); let index1 = index1234.extract(0) as usize; let index2 = index1234.extract(1) as usize; let index3 = index1234.extract(2) as usize; let index4 = index1234.extract(3) as usize; let src1 = unsafe { ptr::read_unaligned(&wa[index1] as *const T as *const u64x2) }; let src2 = unsafe { ptr::read_unaligned(&wa[index2] as *const T as *const u64x2) }; let src3 = unsafe { ptr::read_unaligned(&wa[index3] as *const T as *const u64x2) }; let src4 = unsafe { ptr::read_unaligned(&wa[index4] as *const T as *const u64x2) }; let t1a: u64x2 = shuffle!(src1, src2, [0, 2]); let t2a: u64x2 = shuffle!(src3, src4, [0, 2]); let t1b: u64x2 = shuffle!(src1, src2, [1, 3]); let t2b: u64x2 = shuffle!(src3, src4, [1, 3]); let out1: u64x4 = shuffle!(t1a, t2a, [0, 1, 2, 3]); let out2: u64x4 = shuffle!(t1b, t2b, [0, 1, 2, 3]); let dest1: *mut u64x4 = &mut data[i * 8] as *mut T as *mut u64x4; let dest2: *mut u64x4 = &mut data[(i + size) * 8] as *mut T as *mut u64x4; unsafe { I::write(dest1, out1); I::write(dest2, out2); } i += 1; } } fn required_work_area_size(&self) -> usize { self.indices.len() * 16 } fn alignment_requirement(&self) -> usize { 32 } } pub unsafe fn new_x86_avx_radix4_bit_reversal_kernel<T>( indices: &Vec<usize>, ) -> Option<Box<Kernel<T>>> where T: Num, { if indices.len() < 32 || indices.len() % 32 != 0 { return None; } for i in 0..indices.len() / 4 { if indices[i] > (<u32>::max_value() / 2) as usize { return None; } } let (f1, f2, f3) = ( indices[1] - indices[0], indices[2] - indices[0], indices[3] - indices[0], ); for i in 0..indices.len() / 16 { let (b0, b1, b2, b3) = ( indices[i * 4], indices[i * 4 + 1], indices[i * 4 + 2], indices[i * 4 + 3], ); if b1 != b0 + f1 || b2 != b0 + f2 || b3 != b0 + f3 { return None; } } if_compatible(|| { Some(Box::new(AlignReqKernelWrapper::new( AvxDWordRadix4BitReversalKernel { indices: (0..indices.len() / 16) .map(|i| (indices[i * 4] as u32) * 2) .collect(), offs: u32x4::new(0, f1 as u32 * 2, f2 as u32 * 2, f3 as u32 * 2), }, )) as Box<Kernel<f32>>) }) } #[derive(Debug)] struct AvxDWordRadix4BitReversalKernel { indices: Vec<u32>, offs: u32x4, } impl<T: Num> AlignReqKernel<T> for AvxDWordRadix4BitReversalKernel { fn transform<I: AlignInfo>(&self, params: &mut KernelParams<T>) { assert_eq!(mem::size_of::<T>(), 4); let indices = unsafe { SliceAccessor::new(&self.indices) }; let size = self.indices.len(); let mut data = unsafe { SliceAccessor::new(&mut params.coefs[0..size * 32]) }; let mut wa = unsafe { SliceAccessor::new(&mut params.work_area[0..size * 32]) }; wa.copy_from_slice(*data); let offs = self.offs; let mut i = 0; while i + 1 < size { for _ in 0..2 { let index1234 = offs + u32x4::splat(indices[i]); let index1 = index1234.extract(0) as usize; let index2 = index1234.extract(1) as usize; let index3 = index1234.extract(2) as usize; let index4 = index1234.extract(3) as usize; let src1 = unsafe { ptr::read_unaligned(&wa[index1] as *const T as *const u64x4) }; let src2 = unsafe { ptr::read_unaligned(&wa[index2] as *const T as *const u64x4) }; let src3 = unsafe { ptr::read_unaligned(&wa[index3] as *const T as *const u64x4) }; let src4 = unsafe { ptr::read_unaligned(&wa[index4] as *const T as *const u64x4) }; let t1a: u64x4 = shuffle!(src1, src2, [0, 4, 2, 6]); let t2a: u64x4 = shuffle!(src3, src4, [0, 4, 2, 6]); let t1b: u64x4 = shuffle!(src1, src2, [1, 5, 3, 7]); let t2b: u64x4 = shuffle!(src3, src4, [1, 5, 3, 7]); let out1: u64x4 = shuffle!(t1a, t2a, [0, 1, 4, 5]); let out2: u64x4 = shuffle!(t1b, t2b, [0, 1, 4, 5]); let out3: u64x4 = shuffle!(t1a, t2a, [2, 3, 6, 7]); let out4: u64x4 = shuffle!(t1b, t2b, [2, 3, 6, 7]); let dest1: *mut u64x4 = &mut data[i * 8] as *mut T as *mut u64x4; let dest2: *mut u64x4 = &mut data[(i + size) * 8] as *mut T as *mut u64x4; let dest3: *mut u64x4 = &mut data[(i + size * 2) * 8] as *mut T as *mut u64x4; let dest4: *mut u64x4 = &mut data[(i + size * 3) * 8] as *mut T as *mut u64x4; unsafe { I::write(dest1, out1); I::write(dest2, out2); I::write(dest3, out3); I::write(dest4, out4); } i += 1; } } assert_eq!(i, size); } fn required_work_area_size(&self) -> usize { self.indices.len() * 32 } fn alignment_requirement(&self) -> usize { 32 } }
WordBitReversalKernel { indices: indices.clone(), })) as Box<Kernel<f32>>, ) }) }
function_block-function_prefixed
[ { "content": "pub trait Num:\n\n Clone\n\n + Debug\n\n + AddAssign\n\n + SubAssign\n\n + MulAssign\n\n + DivAssign\n\n + Default\n\n + num_traits::Float\n\n + num_traits::FloatConst\n\n + num_traits::Zero\n\n + 'static\n\n + Sync\n\n + Send\n\n{\n\n}\n\nimpl<T> Num for T where\n\n T: Clone\n\n + Debug\n\n + AddAssign\n", "file_path": "src/lib.rs", "rank": 0, "score": 121059.66269672132 }, { "content": "#[cfg(target_feature = \"avx\")]\n\n#[allow(dead_code)]\n\npub fn f32x8_to_array(x: f32x8) -> [f32; 8] {\n\n [\n\n x.extract(0),\n\n x.extract(1),\n\n x.extract(2),\n\n x.extract(3),\n\n x.extract(4),\n\n x.extract(5),\n\n x.extract(6),\n\n x.extract(7),\n\n ]\n\n}\n", "file_path": "src/simdutils.rs", "rank": 1, "score": 120402.318245351 }, { "content": "fn conv<T: Num>() {\n\n let size = 32;\n\n\n\n let setup1: Setup<T> = Setup::new(&Options {\n\n input_data_order: DataOrder::Natural,\n\n output_data_order: DataOrder::Natural,\n\n input_data_format: DataFormat::Real,\n\n output_data_format: DataFormat::HalfComplex,\n\n len: size,\n\n inverse: false,\n\n })\n\n .unwrap();\n\n let setup2: Setup<T> = Setup::new(&Options {\n\n input_data_order: DataOrder::Natural,\n\n output_data_order: DataOrder::Natural,\n\n input_data_format: DataFormat::HalfComplex,\n\n output_data_format: DataFormat::Real,\n\n len: size,\n\n inverse: true,\n\n })\n", "file_path": "tests/conv.rs", "rank": 2, "score": 111616.87608631408 }, { "content": "fn fft_roundtrip_shortcut<T: Num>() {\n\n for size_ref in &[1, 2, 3, 4, 5, 6, 7, 8, 16, 32, 40, 49, 64, 128] {\n\n let size = *size_ref;\n\n\n\n let setup1: Setup<T> = Setup::new(&Options {\n\n input_data_order: DataOrder::Natural,\n\n output_data_order: DataOrder::Swizzled,\n\n input_data_format: DataFormat::Complex,\n\n output_data_format: DataFormat::Complex,\n\n len: size,\n\n inverse: false,\n\n })\n\n .unwrap();\n\n let setup2: Setup<T> = Setup::new(&Options {\n\n input_data_order: DataOrder::Swizzled,\n\n output_data_order: DataOrder::Natural,\n\n input_data_format: DataFormat::Complex,\n\n output_data_format: DataFormat::Complex,\n\n len: size,\n\n inverse: true,\n", "file_path": "tests/transform.rs", "rank": 3, "score": 107260.00141079782 }, { "content": "fn fft_roundtrip_real<T: Num>() {\n\n for size_ref in &[1, 2, 3, 4, 5, 6, 7, 8, 16, 32, 40, 49, 64, 128] {\n\n let size = *size_ref;\n\n\n\n let setup1: Setup<T> = Setup::new(&Options {\n\n input_data_order: DataOrder::Natural,\n\n output_data_order: DataOrder::Natural,\n\n input_data_format: DataFormat::Real,\n\n output_data_format: DataFormat::HalfComplex,\n\n len: size * 2,\n\n inverse: false,\n\n })\n\n .unwrap();\n\n let setup2: Setup<T> = Setup::new(&Options {\n\n input_data_order: DataOrder::Natural,\n\n output_data_order: DataOrder::Natural,\n\n input_data_format: DataFormat::HalfComplex,\n\n output_data_format: DataFormat::Real,\n\n len: size * 2,\n\n inverse: true,\n", "file_path": "tests/realfft.rs", "rank": 4, "score": 107260.00141079782 }, { "content": "// assumes complex FFT is okay\n\nfn fft_real_backward<T: Num>() {\n\n for size_ref in &[1, 2, 3, 4, 5, 6, 7, 8, 16, 32, 40, 49, 64, 128] {\n\n let size = *size_ref;\n\n\n\n // real FFT\n\n let setup1: Setup<T> = Setup::new(&Options {\n\n input_data_order: DataOrder::Natural,\n\n output_data_order: DataOrder::Natural,\n\n input_data_format: DataFormat::HalfComplex,\n\n output_data_format: DataFormat::Complex,\n\n len: size * 2,\n\n inverse: true,\n\n })\n\n .unwrap();\n\n\n\n // complex FFT\n\n let setup2: Setup<T> = Setup::new(&Options {\n\n input_data_order: DataOrder::Natural,\n\n output_data_order: DataOrder::Natural,\n\n input_data_format: DataFormat::Complex,\n", "file_path": "tests/realfft.rs", "rank": 5, "score": 107260.00141079782 }, { "content": "// assumes complex FFT is okay\n\nfn fft_real_forward<T: Num>() {\n\n for size_ref in &[1, 2, 3, 4, 5, 6, 7, 8, 16, 32, 40, 49, 64, 128] {\n\n let size = *size_ref;\n\n\n\n // real FFT\n\n let setup1: Setup<T> = Setup::new(&Options {\n\n input_data_order: DataOrder::Natural,\n\n output_data_order: DataOrder::Natural,\n\n input_data_format: DataFormat::Real,\n\n output_data_format: DataFormat::Complex,\n\n len: size * 2,\n\n inverse: false,\n\n })\n\n .unwrap();\n\n\n\n // complex FFT\n\n let setup2: Setup<T> = Setup::new(&Options {\n\n input_data_order: DataOrder::Natural,\n\n output_data_order: DataOrder::Natural,\n\n input_data_format: DataFormat::Complex,\n", "file_path": "tests/realfft.rs", "rank": 6, "score": 107260.00141079782 }, { "content": "fn simple_fft<T: Num>(inverse: bool) {\n\n for size_ref in &[1, 2, 3, 4, 5, 6, 7, 8, 16, 32, 40, 49, 64, 128] {\n\n let size = *size_ref;\n\n let setup: Setup<T> = Setup::new(&Options {\n\n input_data_order: DataOrder::Natural,\n\n output_data_order: DataOrder::Natural,\n\n input_data_format: DataFormat::Complex,\n\n output_data_format: DataFormat::Complex,\n\n len: size,\n\n inverse: inverse,\n\n })\n\n .unwrap();\n\n let mut se = Env::new(&setup);\n\n let mut result_1 = vec![T::zero(); size * 2];\n\n let mut result_2 = vec![T::zero(); size * 2];\n\n for pat in test_patterns::<T>(size) {\n\n result_1.copy_from_slice(pat.as_slice());\n\n se.transform(result_1.as_mut_slice());\n\n\n\n naive_dft(pat.as_slice(), result_2.as_mut_slice(), inverse);\n\n\n\n assert_num_slice_approx_eq(\n\n result_1.as_slice(),\n\n result_2.as_slice(),\n\n T::from(1.0e-3).unwrap(),\n\n );\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/transform.rs", "rank": 7, "score": 99711.43058585368 }, { "content": "#[allow(dead_code)]\n\npub fn f32x4_to_array(x: f32x4) -> [f32; 4] {\n\n [x.extract(0), x.extract(1), x.extract(2), x.extract(3)]\n\n}\n\n\n\n/// `neg_mask` must be `[0x80000000, 0x80000000, 0, 0]`\n", "file_path": "src/simdutils.rs", "rank": 8, "score": 97600.73095126294 }, { "content": "pub fn new_x86_real_fft_pre_post_process_kernel<T>(\n\n len: usize,\n\n inverse: bool,\n\n) -> Option<Box<Kernel<T>>>\n\nwhere\n\n T: Num,\n\n{\n\n None.or_else(|| {\n\n x86avxf32realfft::new_x86_avx_f32_real_fft_pre_post_process_kernel(len, inverse)\n\n })\n\n .or_else(|| x86sse3f32realfft::new_x86_sse3_f32_real_fft_pre_post_process_kernel(len, inverse))\n\n .or_else(|| x86sse1realfft::new_x86_sse_real_fft_pre_post_process_kernel(len, inverse))\n\n}\n", "file_path": "src/kernel/x86/mod.rs", "rank": 9, "score": 93938.090515555 }, { "content": "pub fn factorize(mut x: usize) -> Vec<usize> {\n\n let mut vec = Vec::new();\n\n let mut possible_factor_min = 3;\n\n\n\n while x > 1 {\n\n let radix = if x % 4 == 0 {\n\n 4\n\n } else if x % 2 == 0 {\n\n 2\n\n } else {\n\n let found_radix = (0..)\n\n .map(|r| r * 2 + possible_factor_min)\n\n .filter(|r| x % r == 0)\n\n .nth(0)\n\n .unwrap();\n\n possible_factor_min = found_radix;\n\n found_radix\n\n };\n\n vec.push(radix);\n\n x /= radix;\n", "file_path": "src/setup.rs", "rank": 10, "score": 93625.13284619339 }, { "content": "#[inline]\n\nfn complex_from_slice<T: Num>(x: &[T]) -> Complex<T> {\n\n Complex::new(x[0], x[1])\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 11, "score": 93443.88241644582 }, { "content": "/// Creates a real FFT post-processing or backward real FFT pre-processing kernel.\n\npub fn new_x86_sse_real_fft_pre_post_process_kernel<T>(\n\n len: usize,\n\n inverse: bool,\n\n) -> Option<Box<Kernel<T>>>\n\nwhere\n\n T: Num,\n\n{\n\n if_compatible(|| {\n\n if len % 8 == 0 && len > 8 {\n\n Some(Box::new(AlignReqKernelWrapper::new(\n\n SseRealFFTPrePostProcessKernel::new(len, inverse),\n\n )) as Box<Kernel<f32>>)\n\n } else {\n\n None\n\n }\n\n })\n\n}\n\n\n\npub(super) fn new_real_fft_coef_table(len: usize, inverse: bool) -> [AlignedVec<f32>; 2] {\n\n assert!(len % 2 == 0);\n", "file_path": "src/kernel/x86/x86sse1realfft.rs", "rank": 12, "score": 92668.47161136771 }, { "content": "/// Creates a real FFT post-processing or backward real FFT pre-processing kernel.\n\npub fn new_x86_sse3_f32_real_fft_pre_post_process_kernel<T>(\n\n len: usize,\n\n inverse: bool,\n\n) -> Option<Box<Kernel<T>>>\n\nwhere\n\n T: Num,\n\n{\n\n if_compatible(|| {\n\n if len % 8 == 0 && len > 8 {\n\n Some(Box::new(AlignReqKernelWrapper::new(\n\n Sse3F32RealFFTPrePostProcessKernel::new(len, inverse),\n\n )) as Box<Kernel<f32>>)\n\n } else {\n\n None\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/kernel/x86/x86sse3f32realfft.rs", "rank": 13, "score": 91465.73276963824 }, { "content": "/// Creates a real FFT post-processing or backward real FFT pre-processing kernel.\n\npub fn new_x86_avx_f32_real_fft_pre_post_process_kernel<T>(\n\n len: usize,\n\n inverse: bool,\n\n) -> Option<Box<Kernel<T>>>\n\nwhere\n\n T: Num,\n\n{\n\n if_compatible(|| {\n\n if len % 16 == 0 && len > 16 {\n\n Some(Box::new(AlignReqKernelWrapper::new(\n\n AvxF32RealFFTPrePostProcessKernel::new(len, inverse),\n\n )) as Box<Kernel<f32>>)\n\n } else {\n\n None\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/kernel/x86/x86avxf32realfft.rs", "rank": 14, "score": 91465.73276963824 }, { "content": "#[allow(dead_code)]\n\n#[inline]\n\npub fn f32x4_bitxor(lhs: f32x4, rhs: f32x4) -> f32x4 {\n\n let x2: i32x4 = unsafe { mem::transmute(lhs) };\n\n let y2: i32x4 = unsafe { mem::transmute(rhs) };\n\n let z = x2 ^ y2;\n\n unsafe { mem::transmute(z) }\n\n}\n\n\n", "file_path": "src/simdutils.rs", "rank": 15, "score": 90100.80345359353 }, { "content": "#[inline]\n\nfn mul_pos_i<T: Num>(x: Complex<T>) -> Complex<T> {\n\n Complex::new(-x.im, x.re)\n\n}\n\n\n\npub use env::Env;\n\npub use setup::{DataFormat, DataOrder, Options, PlanError, Setup};\n", "file_path": "src/lib.rs", "rank": 16, "score": 89834.5710314813 }, { "content": "#[cfg(target_feature = \"avx\")]\n\n#[inline]\n\n#[allow(dead_code)]\n\npub fn avx_f32x8_complex_mul_riri(x: f32x8, y: f32x8) -> f32x8 {\n\n // (r1, i1, ...) * (r3, i3, ...)\n\n // --> ((r1 * r3) - (i1 * i3), (r1 * i3) + (i1 * r3), ...)\n\n let x1: f32x8 = shuffle!(x, x, [0, 0, 2, 2, 4, 4, 6, 6]); // vmovsldup\n\n let x2: f32x8 = shuffle!(x, x, [1, 1, 3, 3, 5, 5, 7, 7]); // vmovshdup\n\n let y1: f32x8 = y;\n\n let y2: f32x8 = shuffle!(y, y, [1, 0, 3, 2, 5, 4, 7, 6]); // vpermilps\n\n let z = avx_fma_f32x8_fmaddsub(x1, y1, x2 * y2); // vaddsubps/vfmaddsubXXXps\n\n return z;\n\n}\n\n\n", "file_path": "src/simdutils.rs", "rank": 17, "score": 88498.99572485973 }, { "content": "#[cfg(target_feature = \"avx\")]\n\n#[allow(dead_code)]\n\n#[inline]\n\npub fn avx_f32x8_bitxor(lhs: f32x8, rhs: f32x8) -> f32x8 {\n\n let x2: i32x8 = unsafe { mem::transmute(lhs) };\n\n let y2: i32x8 = unsafe { mem::transmute(rhs) };\n\n let z = x2 ^ y2;\n\n unsafe { mem::transmute(z) }\n\n}\n\n\n", "file_path": "src/simdutils.rs", "rank": 18, "score": 88498.99572485973 }, { "content": "#[cfg(target_feature = \"sse3\")]\n\n#[inline]\n\n#[allow(dead_code)]\n\npub fn sse3_f32x4_complex_mul_riri(x: f32x4, y: f32x4) -> f32x4 {\n\n // (r1, i1, ...) * (r3, i3, ...)\n\n // --> ((r1 * r3) - (i1 * i3), (r1 * i3) + (i1 * r3), ...)\n\n let x1: f32x4 = shuffle!(x, x, [0, 0, 2, 2]); // movsldup\n\n let x2: f32x4 = shuffle!(x, x, [1, 1, 3, 3]); // movshdup\n\n let y1: f32x4 = y;\n\n let y2: f32x4 = shuffle!(y, y, [1, 0, 3, 2]); // shufps\n\n let z = sse3_fma_f32x4_fmaddsub(x1, y1, x2 * y2); // vaddsubps/vfmaddsubXXXps\n\n return z;\n\n}\n\n\n", "file_path": "src/simdutils.rs", "rank": 19, "score": 88498.99572485973 }, { "content": "fn assert_num_slice_approx_eq<T: yfft::Num>(got: &[T], expected: &[T], releps: T) {\n\n assert_eq!(got.len(), expected.len());\n\n // We can't use `Iterator::max()` because T doesn't implement Ord\n\n let maxabs = expected\n\n .iter()\n\n .map(|x| x.abs())\n\n .fold(T::zero() / T::zero(), |x, y| x.max(y))\n\n + T::from(0.01).unwrap();\n\n let eps = maxabs * releps;\n\n for i in 0..got.len() {\n\n let a = got[i];\n\n let b = expected[i];\n\n if (a - b).abs() > eps {\n\n assert!(\n\n (a - b).abs() < eps,\n\n \"assertion failed: `got almost equal to expected` \\\n\n (got: `{:?}`, expected: `{:?}`, diff=`{:?}`)\",\n\n got,\n\n expected,\n\n (a - b).abs()\n\n );\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/transform.rs", "rank": 20, "score": 87831.12085562068 }, { "content": "fn assert_num_slice_approx_eq<T: yfft::Num>(got: &[T], expected: &[T], releps: T) {\n\n assert_eq!(got.len(), expected.len());\n\n // We can't use `Iterator::max()` because T doesn't implement Ord\n\n let maxabs = expected\n\n .iter()\n\n .map(|x| x.abs())\n\n .fold(T::zero() / T::zero(), |x, y| x.max(y))\n\n + T::from(0.01).unwrap();\n\n let eps = maxabs * releps;\n\n for i in 0..got.len() {\n\n let a = got[i];\n\n let b = expected[i];\n\n if (a - b).abs() > eps {\n\n assert!(\n\n (a - b).abs() < eps,\n\n \"assertion failed: `got almost equal to expected` \\\n\n (got: `{:?}`, expected: `{:?}`, diff=`{:?}`)\",\n\n got,\n\n expected,\n\n (a - b).abs()\n\n );\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/realfft.rs", "rank": 21, "score": 87831.12085562068 }, { "content": "fn assert_num_slice_approx_eq<T: yfft::Num>(got: &[T], expected: &[T], releps: T) {\n\n assert_eq!(got.len(), expected.len());\n\n // We can't use `Iterator::max()` because T doesn't implement Ord\n\n let maxabs = expected\n\n .iter()\n\n .map(|x| x.abs())\n\n .fold(T::zero() / T::zero(), |x, y| x.max(y))\n\n + T::from(0.01).unwrap();\n\n let eps = maxabs * releps;\n\n for i in 0..got.len() {\n\n let a = got[i];\n\n let b = expected[i];\n\n if (a - b).abs() > eps {\n\n assert!(\n\n (a - b).abs() < eps,\n\n \"assertion failed: `got almost equal to expected` \\\n\n (got: `{:?}`, expected: `{:?}`, diff=`{:?}`)\",\n\n got,\n\n expected,\n\n (a - b).abs()\n\n );\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/conv.rs", "rank": 22, "score": 87831.12085562068 }, { "content": "fn spectrum_convolve<T: Num>(buffer: &mut [T], ir_fq: &[T]) {\n\n buffer[0] = buffer[0] * ir_fq[0];\n\n buffer[1] = buffer[1] * ir_fq[1];\n\n for i in 1..buffer.len() / 2 {\n\n let (r1, i1) = (buffer[i * 2], buffer[i * 2 + 1]);\n\n let (r2, i2) = (ir_fq[i * 2], ir_fq[i * 2 + 1]);\n\n buffer[i * 2] = r1 * r2 - i1 * i2;\n\n buffer[i * 2 + 1] = r1 * i2 + r2 * i1;\n\n }\n\n}\n", "file_path": "tests/conv.rs", "rank": 23, "score": 86700.18614198311 }, { "content": "pub fn factorize_radix2(x: usize) -> Result<Vec<usize>, PlanError> {\n\n if (x & (x - 1)) == 0 {\n\n Ok(vec![2; x.trailing_zeros() as usize])\n\n } else {\n\n Err(PlanError::InvalidInput)\n\n }\n\n}\n\n\n", "file_path": "src/setup.rs", "rank": 24, "score": 86667.53589251381 }, { "content": "#[cfg(all(target_feature = \"avx\", target_feature = \"fma\"))]\n\n#[allow(dead_code)]\n\npub fn avx_fma_f32x8_fmaddsub(x: f32x8, y: f32x8, z: f32x8) -> f32x8 {\n\n let x: vendor::__m256 = unsafe { mem::transmute(x) };\n\n let y: vendor::__m256 = unsafe { mem::transmute(y) };\n\n let z: vendor::__m256 = unsafe { mem::transmute(z) };\n\n unsafe { mem::transmute(vendor::_mm256_fmaddsub_ps(x, y, z)) }\n\n}\n\n\n", "file_path": "src/simdutils.rs", "rank": 25, "score": 85007.53727886878 }, { "content": "#[cfg(all(target_feature = \"avx\", target_feature = \"fma\"))]\n\n#[allow(dead_code)]\n\npub fn avx_fma_f32x8_fmadd(x: f32x8, y: f32x8, z: f32x8) -> f32x8 {\n\n let x: vendor::__m256 = unsafe { mem::transmute(x) };\n\n let y: vendor::__m256 = unsafe { mem::transmute(y) };\n\n let z: vendor::__m256 = unsafe { mem::transmute(z) };\n\n unsafe { mem::transmute(vendor::_mm256_fmadd_ps(x, y, z)) }\n\n}\n\n\n", "file_path": "src/simdutils.rs", "rank": 26, "score": 85007.53727886878 }, { "content": "#[cfg(all(target_feature = \"avx\", target_feature = \"fma\"))]\n\n#[allow(dead_code)]\n\npub fn avx_fma_f32x8_fmsub(x: f32x8, y: f32x8, z: f32x8) -> f32x8 {\n\n let x: vendor::__m256 = unsafe { mem::transmute(x) };\n\n let y: vendor::__m256 = unsafe { mem::transmute(y) };\n\n let z: vendor::__m256 = unsafe { mem::transmute(z) };\n\n unsafe { mem::transmute(vendor::_mm256_fmsub_ps(x, y, z)) }\n\n}\n\n\n", "file_path": "src/simdutils.rs", "rank": 27, "score": 85007.53727886878 }, { "content": "#[cfg(all(target_feature = \"sse3\", target_feature = \"fma\"))]\n\n#[allow(dead_code)]\n\npub fn sse3_fma_f32x4_fmaddsub(x: f32x4, y: f32x4, z: f32x4) -> f32x4 {\n\n let x = unsafe { mem::transmute(x) };\n\n let y = unsafe { mem::transmute(y) };\n\n let z = unsafe { mem::transmute(z) };\n\n let w = unsafe { vendor::_mm_fmaddsub_ps(x, y, z) };\n\n unsafe { mem::transmute(w) }\n\n}\n\n\n", "file_path": "src/simdutils.rs", "rank": 28, "score": 85007.53727886878 }, { "content": "// thanks to the linearity of DFT, we only need as many test cases as the DFT size\n\n// (unless some buggy code breaks it)\n\nfn test_patterns<T: yfft::Num>(size: usize) -> Vec<Vec<T>> {\n\n let mut vec = Vec::new();\n\n vec.push(vec![T::zero(); size * 2]);\n\n for x in 0..size {\n\n let mut vec2 = vec![T::zero(); size * 2];\n\n vec2[x * 2] = One::one();\n\n vec.push(vec2);\n\n }\n\n for x in 0..size {\n\n let mut vec2 = vec![T::zero(); size * 2];\n\n vec2[x * 2 + 1] = One::one();\n\n vec.push(vec2);\n\n }\n\n vec.push(\n\n (0..size * 2)\n\n .map(|x| -> T { T::from(x).unwrap() })\n\n .collect::<Vec<T>>(),\n\n );\n\n vec.push(\n\n (0..size * 2)\n", "file_path": "tests/transform.rs", "rank": 29, "score": 84986.86167102931 }, { "content": "// thanks to the linearity of DFT, we only need as many test cases as the DFT size\n\n// (unless some buggy code breaks it)\n\nfn test_patterns<T: yfft::Num>(size: usize) -> Vec<Vec<T>> {\n\n let mut vec = Vec::new();\n\n vec.push(vec![T::zero(); size]);\n\n for x in 0..size {\n\n let mut vec2 = vec![T::zero(); size];\n\n vec2[x] = One::one();\n\n vec.push(vec2);\n\n }\n\n vec.push(\n\n (0..size)\n\n .map(|x| -> T { T::from(x).unwrap() })\n\n .collect::<Vec<T>>(),\n\n );\n\n vec.push(\n\n (0..size)\n\n .map(|x| -> T { T::from(x * 3 + 7).unwrap() })\n\n .collect::<Vec<T>>(),\n\n );\n\n vec.push(\n\n (0..size)\n", "file_path": "tests/realfft.rs", "rank": 30, "score": 84986.86167102931 }, { "content": "// The number of patterns are reduced compared to that of `realfft.rs`\n\nfn test_patterns<T: yfft::Num>(size: usize) -> Vec<Vec<T>> {\n\n let mut vec = Vec::new();\n\n vec.push(vec![T::zero(); size]);\n\n for x in 0..size {\n\n let mut vec2 = vec![T::zero(); size];\n\n vec2[x] = T::one();\n\n vec.push(vec2);\n\n }\n\n vec.push(\n\n (0..size)\n\n .map(|x| -> T { T::from(x).unwrap() })\n\n .collect::<Vec<T>>(),\n\n );\n\n vec.push(\n\n (0..size)\n\n .map(|x| -> T { T::from((x * 3 + 7) & 0xf).unwrap() })\n\n .collect::<Vec<T>>(),\n\n );\n\n vec.push(\n\n (0..size)\n\n .map(|x| -> T { T::from((x * 3 + 7) ^ (x * 7 + 3) ^ (x >> 1)).unwrap() })\n\n .collect::<Vec<T>>(),\n\n );\n\n\n\n vec\n\n}\n\n\n", "file_path": "tests/conv.rs", "rank": 31, "score": 84986.86167102931 }, { "content": "fn cyclic_convolve<T: Num>(out: &mut [T], in1: &[T], in2: &[T]) {\n\n for (i, out) in out.iter_mut().enumerate() {\n\n let mut sum = T::zero();\n\n for k in 0..in2.len() {\n\n sum += in1[(i + in1.len() - k) % in1.len()] * in2[k];\n\n }\n\n *out = sum;\n\n }\n\n}\n\n\n", "file_path": "tests/conv.rs", "rank": 32, "score": 83426.76595368847 }, { "content": "/// Creates a kernel that converts from the `Real` format to `Complex` format.\n\npub fn new_real_to_complex_kernel<T>(len: usize) -> Box<Kernel<T>>\n\nwhere\n\n T: Num,\n\n{\n\n Box::new(RealToComplexKernel { len })\n\n}\n\n\n", "file_path": "src/kernel/convert.rs", "rank": 33, "score": 82396.53795588226 }, { "content": "/// Creates a bit reversal kernel.\n\n///\n\n/// A bit reversal kernel is, as its name implies, a special kernel that performs\n\n/// the bit reversal operation. The kernel is supposed to be executed after DIF FFT\n\n/// steps.\n\npub fn new_bit_reversal_kernel<T>(radixes: &[usize]) -> Box<Kernel<T>>\n\nwhere\n\n T: Num,\n\n{\n\n let len = radixes.iter().product();\n\n let mut indices = vec![0; len];\n\n\n\n let mut digits = vec![0; radixes.len()];\n\n let mut factors = vec![0; radixes.len()];\n\n factors[0] = 1;\n\n for i in 0..radixes.len() - 1 {\n\n factors[i + 1] = factors[i] * radixes[i];\n\n }\n\n\n\n let mut cur: usize = 0;\n\n for i in 0..len {\n\n indices[cur] = i;\n\n if i < len - 1 {\n\n digits[radixes.len() - 1] += 1;\n\n cur += factors[radixes.len() - 1];\n", "file_path": "src/kernel/bitreversal.rs", "rank": 34, "score": 82396.53795588226 }, { "content": "#[inline]\n\npub fn f32x4_complex_mul_rrii(x: f32x4, y: f32x4, neg_mask: f32x4) -> f32x4 {\n\n let y_iirr = shuffle!(y, y, [2, 3, 4, 5]);\n\n\n\n // (y1a.r * ta.r, y1b.r * tb.r, y1a.i * ta.i, y1b.i * tb.i)\n\n let t2 = x * y;\n\n\n\n // (y1a.r * ta.i, y1b.r * tb.i, y1a.i * ta.r, y1b.i * tb.r)\n\n let t3 = x * y_iirr;\n\n\n\n // (y1a.r * ta.r, y1b.r * tb.r, y1a.r * ta.i, y1b.r * tb.i)\n\n let t4 = shuffle!(t2, t3, [0, 1, 4, 5]);\n\n\n\n // (y1a.i * ta.i, y1b.i * tb.i, y1a.i * ta.r, y1b.i * tb.r)\n\n let t5 = shuffle!(t2, t3, [2, 3, 6, 7]);\n\n\n\n // (-y1a.i * ta.i, -y1b.i * tb.i, y1a.i * ta.r, y1b.i * tb.r)\n\n let t6 = f32x4_bitxor(t5, neg_mask);\n\n\n\n // (y3a.r, y3b.r, y3a.i, y3b.i) =\n\n // (y1a.r * ta.r - y1a.i * ta.i, y1b.r * tb.r - y1b.i * tb.i,\n\n // y1a.r * ta.i + y1a.i * ta.r, y1b.r * tb.i + y1b.i * tb.r)\n\n t4 + t6\n\n}\n\n\n", "file_path": "src/simdutils.rs", "rank": 35, "score": 82078.77628968867 }, { "content": "fn ptr_lsbs(x: usize) -> usize {\n\n x & (ALIGN - 1)\n\n}\n\n\n\n/// Provides a subset of `Vec`'s interface while providing a minimum alignment\n\n/// guarantee that is convenient for SIMD operations.\n\npub struct AlignedVec<T> {\n\n storage: Vec<T>,\n\n offset: usize,\n\n}\n\n\n\nimpl<T: Copy + Default> AlignedVec<T> {\n\n pub fn with_capacity(i: usize) -> Self {\n\n debug_assert!(size_of::<T>() <= ALIGN);\n\n debug_assert!(ALIGN % size_of::<T>() == 0);\n\n\n\n let mut storage: Vec<T> = Vec::with_capacity(i + ALIGN / size_of::<T>() - 1);\n\n let mut offset = 0;\n\n\n\n // Increase the padding until the storage is aligned\n", "file_path": "src/aligned.rs", "rank": 36, "score": 81962.50666036713 }, { "content": "pub trait AlignInfo: Sized {\n\n fn is_aligned() -> bool;\n\n unsafe fn read<T>(p: *const T) -> T;\n\n unsafe fn write<T>(p: *mut T, value: T);\n\n}\n\n\n", "file_path": "src/kernel/utils.rs", "rank": 37, "score": 81579.27493284707 }, { "content": "/// Creates a kernel that converts from the `HalfComplex` format to `Complex` format.\n\npub fn new_half_complex_to_complex_kernel<T>(len: usize) -> Box<Kernel<T>>\n\nwhere\n\n T: Num,\n\n{\n\n assert!(len % 2 == 0);\n\n Box::new(HalfComplexToComplexKernel { len })\n\n}\n\n\n", "file_path": "src/kernel/convert.rs", "rank": 38, "score": 81126.91905169497 }, { "content": "pub fn if_compatible<TExpect, TRequired, F>(f: F) -> Option<TRequired>\n\nwhere\n\n Option<TRequired>: Any,\n\n Option<TExpect>: Any,\n\n F: FnOnce() -> Option<TExpect>,\n\n{\n\n let mut ret_cell = None;\n\n if let Some(ret) = (&mut ret_cell as &mut Any).downcast_mut() {\n\n *ret = f();\n\n }\n\n ret_cell\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct AlignReqKernelWrapper<T>(T);\n\n\n\nimpl<T> AlignReqKernelWrapper<T> {\n\n pub fn new(x: T) -> Self {\n\n AlignReqKernelWrapper(x)\n\n }\n", "file_path": "src/kernel/utils.rs", "rank": 39, "score": 80736.53934223723 }, { "content": "pub fn branch_on_static_params<F, T>(cparams: &KernelCreationParams, f: F) -> T\n\nwhere\n\n F: StaticParamsConsumer<T>,\n\n{\n\n match (cparams.kernel_type, cparams.inverse) {\n\n (KernelType::Dit, false) => f.consume(cparams, StaticParamsDitForward {}),\n\n (KernelType::Dif, false) => f.consume(cparams, StaticParamsDifForward {}),\n\n (KernelType::Dit, true) => f.consume(cparams, StaticParamsDitBackward {}),\n\n (KernelType::Dif, true) => f.consume(cparams, StaticParamsDifBackward {}),\n\n }\n\n}\n\n\n", "file_path": "src/kernel/utils.rs", "rank": 40, "score": 79466.92043804994 }, { "content": "fn naive_dft<T: yfft::Num>(input: &[T], output: &mut [T], inverse: bool) {\n\n let len = input.len() / 2;\n\n let full_circle = if inverse { 2 } else { -2 };\n\n let twiddle_delta: Complex<T> = Complex::new(\n\n Zero::zero(),\n\n T::from(full_circle).unwrap() * T::PI() / T::from(len).unwrap(),\n\n )\n\n .exp();\n\n let mut twiddle_1 = Complex::one();\n\n for x in 0..len {\n\n let mut twiddle_2 = Complex::one();\n\n let mut sum = Complex::zero();\n\n\n\n for y in 0..len {\n\n sum = sum + Complex::new(input[y * 2], input[y * 2 + 1]) * twiddle_2;\n\n twiddle_2 = twiddle_2 * twiddle_1;\n\n }\n\n\n\n output[x * 2] = sum.re;\n\n output[x * 2 + 1] = sum.im;\n\n\n\n twiddle_1 = twiddle_1 * twiddle_delta;\n\n }\n\n}\n\n\n", "file_path": "tests/transform.rs", "rank": 41, "score": 79214.4224105048 }, { "content": "pub fn new_generic_kernel<T: 'static>(cparams: &KernelCreationParams) -> Box<Kernel<T>>\n\nwhere\n\n T: Num,\n\n{\n\n let full_circle = if cparams.inverse { 2 } else { -2 };\n\n let twiddle_delta = Complex::new(\n\n Zero::zero(),\n\n T::from(cparams.size / cparams.radix / cparams.unit).unwrap()\n\n * T::from(full_circle).unwrap()\n\n * T::PI()\n\n / T::from(cparams.size).unwrap(),\n\n )\n\n .exp();\n\n let coef_delta = Complex::new(\n\n Zero::zero(),\n\n T::from(full_circle).unwrap() * T::PI() / T::from(cparams.radix).unwrap(),\n\n )\n\n .exp();\n\n\n\n match cparams.kernel_type {\n", "file_path": "src/kernel/generic.rs", "rank": 42, "score": 78264.18159632047 }, { "content": "pub fn new_x86_sse2_kernel<T>(_: &KernelCreationParams) -> Option<Box<Kernel<T>>>\n\nwhere\n\n T: Num,\n\n{\n\n None\n\n}\n", "file_path": "src/kernel/x86/x86sse2.rs", "rank": 43, "score": 77123.17384492696 }, { "content": "pub fn new_specialized_generic_kernel<T>(cparams: &KernelCreationParams) -> Option<Box<Kernel<T>>>\n\nwhere\n\n T: Num,\n\n{\n\n branch_on_static_params(\n\n cparams,\n\n Factory::<T> {\n\n phantom: PhantomData,\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/kernel/generic2.rs", "rank": 44, "score": 77123.17384492696 }, { "content": "pub fn new_x86_kernel<T>(cparams: &KernelCreationParams) -> Option<Box<Kernel<T>>>\n\nwhere\n\n T: Num,\n\n{\n\n None.or_else(|| x86avxf32radix2::new_x86_avx_f32_radix2_kernel(cparams))\n\n .or_else(|| x86avxf32radix4::new_x86_avx_f32_radix4_kernel(cparams))\n\n .or_else(|| x86sse3f32radix4::new_x86_sse3_f32_radix4_kernel(cparams))\n\n .or_else(|| x86sse2::new_x86_sse2_kernel(cparams))\n\n .or_else(|| x86sse1radix2::new_x86_sse_radix2_kernel(cparams))\n\n .or_else(|| x86sse1radix4::new_x86_sse_radix4_kernel(cparams))\n\n}\n\n\n\npub unsafe fn new_x86_bit_reversal_kernel<T>(indices: &Vec<usize>) -> Option<Box<Kernel<T>>>\n\nwhere\n\n T: Num,\n\n{\n\n let radix2 = indices.len() % 2 == 0\n\n && (0..indices.len() / 2).all(|i| indices[i + indices.len() / 2] == indices[i] + 1);\n\n let radix4 = indices.len() % 4 == 0\n\n && (0..indices.len() / 4).all(|i| {\n", "file_path": "src/kernel/x86/mod.rs", "rank": 45, "score": 77123.17384492696 }, { "content": "pub fn new_x86_sse_radix4_kernel<T>(cparams: &KernelCreationParams) -> Option<Box<Kernel<T>>>\n\nwhere\n\n T: Num,\n\n{\n\n if cparams.radix != 4 {\n\n return None;\n\n }\n\n\n\n if_compatible(|| branch_on_static_params(cparams, Factory {}))\n\n}\n\n\n", "file_path": "src/kernel/x86/x86sse1radix4.rs", "rank": 46, "score": 75008.26943934165 }, { "content": "pub fn new_x86_sse_radix2_kernel<T>(cparams: &KernelCreationParams) -> Option<Box<Kernel<T>>>\n\nwhere\n\n T: Num,\n\n{\n\n if cparams.radix != 2 {\n\n return None;\n\n }\n\n\n\n if_compatible(|| branch_on_static_params(cparams, Factory {}))\n\n}\n\n\n", "file_path": "src/kernel/x86/x86sse1radix2.rs", "rank": 47, "score": 75008.26943934165 }, { "content": "pub fn new_x86_avx_f32_radix4_kernel<T>(cparams: &KernelCreationParams) -> Option<Box<Kernel<T>>>\n\nwhere\n\n T: Num,\n\n{\n\n if cparams.radix != 4 {\n\n return None;\n\n }\n\n\n\n if_compatible(|| branch_on_static_params(cparams, Factory {}))\n\n}\n\n\n", "file_path": "src/kernel/x86/x86avxf32radix4.rs", "rank": 48, "score": 74026.40886351035 }, { "content": "pub fn new_x86_sse3_f32_radix4_kernel<T>(cparams: &KernelCreationParams) -> Option<Box<Kernel<T>>>\n\nwhere\n\n T: Num,\n\n{\n\n if cparams.radix != 4 {\n\n return None;\n\n }\n\n\n\n if_compatible(|| branch_on_static_params(cparams, Factory {}))\n\n}\n\n\n", "file_path": "src/kernel/x86/x86sse3f32radix4.rs", "rank": 49, "score": 74026.40886351035 }, { "content": "pub fn new_x86_avx_f32_radix2_kernel<T>(cparams: &KernelCreationParams) -> Option<Box<Kernel<T>>>\n\nwhere\n\n T: Num,\n\n{\n\n if cparams.radix != 2 {\n\n return None;\n\n }\n\n\n\n if_compatible(|| branch_on_static_params(cparams, Factory {}))\n\n}\n\n\n", "file_path": "src/kernel/x86/x86avxf32radix2.rs", "rank": 50, "score": 74026.40886351035 }, { "content": "/// Creates a real FFT post-processing or backward real FFT pre-processing kernel.\n\npub fn new_real_fft_pre_post_process_kernel<T>(len: usize, inverse: bool) -> Box<Kernel<T>>\n\nwhere\n\n T: Num,\n\n{\n\n super::x86::new_x86_real_fft_pre_post_process_kernel(len, inverse).unwrap_or_else(|| {\n\n assert!(len % 2 == 0);\n\n Box::new(RealFFTPrePostProcessKernel {\n\n len,\n\n table: new_real_fft_coef_table(len, inverse),\n\n inverse,\n\n })\n\n })\n\n}\n\n\n\npub(super) fn new_real_fft_coef_table<T>(len: usize, inverse: bool) -> Vec<T>\n\nwhere\n\n T: Num,\n\n{\n\n assert!(len % 2 == 0);\n\n let mut table = Vec::with_capacity(len * 2);\n", "file_path": "src/kernel/realfft.rs", "rank": 51, "score": 73496.73045659564 }, { "content": "pub trait AlignReqKernel<T>: fmt::Debug + Sized + Sync + Send {\n\n fn transform<I: AlignInfo>(&self, params: &mut KernelParams<T>);\n\n fn required_work_area_size(&self) -> usize {\n\n 0\n\n }\n\n fn alignment_requirement(&self) -> usize;\n\n}\n\n\n", "file_path": "src/kernel/utils.rs", "rank": 52, "score": 63726.98268284632 }, { "content": "#[test]\n\nfn test() {\n\n let setup: Setup<f32> = Setup::new(&Options {\n\n input_data_order: DataOrder::Natural,\n\n output_data_order: DataOrder::Swizzled,\n\n input_data_format: DataFormat::Complex,\n\n output_data_format: DataFormat::Complex,\n\n len: 1024,\n\n inverse: false,\n\n })\n\n .unwrap();\n\n let setup_rc = Rc::new(setup);\n\n Env::new(setup_rc);\n\n}\n", "file_path": "tests/test.rs", "rank": 53, "score": 58884.832505522325 }, { "content": "#[test]\n\nfn conv_f64() {\n\n conv::<f64>();\n\n}\n\n\n", "file_path": "tests/conv.rs", "rank": 54, "score": 57597.52977372536 }, { "content": "#[test]\n\nfn conv_f32() {\n\n conv::<f32>();\n\n}\n\n\n", "file_path": "tests/conv.rs", "rank": 55, "score": 57597.52977372536 }, { "content": "#[test]\n\nfn fft_backward_f32() {\n\n simple_fft::<f32>(true);\n\n}\n\n\n", "file_path": "tests/transform.rs", "rank": 56, "score": 56405.69935726127 }, { "content": "#[test]\n\nfn fft_forward_f32() {\n\n simple_fft::<f32>(false);\n\n}\n\n\n", "file_path": "tests/transform.rs", "rank": 57, "score": 56405.69935726127 }, { "content": "#[test]\n\nfn fft_forward_f64() {\n\n simple_fft::<f64>(false);\n\n}\n\n\n", "file_path": "tests/transform.rs", "rank": 58, "score": 56405.69935726127 }, { "content": "#[test]\n\nfn fft_backward_f64() {\n\n simple_fft::<f64>(true);\n\n}\n\n\n", "file_path": "tests/transform.rs", "rank": 59, "score": 56405.69935726127 }, { "content": "#[test]\n\nfn fft_roundtrip_real_f64() {\n\n fft_roundtrip_real::<f64>();\n\n}\n", "file_path": "tests/realfft.rs", "rank": 60, "score": 55299.10002300915 }, { "content": "#[test]\n\nfn fft_real_forward_f64() {\n\n fft_real_forward::<f64>();\n\n}\n\n\n", "file_path": "tests/realfft.rs", "rank": 61, "score": 55299.10002300915 }, { "content": "#[test]\n\nfn fft_roundtrip_shortcut_f64() {\n\n fft_roundtrip_shortcut::<f64>();\n\n}\n", "file_path": "tests/transform.rs", "rank": 62, "score": 55299.10002300915 }, { "content": "#[test]\n\nfn fft_real_forward_f32() {\n\n fft_real_forward::<f32>();\n\n}\n\n\n", "file_path": "tests/realfft.rs", "rank": 63, "score": 55299.10002300915 }, { "content": "#[test]\n\nfn fft_real_backward_f32() {\n\n fft_real_backward::<f32>();\n\n}\n\n\n", "file_path": "tests/realfft.rs", "rank": 64, "score": 55299.10002300915 }, { "content": "#[test]\n\nfn fft_real_backward_f64() {\n\n fft_real_backward::<f64>();\n\n}\n\n\n", "file_path": "tests/realfft.rs", "rank": 65, "score": 55299.10002300915 }, { "content": "#[test]\n\nfn fft_roundtrip_real_f32() {\n\n fft_roundtrip_real::<f32>();\n\n}\n\n\n", "file_path": "tests/realfft.rs", "rank": 66, "score": 55299.10002300915 }, { "content": "#[test]\n\nfn fft_roundtrip_shortcut_f32() {\n\n fft_roundtrip_shortcut::<f32>();\n\n}\n\n\n", "file_path": "tests/transform.rs", "rank": 67, "score": 55299.10002300915 }, { "content": "#[test]\n\nfn test_f32x4_complex_mul_rrii() {\n\n let neg_mask = unsafe { mem::transmute(u32x4::new(0x80000000, 0x80000000, 0, 0)) };\n\n\n\n let c1: Complex<f32> = Complex::new(123f32, 456f32);\n\n let c2: Complex<f32> = Complex::new(789f32, 135f32);\n\n let c3: Complex<f32> = Complex::new(114f32, 514f32);\n\n let c4: Complex<f32> = Complex::new(987f32, 654f32);\n\n\n\n let d1 = c1 * c3;\n\n let d2 = c2 * c4;\n\n\n\n let x = f32x4::new(c1.re, c2.re, c1.im, c2.im);\n\n let y = f32x4::new(c3.re, c4.re, c3.im, c4.im);\n\n let z = f32x4_complex_mul_rrii(x, y, neg_mask);\n\n\n\n assert_eq!(f32x4_to_array(z), [d1.re, d2.re, d1.im, d2.im]);\n\n}\n\n\n", "file_path": "src/simdutils.rs", "rank": 68, "score": 54268.904729172835 }, { "content": "#[cfg(target_feature = \"avx\")]\n\n#[test]\n\n#[allow(dead_code)]\n\nfn test_avx_f32x8_complex_mul_riri() {\n\n let c1: Complex<f32> = Complex::new(123f32, 456f32);\n\n let c2: Complex<f32> = Complex::new(789f32, 135f32);\n\n let c3: Complex<f32> = Complex::new(114f32, 514f32);\n\n let c4: Complex<f32> = Complex::new(987f32, 654f32);\n\n let c5: Complex<f32> = Complex::new(12f32, 46f32);\n\n let c6: Complex<f32> = Complex::new(78f32, 15f32);\n\n let c7: Complex<f32> = Complex::new(11f32, 54f32);\n\n let c8: Complex<f32> = Complex::new(98f32, 64f32);\n\n\n\n let d1 = c1 * c3;\n\n let d2 = c2 * c4;\n\n let d3 = c5 * c7;\n\n let d4 = c6 * c8;\n\n\n\n let x = f32x8::new(c1.re, c1.im, c2.re, c2.im, c5.re, c5.im, c6.re, c6.im);\n\n let y = f32x8::new(c3.re, c3.im, c4.re, c4.im, c7.re, c7.im, c8.re, c8.im);\n\n let z = avx_f32x8_complex_mul_riri(x, y);\n\n\n\n assert_eq!(\n\n f32x8_to_array(z),\n\n [d1.re, d1.im, d2.re, d2.im, d3.re, d3.im, d4.re, d4.im]\n\n );\n\n}\n\n\n", "file_path": "src/simdutils.rs", "rank": 69, "score": 53307.46466746461 }, { "content": "#[cfg(target_feature = \"sse3\")]\n\n#[test]\n\n#[allow(dead_code)]\n\nfn test_sse3_f32x4_complex_mul_riri() {\n\n let c1: Complex<f32> = Complex::new(123f32, 456f32);\n\n let c2: Complex<f32> = Complex::new(789f32, 135f32);\n\n let c3: Complex<f32> = Complex::new(114f32, 514f32);\n\n let c4: Complex<f32> = Complex::new(987f32, 654f32);\n\n\n\n let d1 = c1 * c3;\n\n let d2 = c2 * c4;\n\n\n\n let x = f32x4::new(c1.re, c1.im, c2.re, c2.im);\n\n let y = f32x4::new(c3.re, c3.im, c4.re, c4.im);\n\n let z = sse3_f32x4_complex_mul_riri(x, y);\n\n\n\n assert_eq!(f32x4_to_array(z), [d1.re, d1.im, d2.re, d2.im]);\n\n}\n\n\n", "file_path": "src/simdutils.rs", "rank": 70, "score": 53307.46466746461 }, { "content": "/// Poor man's generic lambda\n\npub trait StaticParamsConsumer<TRet> {\n\n fn consume<T: StaticParams>(self, cparams: &KernelCreationParams, sparams: T) -> TRet;\n\n}\n\n\n", "file_path": "src/kernel/utils.rs", "rank": 71, "score": 52422.16297162381 }, { "content": "pub trait Kernel<T>: Debug + Sync + Send {\n\n fn transform(&self, params: &mut KernelParams<T>);\n\n fn required_work_area_size(&self) -> usize {\n\n 0\n\n }\n\n}\n\n\n\nimpl<T> Kernel<T>\n\nwhere\n\n T: Num + 'static,\n\n{\n\n pub fn new(cparams: &KernelCreationParams) -> Box<Kernel<T>> {\n\n x86::new_x86_kernel(cparams)\n\n .or_else(|| generic2::new_specialized_generic_kernel(cparams))\n\n .unwrap_or_else(|| generic::new_generic_kernel(cparams))\n\n }\n\n}\n", "file_path": "src/kernel/mod.rs", "rank": 72, "score": 48360.60073325773 }, { "content": "pub trait StaticParams: fmt::Debug + 'static + Sync + Send {\n\n fn inverse(&self) -> bool;\n\n fn kernel_type(&self) -> KernelType;\n\n fn check_param(&self, cparams: &KernelCreationParams) {\n\n assert_eq!(cparams.inverse, self.inverse());\n\n assert_eq!(cparams.kernel_type, self.kernel_type());\n\n }\n\n}\n\n\n", "file_path": "src/kernel/utils.rs", "rank": 73, "score": 45614.368274176915 }, { "content": " indices: &Vec<usize>,\n\n ) -> Option<Box<super::Kernel<T>>> {\n\n None\n\n }\n\n pub fn new_x86_real_fft_pre_post_process_kernel<T>(\n\n len: usize,\n\n inverse: bool,\n\n ) -> Option<Box<super::Kernel<T>>> {\n\n None\n\n }\n\n}\n\n\n\nuse super::Num;\n\nuse std::fmt::Debug;\n\n\n\nuse self::accessor::SliceAccessor;\n\n\n\npub use self::bitreversal::new_bit_reversal_kernel;\n\npub use self::convert::*;\n\npub use self::realfft::*;\n", "file_path": "src/kernel/mod.rs", "rank": 76, "score": 12.584398674875546 }, { "content": "use super::super::Num;\n\nuse super::utils::{if_compatible, AlignInfo, AlignReqKernel, AlignReqKernelWrapper};\n\nuse super::{Kernel, KernelParams, SliceAccessor};\n\n\n\nuse packed_simd::u64x2;\n\n\n\nuse std::mem;\n\n\n\npub unsafe fn new_x86_bit_reversal_kernel<T>(indices: &Vec<usize>) -> Option<Box<Kernel<T>>>\n\nwhere\n\n T: Num,\n\n{\n\n if_compatible(|| {\n\n Some(\n\n Box::new(AlignReqKernelWrapper::new(SseDWordBitReversalKernel {\n\n indices: indices.clone(),\n\n })) as Box<Kernel<f32>>,\n\n )\n\n })\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "src/kernel/x86/bitreversal.rs", "rank": 77, "score": 10.976250724719211 }, { "content": "pub use packed_simd::{f32x4, f32x8, i32x4, i32x8, u32x4, u32x8, u64x4};\n\n#[cfg(target_arch = \"x86\")]\n\nuse std::arch::x86 as vendor;\n\n#[cfg(target_arch = \"x86_64\")]\n\nuse std::arch::x86_64 as vendor;\n\nuse std::mem;\n\n\n\n#[cfg(test)]\n\nuse num_complex::Complex;\n\n\n\n#[allow(dead_code)]\n\n#[inline]\n", "file_path": "src/simdutils.rs", "rank": 78, "score": 10.581036933935444 }, { "content": "use super::kernel::KernelParams;\n\nuse super::{Num, Setup};\n\nuse num_traits::Zero;\n\nuse std::borrow::Borrow;\n\n\n\n/// Encapsulates the working area required for a transformation.\n\n#[derive(Debug, Clone)]\n\npub struct Env<TNum, TSetupRef> {\n\n setup: TSetupRef,\n\n work_area: Vec<TNum>,\n\n}\n\n\n\nimpl<TNum, TSetupRef> Env<TNum, TSetupRef>\n\nwhere\n\n TNum: Num + 'static,\n\n TSetupRef: Borrow<Setup<TNum>>,\n\n{\n\n pub fn new(setup: TSetupRef) -> Self {\n\n let work_area_size = setup.borrow().required_work_area_size();\n\n Env {\n", "file_path": "src/env.rs", "rank": 80, "score": 9.713462253388782 }, { "content": "use super::utils::{if_compatible, AlignInfo, AlignReqKernel, AlignReqKernelWrapper};\n\nuse super::{Kernel, KernelParams, SliceAccessor};\n\n\n\nuse num_iter::range_step;\n\nuse packed_simd::{f32x4, u32x4};\n\nuse std::f32;\n\nuse std::mem;\n\nuse std::ptr::{read_unaligned, write_unaligned};\n\n\n\nuse aligned::AlignedVec;\n\nuse simdutils::{f32x4_bitxor, f32x4_complex_mul_rrii};\n\nuse {mul_pos_i, Complex, Num};\n\n\n\n/// Creates a real FFT post-processing or backward real FFT pre-processing kernel.\n", "file_path": "src/kernel/x86/x86sse1realfft.rs", "rank": 82, "score": 8.43478796514988 }, { "content": "use super::utils::{if_compatible, AlignInfo, AlignReqKernel, AlignReqKernelWrapper};\n\nuse super::{Kernel, KernelParams, SliceAccessor};\n\n\n\nuse num_iter::range_step;\n\nuse packed_simd::{f32x4, u32x4};\n\nuse std::f32;\n\nuse std::mem;\n\nuse std::ptr::{read_unaligned, write_unaligned};\n\n\n\nuse aligned::AlignedVec;\n\nuse simdutils::{f32x4_bitxor, sse3_f32x4_complex_mul_riri};\n\nuse Num;\n\n\n\nuse super::x86sse1realfft::new_real_fft_coef_table;\n\n\n\n/// Creates a real FFT post-processing or backward real FFT pre-processing kernel.\n", "file_path": "src/kernel/x86/x86sse3f32realfft.rs", "rank": 84, "score": 8.278800505635557 }, { "content": "use super::utils::{if_compatible, AlignInfo, AlignReqKernel, AlignReqKernelWrapper};\n\nuse super::{Kernel, KernelParams, SliceAccessor};\n\n\n\nuse num_iter::range_step;\n\nuse packed_simd::{f32x8, u32x8};\n\nuse std::f32;\n\nuse std::mem;\n\nuse std::ptr::{read_unaligned, write_unaligned};\n\n\n\nuse aligned::AlignedVec;\n\nuse simdutils::{avx_f32x8_bitxor, avx_f32x8_complex_mul_riri};\n\nuse Num;\n\n\n\nuse super::x86sse1realfft::new_real_fft_coef_table;\n\n\n\n/// Creates a real FFT post-processing or backward real FFT pre-processing kernel.\n", "file_path": "src/kernel/x86/x86avxf32realfft.rs", "rank": 85, "score": 8.241450432611174 }, { "content": "extern crate num_complex;\n\nextern crate num_traits;\n\nextern crate yfft;\n\n\n\nuse num_complex::Complex;\n\nuse num_traits::{One, Zero};\n\n\n\nuse yfft::*;\n\n\n\n// TODO: test all kernels --- currently, only the kernels for the highest possible ISA are tested\n\n\n", "file_path": "tests/transform.rs", "rank": 86, "score": 7.982950359340988 }, { "content": "//! Defines the `SliceAccessor` type that can be used to bypass bounds checking\n\n//! on the release builds.\n\n\n\nuse std::{convert, ops};\n\n\n\n#[cfg(not(debug_assertions))]\n\nuse std::slice;\n\n\n\n#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct SliceAccessor<T> {\n\n slice: T,\n\n}\n\n\n\nimpl<T> SliceAccessor<T> {\n\n pub unsafe fn new(x: T) -> Self {\n\n SliceAccessor { slice: x }\n\n }\n\n}\n\n\n\nimpl<'a, T> convert::Into<SliceAccessor<&'a [T]>> for SliceAccessor<&'a mut [T]> {\n", "file_path": "src/kernel/accessor.rs", "rank": 87, "score": 7.765295064373983 }, { "content": "//! Defines generic FFT kernels that don't make any assumptions about radix or instruction set.\n\n//!\n\n//! Performances\n\n//! ------------\n\n//!\n\n//! According to a benchmark result, this kernel runs about 100x slower than a commercial-level FFT library on a Skylake\n\n//! machine.\n\n\n\nuse super::{Kernel, KernelCreationParams, KernelParams, KernelType, SliceAccessor};\n\n\n\nuse num_complex::Complex;\n\nuse num_iter::range_step;\n\nuse num_traits::{One, Zero};\n\n\n\nuse super::super::{complex_from_slice, Num};\n\n\n", "file_path": "src/kernel/generic.rs", "rank": 88, "score": 7.612854369450337 }, { "content": "extern crate num_complex;\n\nextern crate num_traits;\n\nextern crate yfft;\n\n\n\nuse num_traits::One;\n\n\n\nuse yfft::*;\n\n\n\n// TODO: test all kernels --- currently, only the kernels for the highest possible ISA are tested\n\n\n", "file_path": "tests/realfft.rs", "rank": 89, "score": 7.501388982952341 }, { "content": "use super::{Kernel, KernelCreationParams, KernelParams, KernelType};\n\nuse std::any::Any;\n\nuse std::{fmt, ptr};\n\n\n", "file_path": "src/kernel/utils.rs", "rank": 90, "score": 7.408836013971849 }, { "content": "//! Defines generic FFT kernels optimized for certain known radix values, but without any specific processor or\n\n//! instruction set specific optimizations.\n\n//!\n\n//! Performances\n\n//! ------------\n\n//!\n\n//! According to a benchmark result, this kernel runs about 10x slower than a commercial-level FFT library on a Skylake\n\n//! machine.\n\n\n\nuse super::utils::{branch_on_static_params, StaticParams, StaticParamsConsumer};\n\nuse super::{Kernel, KernelCreationParams, KernelParams, KernelType, SliceAccessor};\n\n\n\nuse num_complex::Complex;\n\nuse num_iter::range_step;\n\nuse num_traits::{One, Zero};\n\n\n\nuse super::super::{mul_pos_i, Num};\n\n\n\nuse std::fmt::Debug;\n\nuse std::marker::PhantomData;\n\n\n", "file_path": "src/kernel/generic2.rs", "rank": 91, "score": 7.2642628025503795 }, { "content": "extern crate num_complex;\n\nextern crate num_traits;\n\nextern crate yfft;\n\n\n\nuse yfft::*;\n\n\n", "file_path": "tests/conv.rs", "rank": 92, "score": 7.2302635826334045 }, { "content": "extern crate num_traits;\n\n\n\n#[macro_use]\n\nextern crate packed_simd;\n\n\n\nuse std::fmt::Debug;\n\nuse std::ops::{AddAssign, DivAssign, MulAssign, SubAssign};\n\n\n\nuse num_complex::Complex;\n\n\n\n#[macro_use]\n\nmod simdutils;\n\nmod aligned;\n\nmod env;\n\nmod kernel;\n\nmod setup;\n\n\n", "file_path": "src/lib.rs", "rank": 93, "score": 7.219406684079418 }, { "content": "use super::{Kernel, KernelParams, SliceAccessor};\n\n\n\nuse Num;\n\n\n\n/// Creates a kernel that converts from the `Real` format to `Complex` format.\n", "file_path": "src/kernel/convert.rs", "rank": 94, "score": 6.938996508876514 }, { "content": "//! Defines FFT kernels optimized by using SSE2 instruction set.\n\n//!\n\n//! Performances\n\n//! ------------\n\n//!\n\n//! Yet to be measured.\n\n\n\nuse super::{Kernel, KernelCreationParams, Num};\n\n\n", "file_path": "src/kernel/x86/x86sse2.rs", "rank": 95, "score": 6.759305329181377 }, { "content": "//! Defines Radix-4 FFT kernels optimized by using SSE instruction set.\n\n//!\n\n//! Performances\n\n//! ------------\n\n//!\n\n//! According to a benchmark result, this kernel runs about 1-3x slower than a commercial-level FFT library (with\n\n//! all optimizations and instruction sets including ones that this kernel doesn't support enabled) on a Skylake\n\n//! machine.\n\n\n\nuse super::super::super::simdutils::{f32x4_bitxor, f32x4_complex_mul_rrii};\n\nuse super::utils::{\n\n branch_on_static_params, if_compatible, AlignInfo, AlignReqKernel, AlignReqKernelWrapper,\n\n StaticParams, StaticParamsConsumer,\n\n};\n\nuse super::{Kernel, KernelCreationParams, KernelParams, KernelType, Num, SliceAccessor};\n\n\n\nuse num_complex::Complex;\n\nuse num_iter::range_step;\n\n\n\nuse packed_simd::f32x4;\n\n\n\nuse std::f32;\n\n\n", "file_path": "src/kernel/x86/x86sse1radix4.rs", "rank": 96, "score": 6.609049745640535 }, { "content": "//! Defines Radix-2 FFT kernels optimized by using SSE instruction set.\n\n//!\n\n//! Performances\n\n//! ------------\n\n//!\n\n//! According to a benchmark result, this kernel runs about 2-4x slower than a commercial-level FFT library (with\n\n//! all optimizations and instruction sets including ones that this kernel doesn't support enabled) on a Skylake\n\n//! machine.\n\n\n\nuse super::super::super::simdutils::{f32x4_bitxor, f32x4_complex_mul_rrii};\n\nuse super::utils::{\n\n branch_on_static_params, if_compatible, AlignInfo, AlignReqKernel, AlignReqKernelWrapper,\n\n StaticParams, StaticParamsConsumer,\n\n};\n\nuse super::{Kernel, KernelCreationParams, KernelParams, KernelType, Num, SliceAccessor};\n\n\n\nuse num_complex::Complex;\n\nuse num_iter::range_step;\n\n\n\nuse packed_simd::f32x4;\n\n\n\nuse std::f32;\n\n\n", "file_path": "src/kernel/x86/x86sse1radix2.rs", "rank": 97, "score": 6.609049745640535 }, { "content": "}\n\n\n\n#[cfg(not(target_feature = \"sse3\"))]\n\nmod x86sse3f32realfft {\n\n pub fn new_x86_sse3_f32_real_fft_pre_post_process_kernel<T>(\n\n _len: usize,\n\n _inverse: bool,\n\n ) -> Option<Box<super::Kernel<T>>> {\n\n None\n\n }\n\n}\n\n\n\n#[cfg(not(target_feature = \"avx\"))]\n\nmod x86avxbitreversal {\n\n pub fn new_x86_avx_bit_reversal_kernel<T>(_: &Vec<usize>) -> Option<Box<super::Kernel<T>>> {\n\n None\n\n }\n\n pub fn new_x86_avx_radix2_bit_reversal_kernel<T>(\n\n _: &Vec<usize>,\n\n ) -> Option<Box<super::Kernel<T>>> {\n\n None\n\n }\n\n pub fn new_x86_avx_radix4_bit_reversal_kernel<T>(\n\n _: &Vec<usize>,\n\n ) -> Option<Box<super::Kernel<T>>> {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/kernel/x86/mod.rs", "rank": 98, "score": 6.481557293588448 }, { "content": "use super::kernel::{\n\n new_bit_reversal_kernel, new_half_complex_to_complex_kernel,\n\n new_real_fft_pre_post_process_kernel, new_real_to_complex_kernel, Kernel, KernelCreationParams,\n\n KernelType,\n\n};\n\nuse super::Num;\n\nuse std::error;\n\nuse std::fmt;\n\nuse std::result::Result;\n\n\n\n/// Specifies the data order in which the data is supplied to or returned from the kernel.\n\n#[derive(Debug, Clone, Copy, Ord, PartialOrd, Eq, PartialEq)]\n\npub enum DataOrder {\n\n /// The data is ordered in a natural order.\n\n Natural,\n\n\n\n /// The data is ordered in a bit-reversal order with arbitrary radixes.\n\n /// Use this value if you intend to process the output in an order-independent way and transform it back to the\n\n /// natural order.\n\n Swizzled,\n", "file_path": "src/setup.rs", "rank": 99, "score": 6.455751186612876 } ]
Rust
client/src/renderer/glium/traits.rs
BonsaiDen/shooter-rs
0ce5d02065be9ded30e29fdf133ad7a615f29323
use glium::{glutin, DisplayBuild, Surface}; use shared::Lithium::{ Client, ClientHandler, EntityState, EntityRegistry, Event, BaseLevel, Renderer }; use super::GliumRenderer; impl Renderer for GliumRenderer { fn run< H: ClientHandler<Self, G, L, E, S>, E: Event, S: EntityState, L: BaseLevel<S>, G: EntityRegistry<S, L, Self> >(mut client: Client<H, Self, G, L, E, S>) where Self: Sized { let (width, height) = (256, 256); let display = glutin::WindowBuilder::new() .with_multisampling(4) .with_dimensions(width, height) .build_glium().unwrap(); let mut renderer = GliumRenderer::new(display, width, height); client.init(&mut renderer); let mut last_tick_time = 0.0; let mut last_frame_time = 0.0; let mut frames_per_tick = 0; while renderer.running() { if renderer.should_draw() { let frame_time = renderer.time(); let tick_rate = renderer.tick_rate(); if frames_per_tick == 0 { if client.tick(&mut renderer) { frames_per_tick = renderer.fps() / tick_rate; last_tick_time = frame_time; } } renderer.set_delta_time((frame_time - last_frame_time) as f32); renderer.set_delta_u( 1.0 / (1.0 / tick_rate as f32) * (frame_time - last_tick_time) as f32 ); client.draw(&mut renderer); renderer.draw(); last_frame_time = frame_time; if frames_per_tick > 0 { frames_per_tick -= 1; } } renderer.events(); } client.destroy(&mut renderer); } fn time(&self) -> f64 { self.time } fn set_time(&mut self, time: f64) { self.time = time; } fn delta_time(&self) -> f32{ self.dt } fn set_delta_time(&mut self, dt: f32) { self.dt = dt; } fn delta_u(&self) -> f32 { self.u } fn set_delta_u(&mut self, u: f32) { self.u = u; } fn fps(&self) -> u32 { self.frame_rate } fn set_fps(&mut self, frame_rate: u32) { self.frame_rate = frame_rate; } fn tick_rate(&self) -> u32 { self.tick_rate } fn set_tick_rate(&mut self, tick_rate: u32) { self.tick_rate = tick_rate; } fn interpolation_ticks(&self) -> usize { self.interpolation_ticks } fn set_interpolation_ticks(&mut self, ticks: usize) { self.interpolation_ticks = ticks; } }
use glium::{glutin, DisplayBuild, Surface}; use shared::Lithium::{ Client, ClientHandler, EntityState, EntityRegistry, Event, BaseLevel, Renderer }; use super::GliumRenderer; impl Renderer for GliumRenderer { fn run< H: ClientHandler<Self, G, L, E, S>, E: Event, S: EntityState, L: BaseLevel<S>, G: EntityRegistry<S, L, Self> >(mut client: Client<H, Self, G, L, E, S>) where Self: Sized { let (width, height) = (256, 256); let display = glutin::WindowBuilder::new() .with_multisampling(4) .with_dimensions(width, height) .build_glium().unwrap(); let mut renderer = GliumRenderer::new(display, width, height); client.init(&mut renderer); let mut last_tick_time = 0.0; let mut last_frame_time = 0.0; let mut frames_per_tick = 0;
fn time(&self) -> f64 { self.time } fn set_time(&mut self, time: f64) { self.time = time; } fn delta_time(&self) -> f32{ self.dt } fn set_delta_time(&mut self, dt: f32) { self.dt = dt; } fn delta_u(&self) -> f32 { self.u } fn set_delta_u(&mut self, u: f32) { self.u = u; } fn fps(&self) -> u32 { self.frame_rate } fn set_fps(&mut self, frame_rate: u32) { self.frame_rate = frame_rate; } fn tick_rate(&self) -> u32 { self.tick_rate } fn set_tick_rate(&mut self, tick_rate: u32) { self.tick_rate = tick_rate; } fn interpolation_ticks(&self) -> usize { self.interpolation_ticks } fn set_interpolation_ticks(&mut self, ticks: usize) { self.interpolation_ticks = ticks; } }
while renderer.running() { if renderer.should_draw() { let frame_time = renderer.time(); let tick_rate = renderer.tick_rate(); if frames_per_tick == 0 { if client.tick(&mut renderer) { frames_per_tick = renderer.fps() / tick_rate; last_tick_time = frame_time; } } renderer.set_delta_time((frame_time - last_frame_time) as f32); renderer.set_delta_u( 1.0 / (1.0 / tick_rate as f32) * (frame_time - last_tick_time) as f32 ); client.draw(&mut renderer); renderer.draw(); last_frame_time = frame_time; if frames_per_tick > 0 { frames_per_tick -= 1; } } renderer.events(); } client.destroy(&mut renderer); }
function_block-function_prefix_line
[ { "content": "fn run() {\n\n\n\n let args = clap::App::new(\"shooter-client\")\n\n .version(&crate_version!())\n\n .author(\"Ivo Wetzel <[email protected]>\")\n\n .about(\"Shooter-Client\")\n\n .arg(clap::Arg::with_name(\"address:port\")\n\n .help(\"Remote server address to connect to.\")\n\n .index(1)\n\n\n\n ).get_matches();\n\n\n\n\n\n // Arguments --------------------------------------------------------------\n\n Renderer::run(game::Game::client(\n\n value_t!(args.value_of(\"address:port\"), SocketAddr).ok()\n\n ));\n\n\n\n}\n", "file_path": "client/src/main.rs", "rank": 0, "score": 140257.09079937104 }, { "content": "fn char_vertices(max_chars: usize) -> Vec<Vertex> {\n\n\n\n let mut buffer = Vec::with_capacity(max_chars * 6);\n\n for _ in 0..max_chars {\n\n buffer.push(Vertex { position: [0.0, 0.0, 0.0, 0.0], char_coords: [0.0, 0.0, 0.0, 0.0] });\n\n buffer.push(Vertex { position: [1.0, 0.0, 0.0, 0.0], char_coords: [0.0, 0.0, 0.0, 0.0] });\n\n buffer.push(Vertex { position: [0.0, 1.0, 0.0, 0.0], char_coords: [0.0, 0.0, 0.0, 0.0] });\n\n buffer.push(Vertex { position: [1.0, 0.0, 0.0, 0.0], char_coords: [0.0, 0.0, 0.0, 0.0] });\n\n buffer.push(Vertex { position: [1.0, 1.0, 0.0, 0.0], char_coords: [0.0, 0.0, 0.0, 0.0] });\n\n buffer.push(Vertex { position: [0.0, 1.0, 0.0, 0.0], char_coords: [0.0, 0.0, 0.0, 0.0] });\n\n }\n\n\n\n buffer\n\n\n\n}\n\n\n\n\n\n// Font Vertex ----------------------------------------------------------------\n", "file_path": "client/src/renderer/glium/font.rs", "rank": 1, "score": 94318.44666433493 }, { "content": "fn run_server(server_addr: SocketAddr, tick_rate: u32) {\n\n if let Err(err) = Server::run(\n\n server_addr,\n\n ServerGame::server(tick_rate, true)\n\n ) {\n\n println!(\"[Server] [Fatal] {:?}\", err);\n\n }\n\n}\n\n\n", "file_path": "client/src/game/views/connect.rs", "rank": 2, "score": 92137.84704248405 }, { "content": "#[cfg(feature=\"glium_renderer\")]\n\npub fn main() {\n\n run();\n\n}\n\n\n", "file_path": "client/src/main.rs", "rank": 3, "score": 81378.77011946801 }, { "content": "#[cfg(feature=\"allegro_renderer\")]\n\nfn draw_triangle(\n\n renderer: &mut Renderer,\n\n state: &SharedState, color: &Color,\n\n base_scale: f32, body_scale: f32, dr: f32, da: f32, db: f32\n\n) {\n\n let beta = f32::consts::PI / dr;\n\n let ox = state.r.cos() * -2.0 * base_scale + 0.5;\n\n let oy = state.r.sin() * -2.0 * base_scale + 0.5;\n\n let ax = ox + state.x + state.r.cos() * da * body_scale;\n\n let ay = oy + state.y + state.r.sin() * da * body_scale;\n\n let bx = ox + state.x + (state.r + beta).cos() * db * body_scale;\n\n let by = oy + state.y + (state.r + beta).sin() * db * body_scale;\n\n let cx = ox + state.x + (state.r - beta).cos() * db * body_scale;\n\n let cy = oy + state.y + (state.r - beta).sin() * db * body_scale;\n\n renderer.triangle(\n\n color, ax, ay, bx, by, cx, cy, 0.5 * body_scale\n\n );\n\n}\n\n\n", "file_path": "client/src/entities/ship.rs", "rank": 4, "score": 80835.7272670857 }, { "content": "pub trait DrawableEntity<S: EntityState, L: BaseLevel<S>, R: Renderer> {\n\n\n\n fn draw(&mut self, _: &mut R, _: &Level<S, L>, _: S) {}\n\n\n\n fn event(&mut self, _: &EntityEvent, _: &S) {}\n\n\n\n}\n\n\n", "file_path": "lithium-rs/src/entity/traits.rs", "rank": 5, "score": 72875.5221481029 }, { "content": "// Entity Registry Trait ------------------------------------------------------\n\npub trait EntityRegistry<S: EntityState, L: BaseLevel<S>, R: Renderer> {\n\n fn entity_from_type_id(&self, type_id: u8) -> Entity<S, L, R>;\n\n}\n\n\n", "file_path": "lithium-rs/src/entity/manager/registry.rs", "rank": 6, "score": 71822.67554335222 }, { "content": "// Allegro Rendering Implementation -------------------------------------------\n\n#[cfg(feature=\"allegro_renderer\")]\n\nmod allegro;\n\n\n\n#[cfg(feature=\"allegro_renderer\")]\n\npub use self::allegro::AllegroRenderer as Renderer;\n\n\n\n#[cfg(feature=\"allegro_renderer\")]\n\npub use self::allegro::KeyCode as KeyCode;\n\n\n\n// GLium Rendering Implementation ---------------------------------------------\n\n#[cfg(feature=\"glium_renderer\")]\n\nmod glium;\n\n\n\n#[cfg(feature=\"glium_renderer\")]\n\npub use self::glium::GliumRenderer as Renderer;\n\n\n\n#[cfg(feature=\"glium_renderer\")]\n\npub use self::glium::KeyCode as KeyCode;\n\n\n\n// Generic Components ---------------------------------------------------------\n\nmod particle_system;\n\npub use self::particle_system::ParticleSystem as ParticleSystem;\n\npub use self::particle_system::Particle as Particle;\n\n\n", "file_path": "client/src/renderer/mod.rs", "rank": 7, "score": 65274.81707690726 }, { "content": "// External Dependencies ------------------------------------------------------\n\nuse allegro::{\n\n Core, Display, DisplayOption, DisplayOptionImportance, EventQueue, OPENGL\n\n};\n\n\n\n\n\n// Internal Dependencies ------------------------------------------------------\n\nuse shared::Lithium::{\n\n Client, ClientHandler, EntityState, EntityRegistry, Event, BaseLevel, Renderer\n\n};\n\nuse super::AllegroRenderer;\n\n\n\n\n\n// Allegro Renderer Trait Implementation --------------------------------------\n\nimpl Renderer for AllegroRenderer {\n\n\n\n // Statics ----------------------------------------------------------------\n\n fn run<\n\n H: ClientHandler<Self, G, L, E, S>,\n\n E: Event,\n", "file_path": "client/src/renderer/allegro/traits.rs", "rank": 10, "score": 63389.33354602877 }, { "content": " S: EntityState,\n\n L: BaseLevel<S>,\n\n G: EntityRegistry<S, L, Self>\n\n\n\n >(mut client: Client<H, Self, G, L, E, S>) where Self: Sized {\n\n\n\n // Init Allegro\n\n let mut core = Core::init().unwrap();\n\n let q = EventQueue::new(&core).unwrap();\n\n\n\n // Keyboard\n\n core.install_keyboard().unwrap();\n\n q.register_event_source(core.get_keyboard_event_source());\n\n\n\n // Create Display\n\n core.set_new_display_flags(OPENGL);\n\n core.set_new_display_option(\n\n DisplayOption::SampleBuffers,\n\n 2,\n\n DisplayOptionImportance::Suggest\n", "file_path": "client/src/renderer/allegro/traits.rs", "rank": 11, "score": 63382.56613578502 }, { "content": " is_running: bool\n\n\n\n}\n\n\n\nimpl GliumRenderer {\n\n\n\n pub fn new(display: GlutinFacade, width: u32, height: u32) -> GliumRenderer {\n\n\n\n let font = Font::new(&display, \"font.fnt\", \"font_0.png\");\n\n let particle_system = GliumParticleSystem::new(&display, 1000);\n\n\n\n GliumRenderer {\n\n\n\n // Timing\n\n frame_rate: 60,\n\n tick_rate: 60,\n\n time: 0.0,\n\n dt: 0.0,\n\n u: 0.0,\n\n\n", "file_path": "client/src/renderer/glium/mod.rs", "rank": 12, "score": 63371.46831852569 }, { "content": "\n\n // Window Handling --------------------------------------------------------\n\n pub fn set_title(&mut self, title: &str) {\n\n self.display.get_window().unwrap().set_title(title);\n\n }\n\n\n\n pub fn resize(&mut self, width: i32, height: i32) {\n\n self.perspective = GliumRenderer::perspective(width as f32, height as f32);\n\n self.display.get_window().unwrap().set_inner_size(width as u32, height as u32);\n\n }\n\n\n\n\n\n // Input ------------------------------------------------------------------\n\n pub fn key_down(&mut self, key_code: KeyCode) -> bool {\n\n self.key_state[key_code as usize]\n\n }\n\n\n\n pub fn key_pressed(&mut self, key_code: KeyCode) -> bool {\n\n self.key_state[key_code as usize] && !self.key_state_old[key_code as usize]\n\n }\n", "file_path": "client/src/renderer/glium/mod.rs", "rank": 13, "score": 63370.126005485414 }, { "content": " );\n\n\n\n core.set_new_display_option(\n\n DisplayOption::Samples,\n\n 16,\n\n DisplayOptionImportance::Suggest\n\n );\n\n\n\n let disp = Display::new(\n\n &core, 256, 256\n\n\n\n ).ok().expect(\"Failed to create OPENGL context.\");\n\n\n\n q.register_event_source(disp.get_event_source());\n\n\n\n // Create renderer\n\n let mut renderer = AllegroRenderer::new(core, disp, q);\n\n\n\n // Init callback\n\n client.init(&mut renderer);\n", "file_path": "client/src/renderer/allegro/traits.rs", "rank": 14, "score": 63370.03049211919 }, { "content": " fn events(&mut self) {\n\n\n\n match self.queue.wait_for_event() {\n\n\n\n DisplayClose{ ..} => {\n\n self.is_running = false;\n\n },\n\n\n\n KeyDown{keycode: k, ..} if (k as u32) < 255 => {\n\n\n\n self.key_state[k as usize] = true;\n\n\n\n // Exit via Ctrl-C\n\n if k == KeyCode::C && self.key_state[KeyCode::LCtrl as usize] {\n\n self.is_running = false;\n\n }\n\n\n\n },\n\n\n\n KeyUp{keycode: k, ..} if (k as u32) < 255 => {\n", "file_path": "client/src/renderer/allegro/mod.rs", "rank": 15, "score": 63368.00584327519 }, { "content": " // Drawing\n\n target: None,\n\n display: display,\n\n font: font,\n\n perspective: GliumRenderer::perspective(width as f32, height as f32),\n\n\n\n particle_system: particle_system,\n\n interpolation_ticks: 0,\n\n\n\n // Input\n\n key_state: [false; 256],\n\n key_state_old: [false; 256],\n\n\n\n // Internal State\n\n rng: XorShiftRng::new_unseeded(),\n\n is_running: true\n\n\n\n }\n\n\n\n }\n", "file_path": "client/src/renderer/glium/mod.rs", "rank": 16, "score": 63367.04126257709 }, { "content": "use shared::Color;\n\nuse shared::Lithium::Renderer;\n\nuse renderer::Particle;\n\nuse self::particle_system::AllegroParticleSystem;\n\npub use allegro::KeyCode as KeyCode;\n\n\n\n\n\n// Allegro Based Renderer -----------------------------------------------------\n\npub struct AllegroRenderer {\n\n\n\n // Allegro Related\n\n core: Core,\n\n display: Display,\n\n queue: EventQueue,\n\n prim: PrimitivesAddon,\n\n timer: Timer,\n\n font: Font,\n\n\n\n // Timing\n\n frame_rate: u32,\n", "file_path": "client/src/renderer/allegro/mod.rs", "rank": 17, "score": 63366.683617807124 }, { "content": "\n\n let (r, l, t, b, f, n) = (\n\n width as f32, 0.0, 0.0, height as f32, 100.0, 0.0\n\n );\n\n\n\n [\n\n [2.0 / (r - l), 0.0, 0.0, 0.0],\n\n [0.0, 2.0 / (t - b), 0.0, 0.0],\n\n [0.0, 0.0, -2.0 / (f - n), 0.0],\n\n [-1.0, 1.0, -(f + n) / (f - n), 1.0f32]\n\n ]\n\n\n\n }\n\n\n\n}\n\n\n\n\n\n// Internal Methods required for trait implementation -------------------------\n\nimpl GliumRenderer {\n\n\n", "file_path": "client/src/renderer/glium/mod.rs", "rank": 18, "score": 63366.67934834668 }, { "content": "impl AllegroRenderer {\n\n\n\n pub fn new(\n\n core: Core, display: Display, queue: EventQueue\n\n\n\n ) -> AllegroRenderer {\n\n\n\n let prim = PrimitivesAddon::init(&core).unwrap();\n\n let font_addon = FontAddon::init(&core).unwrap();\n\n let font = Font::new_builtin(&font_addon).unwrap();\n\n let timer = Timer::new(&core, 1.0 / 60.0).unwrap();\n\n\n\n queue.register_event_source(timer.get_event_source());\n\n timer.start();\n\n\n\n AllegroRenderer {\n\n core: core,\n\n display: display,\n\n queue: queue,\n\n prim: prim,\n", "file_path": "client/src/renderer/allegro/mod.rs", "rank": 20, "score": 63366.081253929835 }, { "content": " fn should_draw(&mut self) -> bool {\n\n thread::sleep(Duration::from_millis(1000 / self.frame_rate as u64));\n\n self.target = Some(self.display.draw());\n\n true\n\n }\n\n\n\n fn events(&mut self) {\n\n\n\n for ev in self.display.poll_events() {\n\n match ev {\n\n glutin::Event::Closed => {\n\n self.is_running = false;\n\n },\n\n glutin::Event::Focused(false) => {\n\n for i in 0..256 {\n\n self.key_state[i] = false;\n\n }\n\n },\n\n glutin::Event::KeyboardInput(glutin::ElementState::Pressed, code, _) => {\n\n self.key_state[code as usize] = true;\n", "file_path": "client/src/renderer/glium/mod.rs", "rank": 21, "score": 63365.98219044453 }, { "content": "\n\n // Mainloop\n\n let mut last_tick_time = 0.0;\n\n let mut last_frame_time = 0.0;\n\n let mut frames_per_tick = 0;\n\n\n\n while renderer.running() {\n\n\n\n if renderer.should_draw() {\n\n\n\n let frame_time = renderer.time();\n\n let tick_rate = renderer.tick_rate();\n\n\n\n if frames_per_tick == 0 {\n\n if client.tick(&mut renderer) {\n\n frames_per_tick = renderer.fps() / tick_rate;\n\n last_tick_time = frame_time;\n\n }\n\n }\n\n\n", "file_path": "client/src/renderer/allegro/traits.rs", "rank": 22, "score": 63365.87190523575 }, { "content": " pub fn set_title(&mut self, title: &str) {\n\n self.display.set_window_title(title);\n\n }\n\n\n\n pub fn resize(&mut self, width: i32, height: i32) {\n\n self.display.resize(width, height).ok();\n\n }\n\n\n\n\n\n // Input ------------------------------------------------------------------\n\n pub fn key_down(&mut self, key_code: KeyCode) -> bool {\n\n self.key_state[key_code as usize]\n\n }\n\n\n\n pub fn key_pressed(&mut self, key_code: KeyCode) -> bool {\n\n self.key_state[key_code as usize] && !self.key_state_old[key_code as usize]\n\n }\n\n\n\n pub fn key_released(&mut self, key_code: KeyCode) -> bool {\n\n !self.key_state[key_code as usize] && self.key_state_old[key_code as usize]\n", "file_path": "client/src/renderer/allegro/mod.rs", "rank": 24, "score": 63365.21622773124 }, { "content": "\n\n if code == KeyCode::C as u8 && self.key_state[KeyCode::LCtrl as usize] {\n\n self.is_running = false;\n\n }\n\n },\n\n glutin::Event::KeyboardInput(glutin::ElementState::Released, code, _) => {\n\n self.key_state[code as usize] = false;\n\n },\n\n _ => {}\n\n }\n\n }\n\n\n\n self.set_time(clock_ticks::precise_time_ms() as f64 / 1000.0);\n\n\n\n }\n\n\n\n fn running(&mut self) -> bool {\n\n self.is_running\n\n }\n\n\n", "file_path": "client/src/renderer/glium/mod.rs", "rank": 25, "score": 63364.40904933663 }, { "content": " renderer.set_delta_time((frame_time - last_frame_time) as f32);\n\n renderer.set_delta_u(\n\n 1.0 / (1.0 / tick_rate as f32) * (frame_time - last_tick_time) as f32\n\n );\n\n\n\n client.draw(&mut renderer);\n\n renderer.draw();\n\n\n\n last_frame_time = frame_time;\n\n\n\n // TODO handle this more nicely?\n\n if frames_per_tick > 0 {\n\n frames_per_tick -= 1;\n\n }\n\n\n\n }\n\n\n\n renderer.events();\n\n\n\n }\n", "file_path": "client/src/renderer/allegro/traits.rs", "rank": 26, "score": 63364.152803539975 }, { "content": " let py = p.screen_rect.y as f32 + y + 0.5;\n\n\n\n let char_coords: [f32; 4] = [\n\n p.page_rect.x as f32,\n\n self.tex_size - p.page_rect.y as f32 - p.page_rect.height as f32,\n\n p.page_rect.width as f32,\n\n p.page_rect.height as f32\n\n ];\n\n\n\n // a--b d\n\n // | |\n\n // c c--e\n\n let mut vertex = &mut self.vertex_chars[i * 6..i * 6 + 6];\n\n\n\n vertex[0].position[2] = px;\n\n vertex[1].position[2] = px;\n\n vertex[2].position[2] = px;\n\n vertex[3].position[2] = px;\n\n vertex[4].position[2] = px;\n\n vertex[5].position[2] = px;\n", "file_path": "client/src/renderer/glium/font.rs", "rank": 27, "score": 63364.07358431586 }, { "content": " p.fadeout = 0.25;\n\n p.lifetime = 0.8;\n\n p.remaining = 0.8;\n\n self.first_available_particle = p.next_available;\n\n self.max_used_particle = cmp::max(self.max_used_particle, p.id + 1);\n\n Some(p)\n\n\n\n } else {\n\n None\n\n }\n\n\n\n }\n\n\n\n pub fn draw<F>(\n\n &mut self, dt: f32, mut draw_callback: F\n\n\n\n ) where F: FnMut(usize, &Particle, f32) {\n\n\n\n let mut max_used_particle = 0;\n\n\n", "file_path": "client/src/renderer/particle_system.rs", "rank": 28, "score": 63363.35532374851 }, { "content": " self.key_state[k as usize] = false;\n\n },\n\n\n\n TimerTick{timestamp: t, ..} => {\n\n self.set_time(t);\n\n self.redraw = true;\n\n },\n\n\n\n _ => ()\n\n\n\n }\n\n }\n\n\n\n fn running(&mut self) -> bool {\n\n self.is_running\n\n }\n\n\n\n fn draw(&mut self) {\n\n self.core.flip_display();\n\n self.key_state_old = self.key_state;\n\n }\n\n\n\n}\n\n\n", "file_path": "client/src/renderer/allegro/mod.rs", "rank": 29, "score": 63363.34923567699 }, { "content": "// External Dependencies ------------------------------------------------------\n\nuse std::thread;\n\nuse std::time::Duration;\n\nuse rand::{SeedableRng, XorShiftRng};\n\nuse glium::backend::glutin_backend::GlutinFacade;\n\nuse glium::{glutin, Surface, Frame};\n\nuse clock_ticks;\n\n\n\n\n\n// Internal Dependencies ------------------------------------------------------\n\nmod traits;\n\nmod particle_system;\n\nmod font;\n\nuse shared::Color;\n\nuse shared::Lithium::Renderer;\n\nuse renderer::Particle;\n\nuse self::particle_system::GliumParticleSystem;\n\nuse self::font::Font;\n\n\n\n\n", "file_path": "client/src/renderer/glium/mod.rs", "rank": 30, "score": 63362.70671468979 }, { "content": "// External Dependencies ------------------------------------------------------\n\nuse allegro::{\n\n Core,\n\n Color as AllegroColor,\n\n Display,\n\n DisplayClose,\n\n EventQueue,\n\n KeyUp,\n\n KeyDown,\n\n Timer,\n\n TimerTick\n\n};\n\nuse rand::{SeedableRng, XorShiftRng};\n\nuse allegro_font::{FontDrawing, FontAddon, Font, FontAlign};\n\nuse allegro_primitives::PrimitivesAddon;\n\n\n\n\n\n// Internal Dependencies ------------------------------------------------------\n\nmod traits;\n\nmod particle_system;\n", "file_path": "client/src/renderer/allegro/mod.rs", "rank": 31, "score": 63362.65639656225 }, { "content": "\n\n client.destroy(&mut renderer);\n\n\n\n }\n\n\n\n\n\n // Time Related -----------------------------------------------------------\n\n fn time(&self) -> f64 {\n\n self.time\n\n }\n\n\n\n fn set_time(&mut self, time: f64) {\n\n self.time = time;\n\n }\n\n\n\n fn delta_time(&self) -> f32{\n\n self.dt\n\n }\n\n\n\n fn set_delta_time(&mut self, dt: f32) {\n", "file_path": "client/src/renderer/allegro/traits.rs", "rank": 32, "score": 63362.40389007907 }, { "content": " }\n\n\n\n\n\n // Drawing Methods --------------------------------------------------------\n\n pub fn clear(&mut self, color: &Color) {\n\n self.core.clear_to_color(AllegroRenderer::get_color(color));\n\n }\n\n\n\n pub fn triangle(\n\n &mut self, color: &Color,\n\n ax: f32, ay: f32,\n\n bx: f32, by: f32,\n\n cx: f32, cy: f32,\n\n line_width: f32\n\n ) {\n\n self.prim.draw_triangle(\n\n ax, ay, bx, by, cx, cy, AllegroRenderer::get_color(color), line_width\n\n );\n\n }\n\n\n", "file_path": "client/src/renderer/allegro/mod.rs", "rank": 33, "score": 63362.35782812806 }, { "content": " }\n\n\n\n pub fn rng(&mut self) -> &mut XorShiftRng {\n\n &mut self.rng\n\n }\n\n\n\n\n\n // Color Conversion -------------------------------------------------------\n\n pub fn get_color(color: &Color) -> [f32; 4] {\n\n [\n\n color.r as f32 / 255.0,\n\n color.g as f32 / 255.0,\n\n color.b as f32 / 255.0,\n\n color.a as f32 / 255.0\n\n ]\n\n }\n\n\n\n\n\n // Perspective Conversion -------------------------------------------------\n\n fn perspective(width: f32, height: f32) -> [[f32; 4]; 4] {\n", "file_path": "client/src/renderer/glium/mod.rs", "rank": 34, "score": 63362.22525343366 }, { "content": "// External Dependencies ------------------------------------------------------\n\nuse std::cmp;\n\n\n\n\n\n// Internal Dependencies ------------------------------------------------------\n\nuse shared::{Color, ColorName};\n\n\n\n\n\n// ParticleSystem -------------------------------------------------------------\n\npub struct ParticleSystem {\n\n pub first_available_particle: usize,\n\n pub max_used_particle: usize,\n\n pub particles: Vec<Particle>\n\n}\n\n\n\nimpl ParticleSystem {\n\n\n\n pub fn new(max_particles: usize) -> ParticleSystem {\n\n\n\n let mut particles = Vec::with_capacity(max_particles);\n", "file_path": "client/src/renderer/particle_system.rs", "rank": 35, "score": 63361.205123962514 }, { "content": " let a = alpha as f32 / 255.0;\n\n AllegroColor::from_rgb(\n\n (color.r as f32 * a) as u8,\n\n (color.g as f32 * a) as u8,\n\n (color.b as f32 * a) as u8\n\n )\n\n }\n\n\n\n}\n\n\n\n\n\n// Internal Methods required for trait implementation -------------------------\n\nimpl AllegroRenderer {\n\n\n\n fn should_draw(&mut self) -> bool {\n\n let redraw = self.redraw;\n\n self.redraw = false;\n\n redraw\n\n }\n\n\n", "file_path": "client/src/renderer/allegro/mod.rs", "rank": 36, "score": 63360.948749713454 }, { "content": " // Clear previously used vertices\n\n if self.last_length > positions.len() {\n\n for i in positions.len()..self.last_length {\n\n let mut vertex = &mut self.vertex_chars[i * 6..i * 6 + 6];\n\n let py = -100.0;\n\n vertex[0].position[3] = py;\n\n vertex[1].position[3] = py;\n\n vertex[2].position[3] = py;\n\n vertex[3].position[3] = py;\n\n vertex[4].position[3] = py;\n\n vertex[5].position[3] = py;\n\n }\n\n }\n\n\n\n self.last_length = positions.len();\n\n\n\n // Render new text\n\n for (i, p) in positions.iter().enumerate() {\n\n\n\n let px = p.screen_rect.x as f32 + x + 0.5;\n", "file_path": "client/src/renderer/glium/font.rs", "rank": 37, "score": 63360.36532168631 }, { "content": "// STD Dependencies -----------------------------------------------------------\n\nuse std::cmp;\n\nuse std::fs::File;\n\n\n\n\n\n// External Dependencies ------------------------------------------------------\n\nuse image;\n\nuse glium;\n\nuse glium::Surface;\n\nuse bmfont::{BMFont, OrdinateOrientation};\n\n\n\n\n\n// Statics --------------------------------------------------------------------\n\nconst MAX_CHARS: usize = 256;\n\n\n\n\n\n// BMFont Abstraction ---------------------------------------------------------\n\npub struct Font {\n\n\n\n last_length: usize,\n", "file_path": "client/src/renderer/glium/font.rs", "rank": 38, "score": 63360.05717117997 }, { "content": "\n\n bm_font: BMFont,\n\n\n\n scale: f32,\n\n tex_size: f32,\n\n texture: glium::texture::unsigned_texture2d::UnsignedTexture2d,\n\n\n\n vertex_chars: Vec<Vertex>,\n\n vertices: glium::VertexBuffer<Vertex>,\n\n indices: glium::index::NoIndices,\n\n\n\n params: glium::DrawParameters<'static>,\n\n program: glium::Program\n\n\n\n}\n\n\n\nimpl Font {\n\n\n\n pub fn new(display: &glium::backend::glutin_backend::GlutinFacade, font_file: &str, image_file: &str) -> Font {\n\n\n", "file_path": "client/src/renderer/glium/font.rs", "rank": 39, "score": 63359.681282257014 }, { "content": " ParticleSystem {\n\n first_available_particle: 0,\n\n max_used_particle: 0,\n\n particles: particles\n\n }\n\n\n\n }\n\n\n\n pub fn get(&mut self) -> Option<&mut Particle> {\n\n\n\n if let Some(p) = self.particles.get_mut(self.first_available_particle) {\n\n p.active = true;\n\n p.x = 0.0;\n\n p.y = 0.0;\n\n p.s = 5.0;\n\n p.sms = -2.5;\n\n p.v = 0.0;\n\n p.vms = 0.0;\n\n p.r = 0.0;\n\n p.rms = 0.0;\n", "file_path": "client/src/renderer/particle_system.rs", "rank": 40, "score": 63359.47844470136 }, { "content": "// Key Code Mapping -----------------------------------------------------------\n\n#[derive(Copy, Clone)]\n\npub enum KeyCode {\n\n A = 38,\n\n C = 54,\n\n D = 40,\n\n W = 25,\n\n Up = 111,\n\n Left = 113,\n\n Right = 114,\n\n LCtrl = 37,\n\n Enter = 36,\n\n Escape = 9\n\n}\n\n\n\n\n\n// Glium Based Renderer -------------------------------------------------------\n\npub struct GliumRenderer {\n\n\n\n // Timing\n", "file_path": "client/src/renderer/glium/mod.rs", "rank": 41, "score": 63359.42476935708 }, { "content": " &self.perspective,\n\n text,\n\n x, y,\n\n GliumRenderer::get_color(color)\n\n );\n\n }\n\n\n\n pub fn particle(&mut self) -> Option<&mut Particle> {\n\n self.particle_system.get()\n\n }\n\n\n\n pub fn draw_particles(&mut self) {\n\n let mut target = self.target.as_mut().unwrap();\n\n self.particle_system.draw(&mut target, &self.perspective, self.dt);\n\n }\n\n\n\n\n\n // RNG --------------------------------------------------------------------\n\n pub fn reseed_rng(&mut self, seed: [u32; 4]) {\n\n self.rng.reseed(seed);\n", "file_path": "client/src/renderer/glium/mod.rs", "rank": 42, "score": 63359.36914711336 }, { "content": " let img = image::load(File::open(image_file).unwrap(), image::PNG).unwrap().to_rgba();\n\n let image_dimensions = img.dimensions();\n\n let img = glium::texture::RawImage2d::from_raw_rgba_reversed(img.into_raw(), image_dimensions);\n\n let bm_font = BMFont::new(File::open(font_file).unwrap(), OrdinateOrientation::TopToBottom).unwrap();\n\n let scale = (bm_font.line_height() + 1) as f32;\n\n\n\n let vertex_chars = char_vertices(MAX_CHARS);\n\n let vertices = glium::VertexBuffer::new(display, &vertex_chars).unwrap();\n\n\n\n Font {\n\n\n\n bm_font: bm_font,\n\n last_length: 0,\n\n\n\n scale: scale,\n\n tex_size: image_dimensions.0 as f32,\n\n texture: glium::texture::unsigned_texture2d::UnsignedTexture2d::new(\n\n display,\n\n img\n\n\n", "file_path": "client/src/renderer/glium/font.rs", "rank": 43, "score": 63359.1844931159 }, { "content": " pub fn text(&mut self, color: &Color, x: f32, mut y: f32, text: &str) {\n\n for s in text.split('\\n') {\n\n self.core.draw_text(\n\n &self.font, AllegroRenderer::get_color(color),\n\n x, y, FontAlign::Left, s\n\n );\n\n y += 12.0;\n\n }\n\n }\n\n\n\n pub fn particle(&mut self) -> Option<&mut Particle> {\n\n self.particle_system.get()\n\n }\n\n\n\n pub fn draw_particles(&mut self) {\n\n self.particle_system.draw(self.dt, &self.prim);\n\n }\n\n\n\n\n\n // RNG --------------------------------------------------------------------\n", "file_path": "client/src/renderer/allegro/mod.rs", "rank": 44, "score": 63359.11645953637 }, { "content": " // Angle modification per second\n\n pub rms: f32,\n\n\n\n pub fadeout: f32,\n\n pub lifetime: f32,\n\n pub remaining: f32,\n\n\n\n pub id: usize,\n\n pub next_available: usize\n\n\n\n}\n\n\n\nimpl Particle {\n\n\n\n fn is_active(&mut self) -> bool {\n\n self.active\n\n }\n\n\n\n fn step(&mut self, dt: f32) -> bool {\n\n if self.remaining <= 0.0 {\n", "file_path": "client/src/renderer/particle_system.rs", "rank": 45, "score": 63358.74075945183 }, { "content": " fn draw(&mut self) {\n\n let target = self.target.take().unwrap();\n\n target.finish().unwrap();\n\n self.display.swap_buffers().unwrap(); // TODO make sure vsync doesn't screw with us here\n\n self.key_state_old = self.key_state;\n\n }\n\n\n\n}\n\n\n", "file_path": "client/src/renderer/glium/mod.rs", "rank": 46, "score": 63358.62040599853 }, { "content": " for i in 0..self.max_used_particle {\n\n let particle = self.particles.get_mut(i).unwrap();\n\n if particle.is_active() {\n\n if particle.step(dt) == false {\n\n particle.next_available = self.first_available_particle;\n\n self.first_available_particle = particle.id;\n\n\n\n } else {\n\n max_used_particle = cmp::max(\n\n particle.id + 1,\n\n max_used_particle\n\n );\n\n }\n\n\n\n let lp = 1.0 / particle.lifetime * particle.remaining;\n\n let alpha = if lp <= particle.fadeout {\n\n 255.0 / (particle.lifetime * particle.fadeout) * particle.remaining.max(0.0)\n\n\n\n } else {\n\n 255.0\n", "file_path": "client/src/renderer/particle_system.rs", "rank": 47, "score": 63358.57879203621 }, { "content": " //pub fn text(&self) -> Text {\n\n\n\n //}\n\n\n\n pub fn draw(\n\n &mut self, target: &mut glium::Frame,\n\n pm: &[[f32; 4]; 4], text: &str, x: f32, y: f32, color: [f32; 4]\n\n ) {\n\n\n\n // TODO support alignment\n\n // TODO allow partial write?\n\n\n\n // TODO add a text type which contains the pre-parsed text,\n\n // this can speed up commonly used text segments\n\n // dynamic parts should be placed on their own?\n\n let positions = self.bm_font.parse(\n\n &text[0..cmp::min(text.len(), MAX_CHARS)]\n\n\n\n ).unwrap();\n\n\n", "file_path": "client/src/renderer/glium/font.rs", "rank": 48, "score": 63357.95688373952 }, { "content": "\n\n pub fn key_released(&mut self, key_code: KeyCode) -> bool {\n\n !self.key_state[key_code as usize] && self.key_state_old[key_code as usize]\n\n }\n\n\n\n\n\n // Drawing Methods --------------------------------------------------------\n\n pub fn clear(&mut self, color: &Color) {\n\n self.target.as_mut().unwrap().clear_color(\n\n color.r as f32 / 255.0,\n\n color.g as f32 / 255.0,\n\n color.b as f32 / 255.0,\n\n color.a as f32 / 255.0\n\n );\n\n }\n\n\n\n pub fn text(&mut self, color: &Color, x: f32, y: f32, text: &str) {\n\n let mut target = self.target.as_mut().unwrap();\n\n self.font.draw(\n\n &mut target,\n", "file_path": "client/src/renderer/glium/mod.rs", "rank": 49, "score": 63356.7535467485 }, { "content": " perspective: *pm,\n\n color: color,\n\n scale: self.scale,\n\n texSize: self.tex_size,\n\n offset: [0.0, 0.0f32],\n\n tex: &self.texture\n\n };\n\n\n\n target.draw(\n\n &self.vertices, &self.indices, &self.program, &uniforms,\n\n &self.params\n\n\n\n ).unwrap();\n\n\n\n }\n\n\n\n}\n\n\n", "file_path": "client/src/renderer/glium/font.rs", "rank": 50, "score": 63356.66775059246 }, { "content": " };\n\n\n\n draw_callback(i, particle, alpha);\n\n\n\n }\n\n }\n\n\n\n self.max_used_particle = max_used_particle;\n\n\n\n }\n\n\n\n}\n\n\n\n\n\n// Particle -------------------------------------------------------------------\n\npub struct Particle {\n\n\n\n active: bool,\n\n\n\n pub color: Color,\n", "file_path": "client/src/renderer/particle_system.rs", "rank": 51, "score": 63356.66715863289 }, { "content": "\n\n // Position\n\n pub x: f32,\n\n pub y: f32,\n\n\n\n // Velocity\n\n pub v: f32,\n\n\n\n // Size\n\n pub s: f32,\n\n\n\n // Size modification per second\n\n pub sms: f32,\n\n\n\n // Velocity modification per seond\n\n pub vms: f32,\n\n\n\n // Angle\n\n pub r: f32,\n\n\n", "file_path": "client/src/renderer/particle_system.rs", "rank": 52, "score": 63356.50698040447 }, { "content": " pub fn reseed_rng(&mut self, seed: [u32; 4]) {\n\n self.rng.reseed(seed);\n\n }\n\n\n\n pub fn rng(&mut self) -> &mut XorShiftRng {\n\n &mut self.rng\n\n }\n\n\n\n\n\n // Color Conversion -------------------------------------------------------\n\n pub fn get_color(color: &Color) -> AllegroColor {\n\n let a = color.a as f32 / 255.0;\n\n AllegroColor::from_rgb(\n\n (color.r as f32 * a) as u8,\n\n (color.g as f32 * a) as u8,\n\n (color.b as f32 * a) as u8\n\n )\n\n }\n\n\n\n pub fn get_color_with_alpha(color: &Color, alpha: u8) -> AllegroColor {\n", "file_path": "client/src/renderer/allegro/mod.rs", "rank": 53, "score": 63356.38982451329 }, { "content": " self.dt = dt;\n\n }\n\n\n\n fn delta_u(&self) -> f32 {\n\n self.u\n\n }\n\n\n\n fn set_delta_u(&mut self, u: f32) {\n\n self.u = u;\n\n }\n\n\n\n\n\n // Frame / Tick Rate ------------------------------------------------------\n\n fn fps(&self) -> u32 {\n\n self.frame_rate\n\n }\n\n\n\n fn set_fps(&mut self, frame_rate: u32) {\n\n self.frame_rate = frame_rate;\n\n self.timer.set_speed(1.0 / frame_rate as f64);\n", "file_path": "client/src/renderer/allegro/traits.rs", "rank": 55, "score": 63356.05899092899 }, { "content": " }\n\n\n\n fn tick_rate(&self) -> u32 {\n\n self.tick_rate\n\n }\n\n\n\n fn set_tick_rate(&mut self, tick_rate: u32) {\n\n self.tick_rate = tick_rate;\n\n }\n\n\n\n\n\n // Interpolation ----------------------------------------------------------\n\n fn interpolation_ticks(&self) -> usize {\n\n self.interpolation_ticks\n\n }\n\n\n\n fn set_interpolation_ticks(&mut self, ticks: usize) {\n\n self.interpolation_ticks = ticks;\n\n }\n\n\n\n}\n\n\n", "file_path": "client/src/renderer/allegro/traits.rs", "rank": 56, "score": 63356.05899092899 }, { "content": " tick_rate: u32,\n\n time: f64,\n\n dt: f32,\n\n u: f32,\n\n\n\n // Drawing\n\n particle_system: AllegroParticleSystem,\n\n interpolation_ticks: usize,\n\n\n\n // Input\n\n key_state: [bool; 256],\n\n key_state_old: [bool; 256],\n\n\n\n // Internal State\n\n rng: XorShiftRng,\n\n is_running: bool,\n\n redraw: bool\n\n\n\n}\n\n\n", "file_path": "client/src/renderer/allegro/mod.rs", "rank": 59, "score": 63355.46510571503 }, { "content": " r#\"\n\n #version 140\n\n\n\n uniform mat4 perspective;\n\n uniform float texSize;\n\n uniform vec2 offset;\n\n uniform float scale;\n\n\n\n attribute vec4 position;\n\n attribute vec4 char_coords;\n\n\n\n out vec2 tc;\n\n\n\n void main() {\n\n\n\n mat4 matrix;\n\n matrix[0][0] = scale;\n\n matrix[1][1] = scale;\n\n matrix[2][2] = scale;\n\n matrix[3][0] = position[2];\n", "file_path": "client/src/renderer/glium/font.rs", "rank": 60, "score": 63355.42823174065 }, { "content": " timer: timer,\n\n font: font,\n\n particle_system: AllegroParticleSystem::new(1000),\n\n is_running: true,\n\n redraw: false,\n\n frame_rate: 60,\n\n tick_rate: 60,\n\n time: 0.0,\n\n dt: 0.0,\n\n u: 0.0,\n\n key_state: [false; 256],\n\n key_state_old: [false; 256],\n\n rng: XorShiftRng::new_unseeded(),\n\n interpolation_ticks: 0\n\n }\n\n\n\n }\n\n\n\n\n\n // Window Handling --------------------------------------------------------\n", "file_path": "client/src/renderer/allegro/mod.rs", "rank": 61, "score": 63355.411721886165 }, { "content": " frame_rate: u32,\n\n tick_rate: u32,\n\n time: f64,\n\n dt: f32,\n\n u: f32,\n\n\n\n // Drawing\n\n target: Option<Frame>,\n\n display: GlutinFacade,\n\n font: Font,\n\n perspective: [[f32; 4]; 4],\n\n particle_system: GliumParticleSystem,\n\n interpolation_ticks: usize,\n\n\n\n // Input\n\n key_state: [bool; 256],\n\n key_state_old: [bool; 256],\n\n\n\n // Internal State\n\n rng: XorShiftRng,\n", "file_path": "client/src/renderer/glium/mod.rs", "rank": 62, "score": 63355.23235369444 }, { "content": " ).unwrap(),\n\n\n\n vertex_chars: vertex_chars,\n\n vertices: vertices,\n\n indices: glium::index::NoIndices(glium::index::PrimitiveType::TrianglesList),\n\n\n\n params: glium::DrawParameters {\n\n dithering: false,\n\n smooth: Some(glium::Smooth::Fastest),\n\n blend: glium::Blend {\n\n color: glium::BlendingFunction::Addition {\n\n source: glium::LinearBlendingFactor::SourceAlpha,\n\n destination: glium::LinearBlendingFactor::OneMinusSourceAlpha\n\n },\n\n .. Default::default()\n\n },\n\n .. Default::default()\n\n },\n\n program: glium::Program::from_source(\n\n display,\n", "file_path": "client/src/renderer/glium/font.rs", "rank": 63, "score": 63354.909878185266 }, { "content": " matrix[3][1] = position[3];\n\n matrix[3][3] = 1.0;\n\n\n\n // Transform from absolute texture coordinates to normalized texture coordinates\n\n // This works because the rectangle spans [0,1] x [0,1]\n\n // Depending on where the origin lies in your texture (i.e. topleft or bottom left corner),\n\n // you need to replace \"1. - position.y\" with just \"position.y\"\n\n tc = (char_coords.xy + char_coords.zw * vec2(position.x, 1.0 - position.y)) / texSize;\n\n\n\n // Map the vertices of the unit square to a rectangle with\n\n // correct aspect ratio and positioned at the correct offset\n\n float x = (char_coords[2] * position.x + offset.x) / char_coords[3];\n\n float y = position.y + offset.y / char_coords[3];\n\n\n\n // Apply the model, view and projection transformations\n\n gl_Position = perspective * matrix * vec4(x, y, 0.0, 1.0);\n\n\n\n }\n\n \"#,\n\n r#\"\n", "file_path": "client/src/renderer/glium/font.rs", "rank": 64, "score": 63354.70927792119 }, { "content": "\n\n vertex[0].position[3] = py;\n\n vertex[1].position[3] = py;\n\n vertex[2].position[3] = py;\n\n vertex[3].position[3] = py;\n\n vertex[4].position[3] = py;\n\n vertex[5].position[3] = py;\n\n\n\n vertex[0].char_coords = char_coords;\n\n vertex[1].char_coords = char_coords;\n\n vertex[2].char_coords = char_coords;\n\n vertex[3].char_coords = char_coords;\n\n vertex[4].char_coords = char_coords;\n\n vertex[5].char_coords = char_coords;\n\n\n\n }\n\n\n\n self.vertices.write(&self.vertex_chars);\n\n\n\n let uniforms = uniform! {\n", "file_path": "client/src/renderer/glium/font.rs", "rank": 65, "score": 63352.62201143519 }, { "content": " for i in 0..max_particles {\n\n particles.push(Particle {\n\n active: false,\n\n color: Color::from_name(ColorName::Black),\n\n x: 0.0,\n\n y: 0.0,\n\n s: 1.0,\n\n sms: 0.0,\n\n v: 0.0,\n\n vms: 0.0,\n\n r: 0.0,\n\n rms: 0.0,\n\n fadeout: 0.0,\n\n lifetime: 0.0,\n\n remaining: 0.0,\n\n id: i,\n\n next_available: i + 1,\n\n });\n\n }\n\n\n", "file_path": "client/src/renderer/particle_system.rs", "rank": 66, "score": 63352.62201143519 }, { "content": " #version 140\n\n\n\n uniform vec4 color;\n\n uniform usampler2D tex;\n\n\n\n in vec2 tc;\n\n out vec4 outColor;\n\n\n\n void main() {\n\n uvec4 vec_tex;\n\n vec_tex = texture(tex, tc);\n\n outColor = color * vec4(1.0, 1.0, 1.0, float(vec_tex.x) / 255.0);\n\n }\n\n \"#,\n\n None\n\n\n\n ).unwrap()\n\n }\n\n }\n\n\n", "file_path": "client/src/renderer/glium/font.rs", "rank": 67, "score": 63352.62201143519 }, { "content": " self.active = false;\n\n false\n\n\n\n } else {\n\n self.x += self.r.cos() * self.v * dt;\n\n self.y += self.r.sin() * self.v * dt;\n\n self.s += self.sms * dt;\n\n self.r += self.rms * dt;\n\n self.v += self.vms * dt;\n\n self.remaining -= dt;\n\n true\n\n }\n\n }\n\n\n\n}\n\n\n", "file_path": "client/src/renderer/particle_system.rs", "rank": 68, "score": 63352.62201143519 }, { "content": "// External Dependencies ------------------------------------------------------\n\nuse allegro_primitives::PrimitivesAddon;\n\n\n\n\n\n// Internal Dependencies ------------------------------------------------------\n\nuse renderer::{ParticleSystem, Particle};\n\nuse super::AllegroRenderer;\n\n\n\n\n\n// Allegro based ParticleSystem -----------------------------------------------\n\npub struct AllegroParticleSystem {\n\n system: ParticleSystem\n\n}\n\n\n\nimpl AllegroParticleSystem {\n\n\n\n pub fn new(max_particles: usize) -> AllegroParticleSystem {\n\n AllegroParticleSystem {\n\n system: ParticleSystem::new(max_particles)\n\n }\n", "file_path": "client/src/renderer/allegro/particle_system.rs", "rank": 69, "score": 61559.15053898201 }, { "content": "// External Dependencies ------------------------------------------------------\n\nuse glium;\n\nuse glium::Surface;\n\n\n\n\n\n// Internal Dependencies ------------------------------------------------------\n\nuse renderer::{ParticleSystem, Particle};\n\n\n\n\n\n// Glium based ParticleSystem -------------------------------------------------\n\npub struct GliumParticleSystem {\n\n\n\n system: ParticleSystem,\n\n\n\n vertex_particles: Vec<Vertex>,\n\n vertices: glium::VertexBuffer<Vertex>,\n\n indices: glium::index::NoIndices,\n\n\n\n params: glium::DrawParameters<'static>,\n\n program: glium::Program\n", "file_path": "client/src/renderer/glium/particle_system.rs", "rank": 70, "score": 61558.59974112944 }, { "content": "\n\n}\n\n\n\nimpl GliumParticleSystem {\n\n\n\n pub fn new(display: &glium::backend::glutin_backend::GlutinFacade, max_particles: usize) -> GliumParticleSystem {\n\n\n\n let mut vertex_particles = Vec::with_capacity(max_particles);\n\n for _ in 0..max_particles {\n\n vertex_particles.push(Vertex {\n\n position: [-1.0, 1.0],\n\n scale: 0.0,\n\n color: [1.0, 0.0, 0.0, 1.0]\n\n })\n\n }\n\n\n\n let vertices = glium::VertexBuffer::new(display, &vertex_particles).unwrap();\n\n\n\n GliumParticleSystem {\n\n\n", "file_path": "client/src/renderer/glium/particle_system.rs", "rank": 71, "score": 61558.01764296727 }, { "content": " }\n\n\n\n pub fn get(&mut self) -> Option<&mut Particle> {\n\n self.system.get()\n\n }\n\n\n\n pub fn draw(&mut self, dt: f32, prim: &PrimitivesAddon) {\n\n self.system.draw(dt, |_, ref particle, alpha| {\n\n let hs = particle.s / 2.0;\n\n prim.draw_filled_rectangle(\n\n particle.x - hs + 0.5, particle.y - hs + 0.5,\n\n particle.x + hs + 0.5, particle.y + hs + 0.5,\n\n AllegroRenderer::get_color_with_alpha(&particle.color, alpha as u8)\n\n );\n\n });\n\n }\n\n\n\n}\n\n\n", "file_path": "client/src/renderer/allegro/particle_system.rs", "rank": 72, "score": 61555.75948245566 }, { "content": " glium::program::ProgramCreationInput::SourceCode {\n\n tessellation_control_shader: None,\n\n tessellation_evaluation_shader: None,\n\n geometry_shader: None,\n\n outputs_srgb: false,\n\n uses_point_size: true,\n\n vertex_shader: r#\"\n\n #version 140\n\n\n\n in vec2 position;\n\n in float scale;\n\n in vec4 color;\n\n out vec4 colorV;\n\n\n\n uniform mat4 perspective;\n\n\n\n void main() {\n\n colorV = color;\n\n gl_PointSize = scale;\n\n gl_Position = perspective * vec4(position, 0.0, 1.0);\n", "file_path": "client/src/renderer/glium/particle_system.rs", "rank": 73, "score": 61555.39950984026 }, { "content": " pub fn get(&mut self) -> Option<&mut Particle> {\n\n self.system.get()\n\n }\n\n\n\n pub fn draw(\n\n &mut self, target: &mut glium::Frame, pm: &[[f32; 4]; 4], dt: f32\n\n ) {\n\n\n\n let particles = &mut self.vertex_particles;\n\n self.system.draw(dt, |i, ref particle, alpha| {\n\n let mut vertex = particles.get_mut(i).unwrap();\n\n vertex.position[0] = particle.x;\n\n vertex.position[1] = particle.y;\n\n vertex.scale = particle.s;\n\n vertex.color[0] = particle.color.r as f32 / 255.0;\n\n vertex.color[1] = particle.color.g as f32 / 255.0;\n\n vertex.color[2] = particle.color.b as f32 / 255.0;\n\n vertex.color[3] = alpha / 255.0;\n\n });\n\n\n", "file_path": "client/src/renderer/glium/particle_system.rs", "rank": 74, "score": 61553.87155400624 }, { "content": " system: ParticleSystem::new(max_particles),\n\n\n\n vertex_particles: vertex_particles,\n\n vertices: vertices,\n\n indices: glium::index::NoIndices(glium::index::PrimitiveType::Points),\n\n\n\n params: glium::DrawParameters {\n\n polygon_mode: glium::draw_parameters::PolygonMode::Point,\n\n blend: glium::Blend {\n\n color: glium::BlendingFunction::Addition {\n\n source: glium::LinearBlendingFactor::SourceAlpha,\n\n destination: glium::LinearBlendingFactor::OneMinusSourceAlpha\n\n },\n\n .. Default::default()\n\n },\n\n .. Default::default()\n\n },\n\n\n\n program: glium::Program::new(\n\n display,\n", "file_path": "client/src/renderer/glium/particle_system.rs", "rank": 75, "score": 61551.96312264936 }, { "content": " }\n\n \"#,\n\n fragment_shader: r#\"\n\n #version 140\n\n\n\n in vec4 colorV;\n\n out vec4 outColor;\n\n\n\n void main() {\n\n outColor = colorV;\n\n }\n\n \"#,\n\n transform_feedback_varyings: None\n\n }\n\n ).unwrap()\n\n\n\n }\n\n\n\n }\n\n\n", "file_path": "client/src/renderer/glium/particle_system.rs", "rank": 76, "score": 61549.776552298 }, { "content": " self.vertices.write(particles);\n\n\n\n let uniforms = uniform! {\n\n perspective: *pm\n\n };\n\n\n\n target.draw(\n\n &self.vertices, &self.indices, &self.program, &uniforms,\n\n &self.params\n\n\n\n ).unwrap();\n\n\n\n }\n\n\n\n}\n\n\n\n// Particle Vertex ------------------------------------------------------------\n\n#[derive(Copy, Clone)]\n\npub struct Vertex {\n\n pub position: [f32; 2],\n\n pub scale: f32,\n\n pub color: [f32; 4]\n\n}\n\n\n\nimplement_vertex!(Vertex, position, scale, color);\n\n\n", "file_path": "client/src/renderer/glium/particle_system.rs", "rank": 77, "score": 61549.776552298 }, { "content": "#[derive(Copy, Clone)]\n\nstruct Vertex {\n\n position: [f32; 4],\n\n char_coords: [f32; 4]\n\n}\n\n\n\nimplement_vertex!(Vertex, position, char_coords);\n\n\n", "file_path": "client/src/renderer/glium/font.rs", "rank": 78, "score": 61549.776552298 }, { "content": "// Abstract Event -------------------------------------------------------------\n\npub trait Event: Encodable + Decodable + Default {}\n\n\n\n\n\n// Event Handler --------------------------------------------------------------\n\npub struct EventHandler<T: Event> {\n\n incoming: Option<Vec<(ConnectionID, T)>>,\n\n outgoing: Vec<(Option<ConnectionID>, T)>\n\n}\n\n\n\nimpl<T: Event> EventHandler<T> {\n\n\n\n pub fn new() -> EventHandler<T> {\n\n EventHandler {\n\n incoming: None,\n\n outgoing: Vec::new()\n\n }\n\n }\n\n\n\n pub fn send_to(&mut self, receiver: Option<ConnectionID>, event: T) {\n\n self.outgoing.push((receiver, event));\n", "file_path": "lithium-rs/src/event.rs", "rank": 79, "score": 54948.31108185429 }, { "content": "// Main -----------------------------------------------------------------------\n\nfn main() {\n\n\n\n let args = clap::App::new(\"shooter-server\")\n\n .version(&crate_version!())\n\n .author(\"Ivo Wetzel <[email protected]>\")\n\n .about(\"Shooter Server\")\n\n .arg(clap::Arg::with_name(\"address:port\")\n\n .help(\"Local server address to bind to.\")\n\n .index(1)\n\n\n\n ).get_matches();\n\n\n\n\n\n // Arguments --------------------------------------------------------------\n\n let server_addr = value_t!(\n\n args.value_of(\"address:port\"), SocketAddr\n\n\n\n ).unwrap_or(SocketAddr::from_str(\"127.0.0.1:31475\").unwrap());\n\n\n\n\n\n // Server Setup -----------------------------------------------------------\n\n if let Err(err) = Server::run(server_addr, game::Game::server(30, false)) {\n\n println!(\"[Server] [Fatal] {:?}\", err);\n\n }\n\n\n\n}\n\n\n", "file_path": "server/src/main.rs", "rank": 80, "score": 52924.674628736255 }, { "content": "// Basic Entity Traits --------------------------------------------------------\n\npub trait BaseEntity<S: EntityState, L: BaseLevel<S>> {\n\n\n\n fn type_id(&self) -> u8;\n\n\n\n fn apply_input(\n\n &mut self, level: &Level<S, L>, state: &mut S, input: &EntityInput, dt: f32\n\n );\n\n\n\n fn visible_to(&self, _: &ConnectionID) -> bool {\n\n true\n\n }\n\n\n\n fn serialize_state(&self, _: &mut S, _: &ConnectionID) {}\n\n\n\n fn event(&mut self, _: &EntityEvent, _: &S) {}\n\n\n\n}\n\n\n", "file_path": "lithium-rs/src/entity/traits.rs", "rank": 81, "score": 52486.99141783493 }, { "content": "fn hue_to_rgb(p: f32, q: f32, t: f32) -> f32 {\n\n\n\n let t = if t < 0.0 {\n\n t + 1.0\n\n\n\n } else if t > 1.0 {\n\n t - 1.0\n\n\n\n } else {\n\n t\n\n };\n\n\n\n if t < 1.0 / 6.0 {\n\n p + (q - p) * 6.0 * t\n\n\n\n } else if t < 1.0 / 2.0 {\n\n q\n\n\n\n } else if t < 2.0 / 3.0 {\n\n p + (q - p) * (2.0 / 3.0 - t) * 6.0\n\n\n\n } else {\n\n p\n\n }\n\n\n\n}\n\n\n", "file_path": "shared/src/color.rs", "rank": 82, "score": 41942.807813290725 }, { "content": "// Helpers --------------------------------------------------------------------\n\npub fn tick_is_more_recent(a: u8, b: u8) -> bool {\n\n (a > b) && (a - b <= 128) || (b > a) && (b - a > 128)\n\n}\n\n\n", "file_path": "lithium-rs/src/entity/entity.rs", "rank": 83, "score": 41016.24332696956 }, { "content": "// Renderer Abstraction -------------------------------------------------------\n\npub trait Renderer {\n\n\n\n // Statics ----------------------------------------------------------------\n\n fn run<\n\n H: Handler<Self, G, L, E, S>,\n\n E: Event,\n\n S: EntityState,\n\n L: BaseLevel<S>,\n\n G: EntityRegistry<S, L, Self>\n\n\n\n >(_: Client<H, Self, G, L, E, S>) where Self: Sized {}\n\n\n\n // Time Related -----------------------------------------------------------\n\n fn time(&self) -> f64;\n\n\n\n fn set_time(&mut self, time: f64);\n\n\n\n fn delta_time(&self) -> f32;\n\n\n\n fn set_delta_time(&mut self, dt: f32);\n", "file_path": "lithium-rs/src/renderer.rs", "rank": 84, "score": 39680.74267149871 }, { "content": "// External Dependencies ------------------------------------------------------\n\nuse lithium::Event;\n\n\n\n\n\n// Internal Dependencies ------------------------------------------------------\n\nuse command::SharedCommand;\n\n\n\n// Game Events ----------------------------------------------------------------\n\n#[derive(Debug, RustcEncodable, RustcDecodable)]\n\npub enum SharedEvent {\n\n JoinGame,\n\n GameJoined,\n\n LeaveGame,\n\n PlayerJoined,\n\n PlayerLeft,\n\n Command(SharedCommand),\n\n Unknown\n\n}\n\n\n\nimpl Event for SharedEvent {}\n\n\n\nimpl Default for SharedEvent {\n\n fn default() -> SharedEvent {\n\n SharedEvent::Unknown\n\n }\n\n}\n\n\n", "file_path": "shared/src/event.rs", "rank": 85, "score": 36591.06603012835 }, { "content": " }\n\n\n\n pub fn send(&mut self, event: T) {\n\n self.outgoing.push((None, event));\n\n }\n\n\n\n pub fn received(&mut self) -> Option<Vec<(ConnectionID, T)>> {\n\n self.incoming.take()\n\n }\n\n\n\n pub fn receive_events(&mut self, owner_id: ConnectionID, mut data: &[u8]) {\n\n\n\n let mut incoming = Vec::new();\n\n while let Ok(event) = decode_from::<&[u8], T>(\n\n &mut data, SizeLimit::Bounded(256)\n\n ) {\n\n incoming.push((owner_id, event));\n\n }\n\n\n\n self.incoming = Some(incoming);\n", "file_path": "lithium-rs/src/event.rs", "rank": 86, "score": 35486.811813315464 }, { "content": "// External Dependencies ------------------------------------------------------\n\nuse bincode::SizeLimit;\n\nuse cobalt::ConnectionID;\n\nuse rustc_serialize::{Encodable, Decodable};\n\nuse bincode::rustc_serialize::{encode,decode_from};\n\n\n\n\n\n// Abstract Event -------------------------------------------------------------\n", "file_path": "lithium-rs/src/event.rs", "rank": 87, "score": 35486.791933250046 }, { "content": "\n\n }\n\n\n\n pub fn serialize_events(&mut self, receiver: Option<&ConnectionID>) -> Option<Vec<u8>> {\n\n\n\n let outgoing: Vec<u8> = self.outgoing.iter().filter(|event| {\n\n if let Some(r) = receiver {\n\n match event.0 {\n\n Some(target) => {\n\n target == *r\n\n }\n\n None => true\n\n }\n\n\n\n } else {\n\n true\n\n }\n\n\n\n }).fold(Vec::new(), |mut data, event| {\n\n data.extend(encode(&event.1, SizeLimit::Infinite).unwrap());\n", "file_path": "lithium-rs/src/event.rs", "rank": 88, "score": 35486.353966711074 }, { "content": " data\n\n });\n\n\n\n if outgoing.is_empty() {\n\n None\n\n\n\n } else {\n\n Some(outgoing)\n\n }\n\n\n\n }\n\n\n\n pub fn flush(&mut self) {\n\n self.outgoing.clear();\n\n }\n\n\n\n}\n\n\n", "file_path": "lithium-rs/src/event.rs", "rank": 89, "score": 35479.38060740964 }, { "content": "// Server / Client Side Entity Events -----------------------------------------\n\n#[derive(Debug)]\n\npub enum EntityEvent {\n\n Tick(u8, f32),\n\n Created(u8, u16),\n\n Destroyed(u8, u16),\n\n Hide(u8),\n\n Show(u8),\n\n Flags(u8)\n\n}\n\n\n", "file_path": "lithium-rs/src/entity/event.rs", "rank": 90, "score": 34443.77115086258 }, { "content": "// Internal -------------------------------------------------------------------\n\nuse event::Event;\n\nuse level::BaseLevel;\n\nuse entity::{EntityState, EntityRegistry};\n\nuse client::{Client, Handler};\n\n\n\n\n\n// Renderer Abstraction -------------------------------------------------------\n", "file_path": "lithium-rs/src/renderer.rs", "rank": 91, "score": 33548.758218802366 }, { "content": "\n\n}\n\n\n\n\n\n// Default Noop Renderer Implementation ---------------------------------------\n\npub struct DefaultRenderer;\n\nimpl Renderer for DefaultRenderer {\n\n\n\n // Time Related -----------------------------------------------------------\n\n fn time(&self) -> f64 {\n\n 0.0\n\n }\n\n\n\n fn set_time(&mut self, _: f64) {\n\n }\n\n\n\n fn delta_time(&self) -> f32 {\n\n 0.0\n\n }\n\n\n", "file_path": "lithium-rs/src/renderer.rs", "rank": 92, "score": 33543.59290879637 }, { "content": " fn set_delta_time(&mut self, _: f32) {\n\n }\n\n\n\n fn delta_u(&self) -> f32 {\n\n 0.0\n\n }\n\n\n\n fn set_delta_u(&mut self, _: f32) {\n\n }\n\n\n\n\n\n // Frame / Tick Rate ------------------------------------------------------\n\n fn fps(&self) -> u32 {\n\n 0\n\n }\n\n\n\n fn set_fps(&mut self, _: u32) {\n\n }\n\n\n\n fn tick_rate(&self) -> u32 {\n", "file_path": "lithium-rs/src/renderer.rs", "rank": 93, "score": 33537.158990326025 }, { "content": "\n\n fn delta_u(&self) -> f32;\n\n\n\n fn set_delta_u(&mut self, u: f32);\n\n\n\n\n\n // Frame / Tick Rate ------------------------------------------------------\n\n fn fps(&self) -> u32;\n\n\n\n fn set_fps(&mut self, frame_rate: u32);\n\n\n\n fn tick_rate(&self) -> u32;\n\n\n\n fn set_tick_rate(&mut self, tick_rate: u32);\n\n\n\n\n\n // Interpolation ----------------------------------------------------------\n\n fn interpolation_ticks(&self) -> usize;\n\n\n\n fn set_interpolation_ticks(&mut self, ticks: usize);\n", "file_path": "lithium-rs/src/renderer.rs", "rank": 94, "score": 33537.14428842717 }, { "content": " 0\n\n }\n\n\n\n fn set_tick_rate(&mut self, _: u32) {\n\n }\n\n\n\n\n\n // Interpolation ----------------------------------------------------------\n\n fn interpolation_ticks(&self) -> usize {\n\n 0\n\n }\n\n\n\n fn set_interpolation_ticks(&mut self, _: usize) {\n\n }\n\n\n\n}\n\n\n", "file_path": "lithium-rs/src/renderer.rs", "rank": 95, "score": 33537.04236598518 }, { "content": "extern crate bmfont;\n\n#[cfg(feature=\"glium_renderer\")]\n\nextern crate image;\n\n#[cfg(feature=\"glium_renderer\")]\n\nextern crate clock_ticks;\n\n\n\n\n\n// External Dependencies ------------------------------------------------------\n\nuse std::net::SocketAddr;\n\nuse shared::Lithium::Renderer as LithiumRenderer;\n\n\n\n\n\n// Internal Dependencies ------------------------------------------------------\n\nmod entities;\n\nmod game;\n\nmod level;\n\nmod renderer;\n\nuse renderer::Renderer;\n\n\n\n\n\n// Main -----------------------------------------------------------------------\n\n#[cfg(feature=\"allegro_renderer\")]\n\nallegro_main! {\n\n run();\n\n}\n\n\n\n#[cfg(feature=\"glium_renderer\")]\n", "file_path": "client/src/main.rs", "rank": 96, "score": 32728.87027465312 }, { "content": "#[macro_use]\n\nextern crate clap;\n\nextern crate rand;\n\nextern crate shared;\n\nextern crate shooter_server;\n\n\n\n#[cfg(feature=\"allegro_renderer\")]\n\n#[macro_use]\n\nextern crate allegro;\n\n#[cfg(feature=\"allegro_renderer\")]\n\nextern crate allegro_sys;\n\n#[cfg(feature=\"allegro_renderer\")]\n\nextern crate allegro_font;\n\n#[cfg(feature=\"allegro_renderer\")]\n\nextern crate allegro_primitives;\n\n\n\n#[cfg(feature=\"glium_renderer\")]\n\n#[macro_use]\n\nextern crate glium;\n\n#[cfg(feature=\"glium_renderer\")]\n", "file_path": "client/src/main.rs", "rank": 97, "score": 32726.099623853203 }, { "content": " &mut self, &mut R, &Level<S, L>, &mut Entity<S, L, R>, u8, f32\n\n );\n\n\n\n fn tick_after(&mut self, Handle<Self, R, G, L, E, S>) where Self: Sized;\n\n\n\n fn draw(&mut self, Handle<Self, R, G, L, E, S>) where Self: Sized;\n\n\n\n fn destroy(&mut self, Handle<Self, R, G, L, E, S>) where Self: Sized;\n\n\n\n}\n\n\n\n\n\n// Timer Implementation -------------------------------------------------------\n\nimpl_timer!(Handler, Renderer, EntityRegistry, BaseLevel, Event, EntityState);\n\n\n", "file_path": "lithium-rs/src/client.rs", "rank": 98, "score": 31761.765139066763 }, { "content": " H: Handler<R, G, L, E, S> + 'a,\n\n R: Renderer + 'a,\n\n G: EntityRegistry<S, L, R> + 'a,\n\n L: BaseLevel<S> + 'a,\n\n E: Event + 'a,\n\n S: EntityState + 'a\n\n> {\n\n pub renderer: &'a mut R,\n\n pub level: &'a mut Level<S, L>,\n\n pub events: &'a mut EventHandler<E>,\n\n pub entities: &'a mut EntityManager<S, L, R, G>,\n\n pub timer: &'a mut Timer<H, R, G, L, E, S>,\n\n pub client: &'a mut ClientStream\n\n}\n\n\n\n\n\n// Client Handler -------------------------------------------------------------\n", "file_path": "lithium-rs/src/client.rs", "rank": 99, "score": 31760.45799669676 } ]
Rust
jissen/2nd/code4/src/main.rs
Hota822/Rust
05a7756cfec474052161b3886b035a326a58f49a
fn main() { f4_2(); f4_3(); } fn f4_3() { let t1 = (88, true); assert_eq!(t1.0, 88); let mut t2 = (88, true); t2.1 = false; assert_eq!(t2.1, false); let (n1, b1) = t2; println!("({}, {})", n1, b1); let ((_, _), (n2, _)) = ((1, 2), (3, 4)); println!("{}", n2); let mut t1 = ((1, 2), (3, 4)); let ((ref mut x1_ptr, ref mut x2_ptr), _) = t1; *x1_ptr = 5; *x2_ptr = 6; println!("{:?}", t1); let a1 = [false, true, false]; let a2 = [0.0, 1.0, 0.6, -0.4]; assert_eq!(a1.len() + 1, a2.len()); let a3 = [0; 100]; assert_eq!(a3.len(), 100); let _a4 = [['a', 'b'], ['c', 'd']]; let i = 99; let mut v = vec![0; i]; assert_eq!(v.len(), 99); v.push(1); assert_eq!(v.len(), 100); assert_eq!(v.pop().unwrap(), 1); assert_eq!(v.len(), 99); let mut a1 = ['h', 'e', 'l', 'l', 'o']; println!("{}", a1[1]); a1[0] = 'H'; println!("{}", a1[0]); let i = 0; println!("{}", a1[i]); println!("{:?}", a1.get(4)); println!("{}", a1.get(4).unwrap()); println!("{:?}", a1.get(5)); let mut a4 = [1; 50]; for i in a4.iter() { print!("{}", i); } println!(); for i in a4.iter_mut() { print!("{}", i); } println!(); fn print_info(name: &str, sl: &[char]) { println!(" {:9} - {}, {}, {:?}, {:?}, {:?}", name, name, sl.len(), sl.first(), sl[1], sl.last(), ); } let a1 = ['a', 'b', 'c', 'd', 'e']; println!("a1: {:?}", a1 ); print_info("&a1[..]", &a1); print_info("&a1[..]", &a1[..]); let v1 = vec!['f', 'g', 'h', 'i', 'j']; print_info("&v1[..]", &v1); let mut a1 = [5, 4, 3, 2]; let s1 = &mut a1[1..3]; s1[0] = 6; s1[1] *= 10; s1.swap(0, 1); println!("{:?}", s1); println!("{:?}", a1); let a2: [i32; 0] = []; let s2 = &a2; assert!(s2.is_empty()); assert_eq!(s2.len(), 0); assert_eq!(s2.get(0), None); let a3 = ["Zero", "One", "Two", "Three", "Four"]; let s3 = &a3[1..4]; assert!(!s3.is_empty()); assert_eq!(s3.len(), 3); assert_eq!(s3.first(), Some(&"One")); assert_eq!(s3[1], "Two"); assert_eq!(s3.get(1), Some(&"Two")); assert_eq!(a3.get(1), Some(&"One")); assert!(a3.contains(&"One")); assert!(s3.starts_with(&["One", "Two"])); assert!(s3.ends_with(&["Two", "Three"])); let mut a4 = [1, 3, 4, 5]; println!("{:?}", a4); assert_eq!(a4.get(1).unwrap(), &3); let i1 = a4.get_mut(3).unwrap(); println!("{:?}", i1); *i1 = 10; println!("{:?}", i1); println!("{:?}", a4); let mut i2 = a4.get_mut(3).unwrap(); println!("{:?}", i2); i2 = a4.get_mut(1).unwrap(); println!("{:?}, warning is not displayed", i2); let _i3 = a4.get(1).unwrap(); let mut a4 = [6, 4, 2, 8, 0, 9, 4, 3, 7, 5, 1, 7]; a4[2..6].sort(); println!("{:?}", a4); let (s4a, s4b) = a4.split_at_mut(5); s4a.reverse(); println!("s4a: {:?}", s4a); s4b.sort_unstable(); println!("s4b: {:?}", s4b); println!("{:?}", a4); let s1 = "abc1"; let s2 = "abc2"; assert!(s1 < s2); assert!(s1 != s2); let s3 = "add new line"; let s4 = "no new \ line"; println!("{}", s3); println!("{}", s4); let s5 = "\\ add back slash by escape"; let s6 = r#"\ add back slash by row string"#; println!("{}", s5); println!("{}", s6); let s7 = r###"it's able to use # and ## in this literal"###; println!("{}", s7); println!("{}\u{1f600}",s7 ); let fruits = "red apple, green apple\nraspberry, black berry"; let mut lines = fruits.lines(); let apple_line = lines.next(); assert_eq!(Some("red apple, green apple"), apple_line); assert_eq!(Some("raspberry, black berry"), lines.next()); assert_eq!(None, lines.next()); if let Some(apples) = apple_line { assert!(apples.starts_with("red")); assert!(apples.contains("apple")); println!("{:?}", apples.find("green")); let mut apple_iter = apples.split(","); assert_eq!(apple_iter.next(), Some("red apple")); let green = apple_iter.next(); assert_eq!(Some(" green apple"), green); assert_eq!(Some("green apple"), green.map(|s| s.trim())); assert_eq!(Some("green apple"), green.map(str::trim)); } let s1 = "a"; let s2 = "あ"; let s3 = "😀"; println!("{}, {}, {}", s1.len(), s2.len(), s3.len()); let s = "abcあいう"; assert_eq!(s.get(0..1), Some("a")); assert_eq!(s.get(3..6), Some("あ")); assert_eq!(s.get(3..5), None); assert_eq!(s.get(3..=6), None ); let s = "かか\u{3099}く"; println!("{}", s); let s_iter = s.chars(); for c in s_iter { println!("{}", c); } let mut s_iter = s.chars(); println!("{:?}", s_iter.next()); println!("{:?}", s_iter.next()); println!("{:?}", s_iter.next()); println!("{:?}", s_iter.next()); println!("{:?}", s_iter.next()); println!("{:?}", s_iter.next()); let utf8: [u8; 4] = [0x61, 0xe3, 0x81, 0x82]; assert_eq!(std::str::from_utf8(&utf8), Ok("aあ")); let bad_utf8: [u8; 2] = [0x81, 0x33]; let result = std::str::from_utf8(&bad_utf8); assert!(result.is_err()); let mut s1 = "abc".to_string(); let s2 = s1.as_mut_str(); s2.make_ascii_uppercase(); assert_eq!("ABC", s2); let b = unsafe {s2.as_bytes_mut() }; b[0] = b'D'; b[1] = b'E'; b[2] = b'F'; println!("{}", s2); } fn f4_2() { let n = 42; let c = 'R'; println!("{}, {}", n, c); fn hello() { println!("hello"); } let hello = hello(); assert_eq!((), hello); assert_eq!(std::mem::size_of::<()>(), 0); let b1 = true; let b2 = !b1; assert_eq!(b2, false); let n1 = 8; let n2 = 12; let b3 = n1 > 10; let b4 = n2 > 10; let b5 = b3 && b4; let b6 = b3 || b4; assert_eq!(b5, false); assert_eq!(b6, true); assert_eq!(std::mem::size_of::<bool>(), 1); let n1 = 10_000; let n1_1 = 10000; let n2 = 0u8; let n2_1: u8 = 0; let n3 = -100_isize; let n3_1: isize = -100; assert_eq!(n1, n1_1); assert_eq!(n2, n2_1); assert_eq!(n3, n3_1); let n4 = 10; let n5 = n3 + n4; print_typename(n5); let n1 = 57; let h1 = 0xff; let o1 = 0o71; let b1 = 0b1111_1111; assert_eq!(h1, b1); assert_eq!(n1, o1); let n6 = b'A'; assert_eq!(n6, 65u8); let n1 = std::u8::MAX; let _n2 = 1u8; println!("{}", n1); let n1 = 200u8; let n2 = 3u8; assert!(n1.checked_mul(n2).is_none()); assert_eq!(n1.saturating_mul(n2), std::u8::MAX); assert_eq!(n1.wrapping_mul(n2), 88); assert_eq!(n1.overflowing_mul(n2), (88, true)); let _f1 = 10.0; let _f2 = -1_234.56_f32; let _f3 = 578.6E77; let c1 ='A'; let c2 = 'a'; assert!(c1 < c2); assert!(c1.is_uppercase()); let c3 = '0'; assert!(c3.is_digit(10)); let c4 = '\t'; let c5 = '\n'; let c6 = '\''; let c7 = '\\'; let c8 = '\x7F'; let c9 = '\u{1f600}'; println!("{}, {}, {}, {}, {}, {}, {}", c3, c4, c5, c6, c7, c8, c9); fn func1(mut n: u32) { println!("func1-1 {}", n); n = 1; println!("func1-2 {}", n); } fn func2(n_ptr: &mut u32) { println!("func2 *n_ptr {}", *n_ptr); println!("func2 n_ptr {}", n_ptr); *n_ptr = 10; println!("func2 *n_ptr {}", n_ptr); } let mut x = 5; func1(x); func2(&mut x); let c1 = 'A'; let c1_ptr = &c1; assert_eq!(*c1_ptr, 'A'); let mut n1 = 0; let n1_ptr = &mut n1; assert_eq!(*n1_ptr, 0); fn double (n: i32) -> i32 { n + n } fn abs(n: i32) -> i32 { if n >= 0 { n } else { -n } } let mut func: fn(i32) -> i32 = double; let x = -5; println!("{}", func(x)); func = abs; println!("{}", func(x)); let x = 4; let adder = |n| n + x ; assert_eq!(adder(2), 4 + 2); let mut state = false; let mut flipflop = || { state = !state; state }; println!("{}", flipflop()); println!("{}", flipflop()); println!("{}", flipflop()); println!("{}", state); } fn print_typename<T>(_ : T) { println!("{}", std::any::type_name::<T>()); }
fn main() { f4_2(); f4_3(); } fn f4_3() { let t1 = (88, true); assert_eq!(t1.0, 88); let mut t2 = (88, true); t2.1 = false; assert_eq!(t2.1, false); let (n1, b1) = t2; println!("({}, {})", n1, b1); let ((_, _), (n2, _)) = ((1, 2), (3, 4)); println!("{}", n2); let mut t1 = ((1, 2), (3, 4)); let ((ref mut x1_ptr, ref mut x2_ptr), _) = t1; *x1_ptr = 5; *x2_ptr = 6; println!("{:?}", t1); let a1 = [false, true, false]; let a2 = [0.0, 1.0, 0.6, -0.4]; assert_eq!(a1.len() + 1, a2.len()); let a3 = [0; 100]; assert_eq!(a3.len(), 100); let _a4 = [['a', 'b'], ['c', 'd']]; let i = 99; let mut v = vec![0; i]; assert_eq!(v.len(), 99); v.push(1); assert_eq!(v.len(), 100); assert_eq!(v.pop().unwrap(), 1); assert_eq!(v.len(), 99); let mut a1 = ['h', 'e', 'l', 'l', 'o']; println!("{}", a1[1]); a1[0] = 'H'; println!("{}", a1[0]); let i = 0; println!("{}", a1[i]); println!("{:?}", a1.get(4)); println!("{}", a1.get(4).unwrap()); println!("{:?}", a1.get(5)); let mut a4 = [1; 50]; for i in a4.iter() { print!("{}", i); } println!(); for i in a4.iter_mut() { print!("{}", i); } println!(); fn print_info(name: &str, sl: &[char]) { println!(" {:9} - {}, {}, {:?}, {:?}, {:?}", name, name, sl.len(), sl.first(), sl[1], sl.last(), ); } let a1 = ['a', 'b', 'c', 'd', 'e']; println!("a1: {:?}", a1 ); print_info("&a1[..]", &a1); print_info("&a1[..]", &a1[..]); let v1 = vec!['f', 'g', 'h', 'i', 'j']; print_info("&v1[..]", &v1); let mut a1 = [5, 4, 3, 2]; let s1 = &mut a1[1..3]; s1[0] = 6; s1[1] *= 10; s1.swap(0, 1); println!("{:?}", s1); println!("{:?}", a1); let a2: [i32; 0] = []; let s2 = &a2; assert!(s2.is_empty()); assert_eq!(s2.len(), 0); assert_eq!(s2.get(0), None); let a3 = ["Zero", "One", "Two", "Three", "Four"]; let s3 = &a3[1..4]; assert!(!s3.is_empty()); assert_eq!(s3.len(), 3); assert_eq!(s3.first(), Some(&"One")); assert_eq!(s3[1], "Two"); assert_eq!(s3.get(1), Some(&"Two")); assert_eq!(a3.get(1), Some(&"One")); assert!(a3.contains(&"One")); assert!(s3.starts_with(&["One", "Two"])); assert!(s3.ends_with(&["Two", "Three"])); let mut a4 = [1, 3, 4, 5]; println!("{:?}", a4); assert_eq!(a4.get(1).unwrap(), &3); let i1 = a4.get_mut(3).unwrap(); println!("{:?}", i1); *i1 = 10; println!("{:?}", i1); println!("{:?}", a4); let mut i2 = a4.get_mut(3).unwrap(); println!("{:?}", i2); i2 = a4.get_mut(1).unwrap(); println!("{:?}, warning is not displayed", i2); let _i3 = a4.get(1).unwrap(); let mut a4 = [6, 4, 2, 8, 0, 9, 4, 3, 7, 5, 1, 7]; a4[2..6].sort(); println!("{:?}", a4); let (s4a, s4b) = a4.split_at_mut(5); s4a.reverse(); println!("s4a: {:?}", s4a); s4b.sort_unstable(); println!("s4b: {:?}", s4b); println!("{:?}", a4); let s1 = "abc1"; let s2 = "abc2"; assert!(s1 < s2); assert!(s1 != s2); let s3 = "add new line"; let s4 = "no new \ line"; println!("{}", s3); println!("{}", s4); let s5 = "\\ add back slash by escape"; let s6 = r#"\ add back slash by row string"#; println!("{}", s5); println!("{}", s6); let s7 = r###"it's able to use # and ## in this literal"###; println!("{}", s7); println!("{}\u{1f600}",s7 ); let fruits = "red apple, green apple\nraspberry, black berry"; let mut lines = fruits.lines(); let apple_line = lines.next(); assert_eq!(Some("red apple, green apple"), apple_line); assert_eq!(Some("raspberry, black berry"), lines.next()); assert_eq!(None, lines.next());
let s1 = "a"; let s2 = "あ"; let s3 = "😀"; println!("{}, {}, {}", s1.len(), s2.len(), s3.len()); let s = "abcあいう"; assert_eq!(s.get(0..1), Some("a")); assert_eq!(s.get(3..6), Some("あ")); assert_eq!(s.get(3..5), None); assert_eq!(s.get(3..=6), None ); let s = "かか\u{3099}く"; println!("{}", s); let s_iter = s.chars(); for c in s_iter { println!("{}", c); } let mut s_iter = s.chars(); println!("{:?}", s_iter.next()); println!("{:?}", s_iter.next()); println!("{:?}", s_iter.next()); println!("{:?}", s_iter.next()); println!("{:?}", s_iter.next()); println!("{:?}", s_iter.next()); let utf8: [u8; 4] = [0x61, 0xe3, 0x81, 0x82]; assert_eq!(std::str::from_utf8(&utf8), Ok("aあ")); let bad_utf8: [u8; 2] = [0x81, 0x33]; let result = std::str::from_utf8(&bad_utf8); assert!(result.is_err()); let mut s1 = "abc".to_string(); let s2 = s1.as_mut_str(); s2.make_ascii_uppercase(); assert_eq!("ABC", s2); let b = unsafe {s2.as_bytes_mut() }; b[0] = b'D'; b[1] = b'E'; b[2] = b'F'; println!("{}", s2); } fn f4_2() { let n = 42; let c = 'R'; println!("{}, {}", n, c); fn hello() { println!("hello"); } let hello = hello(); assert_eq!((), hello); assert_eq!(std::mem::size_of::<()>(), 0); let b1 = true; let b2 = !b1; assert_eq!(b2, false); let n1 = 8; let n2 = 12; let b3 = n1 > 10; let b4 = n2 > 10; let b5 = b3 && b4; let b6 = b3 || b4; assert_eq!(b5, false); assert_eq!(b6, true); assert_eq!(std::mem::size_of::<bool>(), 1); let n1 = 10_000; let n1_1 = 10000; let n2 = 0u8; let n2_1: u8 = 0; let n3 = -100_isize; let n3_1: isize = -100; assert_eq!(n1, n1_1); assert_eq!(n2, n2_1); assert_eq!(n3, n3_1); let n4 = 10; let n5 = n3 + n4; print_typename(n5); let n1 = 57; let h1 = 0xff; let o1 = 0o71; let b1 = 0b1111_1111; assert_eq!(h1, b1); assert_eq!(n1, o1); let n6 = b'A'; assert_eq!(n6, 65u8); let n1 = std::u8::MAX; let _n2 = 1u8; println!("{}", n1); let n1 = 200u8; let n2 = 3u8; assert!(n1.checked_mul(n2).is_none()); assert_eq!(n1.saturating_mul(n2), std::u8::MAX); assert_eq!(n1.wrapping_mul(n2), 88); assert_eq!(n1.overflowing_mul(n2), (88, true)); let _f1 = 10.0; let _f2 = -1_234.56_f32; let _f3 = 578.6E77; let c1 ='A'; let c2 = 'a'; assert!(c1 < c2); assert!(c1.is_uppercase()); let c3 = '0'; assert!(c3.is_digit(10)); let c4 = '\t'; let c5 = '\n'; let c6 = '\''; let c7 = '\\'; let c8 = '\x7F'; let c9 = '\u{1f600}'; println!("{}, {}, {}, {}, {}, {}, {}", c3, c4, c5, c6, c7, c8, c9); fn func1(mut n: u32) { println!("func1-1 {}", n); n = 1; println!("func1-2 {}", n); } fn func2(n_ptr: &mut u32) { println!("func2 *n_ptr {}", *n_ptr); println!("func2 n_ptr {}", n_ptr); *n_ptr = 10; println!("func2 *n_ptr {}", n_ptr); } let mut x = 5; func1(x); func2(&mut x); let c1 = 'A'; let c1_ptr = &c1; assert_eq!(*c1_ptr, 'A'); let mut n1 = 0; let n1_ptr = &mut n1; assert_eq!(*n1_ptr, 0); fn double (n: i32) -> i32 { n + n } fn abs(n: i32) -> i32 { if n >= 0 { n } else { -n } } let mut func: fn(i32) -> i32 = double; let x = -5; println!("{}", func(x)); func = abs; println!("{}", func(x)); let x = 4; let adder = |n| n + x ; assert_eq!(adder(2), 4 + 2); let mut state = false; let mut flipflop = || { state = !state; state }; println!("{}", flipflop()); println!("{}", flipflop()); println!("{}", flipflop()); println!("{}", state); } fn print_typename<T>(_ : T) { println!("{}", std::any::type_name::<T>()); }
if let Some(apples) = apple_line { assert!(apples.starts_with("red")); assert!(apples.contains("apple")); println!("{:?}", apples.find("green")); let mut apple_iter = apples.split(","); assert_eq!(apple_iter.next(), Some("red apple")); let green = apple_iter.next(); assert_eq!(Some(" green apple"), green); assert_eq!(Some("green apple"), green.map(|s| s.trim())); assert_eq!(Some("green apple"), green.map(str::trim)); }
if_condition
[ { "content": "fn print_info(name: &str, sl: &[char]) {\n\n println!(\" {:9} - {}, {:?}, {:?}, {:?}\",\n\n name,\n\n sl.len(),\n\n sl.first(),\n\n sl.get(1),\n\n sl.last()\n\n );\n\n}\n\n\n", "file_path": "jissen/1st/code4/src/main.rs", "rank": 0, "score": 413647.8982519959 }, { "content": "fn add1(s0: &str, s1: &str) -> Result<i32, String> {\n\n let s0 = s0.parse::<i32>().map_err(|_e| \"s0 is not integer\")?;\n\n let s1 = s1.parse::<i32>().map_err(|_e| \"s1 is not integer\")?;\n\n Ok(s0 + s1)\n\n}\n", "file_path": "jissen/1st/code5/src/main.rs", "rank": 1, "score": 369907.143742262 }, { "content": "fn f2(n: &mut usize, str: &str, slice: &[i32]) {\n\n *n = str.len() + slice.len();\n\n}\n\n\n\n// #[repr(C)]\n", "file_path": "jissen/1st/code5/src/main.rs", "rank": 2, "score": 305560.9934192122 }, { "content": "fn add0(s0: &str, s1: &str) -> Result<i32, std::num::ParseIntError> {\n\n let s0 = s0.parse::<i32>()?;\n\n let s1 = s1.parse::<i32>()?;\n\n Ok(s0 + s1)\n\n}\n", "file_path": "jissen/1st/code5/src/main.rs", "rank": 3, "score": 287142.43550743046 }, { "content": "fn create_task_box(name: &str) -> Component {\n\n let task_name = ComponentBuilder::new()\n\n .set_text(name)\n\n .set_gravity(\"center\")\n\n .set_flex(1)\n\n .build_text();\n\n\n\n let post_back_done = Action::create_postback(\"完了\", \"action=done&id=1\");\n\n\n\n // let task_name_container = ComponentBuilder::new()\n\n // .set_layout(\"vertical\")\n\n // .set_contents(vec![task_name])\n\n // .set_flex(1)\n\n // .build_box();\n\n\n\n\n\n let done_button = ComponentBuilder::new()\n\n .set_action(post_back_done)\n\n .set_style(\"primary\")\n\n .set_gravity(\"center\")\n", "file_path": "rustapi/line-messaging-api-rust/tests/messages_test/mod.rs", "rank": 4, "score": 229009.77327689645 }, { "content": "pub fn heavy_calc(name: &str, n: u64) -> u64 {\n\n println!(\"{}: started\", name);\n\n thread::sleep(time::Duration::from_millis(n));\n\n let sum = (1..=n).sum();\n\n println!(\"{}: ended\", name);\n\n sum\n\n}", "file_path": "jissen/1st/code3/src/main.rs", "rank": 5, "score": 224644.4558764802 }, { "content": "fn f5(p: &[i32]) -> i32 { p[0]}\n", "file_path": "jissen/1st/code5/src/main.rs", "rank": 6, "score": 216108.72154575147 }, { "content": "fn new_poly(vertexes: Vec<(i32, i32)>) -> Polygon {\n\n let stroke_width = 1;\n\n let fill = (1,1,1);\n\n Polygon {vertexes, stroke_width, fill}\n\n}\n\n\n", "file_path": "jissen/1st/code5/src/main.rs", "rank": 7, "score": 214217.00916854828 }, { "content": "pub fn to_events<'a, T>(data: &str) -> Result<T, String>\n\nwhere for<'de> T: Deserialize<'de>, \n\n{\n\n let events: Value = match serde_json::from_str(data) {\n\n Ok(events) => events,\n\n Err(err) => return Err(err.to_string()),\n\n };\n\n \n\n let contents: Value = match serde_json::to_value(&events[\"events\"][0]) {\n\n Ok(contents) => contents,\n\n Err(err) => return Err(err.to_string()),\n\n };\n\n serde_json::from_value(contents).map_err(|err| { err.to_string() })\n\n}", "file_path": "rustapi/line-messaging-api-rust/src/utils.rs", "rank": 8, "score": 212481.60731755654 }, { "content": "fn f6(p: Box<[i32]>) -> i32 { p[0]}\n\n\n", "file_path": "jissen/1st/code5/src/main.rs", "rank": 9, "score": 211390.3274941279 }, { "content": "fn typename<T>(_: &T) -> &'static str {\n\n let string = std::any::type_name::<T>();\n\n println!(\"{}\", string);\n\n string\n\n}\n\n\n\n\n", "file_path": "jissen/2nd/code5/src/main.rs", "rank": 10, "score": 194313.71223138628 }, { "content": "fn double(n: i32) -> i32 {\n\n n + n\n\n}\n\n\n", "file_path": "jissen/1st/code4/src/main.rs", "rank": 11, "score": 187843.50787543392 }, { "content": "fn abs(n: i32) -> i32 {\n\n if n >= 0 { n } else { -n }\n\n}\n\n\n", "file_path": "jissen/1st/code4/src/main.rs", "rank": 12, "score": 187843.50787543392 }, { "content": "fn new_user2(name: UserName2, id: Id2, created: Timestamp2) -> User2 {\n\n (id, name, created)\n\n}\n\n\n", "file_path": "jissen/1st/code5/src/main.rs", "rank": 13, "score": 185463.7285952664 }, { "content": "fn elements(s: &[i32]) -> Option<i32> {\n\n let s0 = s.get(0)?;\n\n let s3 = s.get(3)?;\n\n Some(s0 + s3)\n\n}\n\n\n", "file_path": "jissen/1st/code5/src/main.rs", "rank": 14, "score": 184059.144464481 }, { "content": "fn bad_elements(s: &[i32]) -> Option<i32> {\n\n let s0 = s.get(0)?;\n\n let s3 = s.get(5)?;\n\n Some(s0 + s3)\n\n}\n", "file_path": "jissen/1st/code5/src/main.rs", "rank": 15, "score": 182308.0603027515 }, { "content": "fn main() {\n\n rocket::ignite().mount(\"/\", routes![webhook]).launch();\n\n}", "file_path": "rustapi/line-messaging-api-rocket/examples/webhook/src/main.rs", "rank": 16, "score": 181017.1882128397 }, { "content": "fn timed_sort<F>(sorter: &F, len: usize, name: &str) -> f64\n\nwhere\n\n F: Fn(&mut [u32], &SortOrder) -> Result<(), String>\n\n{\n\n let mut x = new_u32_vec(len);\n\n let start = Instant::now();\n\n sorter(&mut x, &SortOrder::Ascending).expect(\"Failed to sort: \");\n\n let dur = start.elapsed();\n\n\n\n let nano_secs = dur.subsec_nanos() as f64 + dur.as_secs() as f64 * 1e9_f64;\n\n println!(\n\n \"{}: sorted {} integers in {} seconds\",\n\n name,\n\n len,\n\n nano_secs / 1e9;\n\n );\n\n assert!(is_sorted_ascending(&x));\n\n nano_secs\n\n}", "file_path": "jissen/2nd/code3/examples/benchmark.rs", "rank": 17, "score": 179857.7035923843 }, { "content": "fn timed_sort<F>(sorter: &F, len: usize, name: &str) -> f64\n\nwhere\n\n F: Fn(&mut [u32], &SortOrder) -> Result<(), String>\n\n{\n\n let mut x = new_u32_vec(len);\n\n let start = Instant::now();\n\n sorter(&mut x, &SortOrder::Ascending).expect(\"Failed to sort: \");\n\n let dur = start.elapsed();\n\n\n\n let nano_secs = dur.subsec_nanos() as f64 + dur.as_secs() as f64 * 1e9_f64;\n\n println!(\n\n \"{}: sorted {} integers in {} seconds\",\n\n name,\n\n len,\n\n nano_secs / 1e9\n\n );\n\n assert!(is_sorted_ascending(&x));\n\n nano_secs\n\n}", "file_path": "jissen/2nd/code3_1/examples/benchmark.rs", "rank": 18, "score": 179857.7035923843 }, { "content": "fn timed_sort<F>(sorter: &F, len: usize, name: &str) -> f64\n\nwhere\n\n F: Fn(&mut [u32], &SortOrder) -> Result<(), String>,\n\n{\n\n let mut x = new_u32_vec(len);\n\n let start = Instant::now();\n\n sorter(&mut x, &SortOrder::Ascending).expect(\"Failed to sort: \");\n\n let dur = start.elapsed();\n\n\n\n let nano_secs = dur.subsec_nanos() as f64 + dur.as_secs() as f64 * 1e9_f64;\n\n println!(\n\n \"{}: sorted {} integers in {} seconds\",\n\n name,\n\n len,\n\n nano_secs / 1e9\n\n );\n\n\n\n assert!(is_sorted_ascending(&x));\n\n\n\n nano_secs\n\n}", "file_path": "jissen/1st/code3/bitonic-sorter/examples/benchmark.rs", "rank": 19, "score": 176688.90948528037 }, { "content": "fn stringify(x: impl ToString) -> String { x.to_string() }\n\n\n", "file_path": "jissen/1st/code7/src/main.rs", "rank": 20, "score": 174972.46932669042 }, { "content": "pub fn sort_by<T, F>(x: &mut [T], comparator: &F) -> Result<(), String>\n\nwhere T: Send,\n\n F: Sync + Fn(&T, &T) -> Ordering\n\n{\n\n if x.len().is_power_of_two() {\n\n do_sort(x, true, comparator);\n\n Ok(())\n\n } else {\n\n Err(format!(\"The length of x is not a power of two.\"))\n\n }\n\n\n\n}\n\n\n", "file_path": "jissen/2nd/code3/src/fourth.rs", "rank": 21, "score": 172893.92615514575 }, { "content": "pub fn sort_by<T, F>(x: &mut [T], comparator: &F) -> Result<(), String>\n\nwhere F: Fn(&T, &T) -> Ordering\n\n{\n\n if x.len().is_power_of_two() {\n\n do_sort(x, true, comparator);\n\n Ok(())\n\n } else {\n\n Err(format!(\"The length of x is not a power of two.\"))\n\n }\n\n\n\n}\n\n\n", "file_path": "jissen/2nd/code3_1/src/third.rs", "rank": 22, "score": 172893.92615514575 }, { "content": "pub fn sort_by<T, F>(x: &mut [T], comparator: &F) -> Result<(), String>\n\nwhere F: Fn(&T, &T) -> Ordering\n\n{\n\n if x.len().is_power_of_two() {\n\n do_sort(x, true, comparator);\n\n Ok(())\n\n } else {\n\n Err(format!(\"The length of x is not a power of two.\"))\n\n }\n\n\n\n}\n\n\n", "file_path": "jissen/2nd/code3/src/third.rs", "rank": 23, "score": 172893.92615514577 }, { "content": "pub fn sort_by<T, F>(x: &mut [T], comparator: &F) -> Result<(), String>\n\nwhere T: Send,\n\n F: Sync + Fn(&T, &T) -> Ordering\n\n{\n\n if x.len().is_power_of_two() {\n\n do_sort(x, true, comparator);\n\n Ok(())\n\n } else {\n\n Err(format!(\"The length of x is not a power of two.\"))\n\n }\n\n\n\n}\n\n\n", "file_path": "jissen/2nd/code3_1/src/fourth.rs", "rank": 24, "score": 172893.92615514577 }, { "content": "pub fn sort<T>(x: &mut [T], order: &SortOrder) -> Result<(), String>\n\nwhere\n\n T: Ord\n\n{\n\n match *order {\n\n Asc => sort_by(x, &|a, b| a.cmp(b)),\n\n Desc => sort_by(x, &|a, b| b.cmp(a)),\n\n }\n\n}\n\n\n", "file_path": "jissen/1st/code3/bitonic-sorter/src/third.rs", "rank": 25, "score": 172064.80342981758 }, { "content": "pub fn sort<T>(x: &mut [T], order: &SortOrder) -> Result<(), String>\n\nwhere\n\n T: Ord\n\n{\n\n if x.len().is_power_of_two() {\n\n match *order {\n\n Asc => do_sort(x, true),\n\n Desc => do_sort(x, false),\n\n };\n\n Ok(())\n\n } else {\n\n Err(format!(\"The length of x is not a power of two. (x.len(): {}\", x.len()))\n\n }\n\n}\n\n\n", "file_path": "jissen/1st/code3/bitonic-sorter/src/second.rs", "rank": 26, "score": 172064.80342981758 }, { "content": "pub fn sort<T>(x: &mut [T], order: &SortOrder) -> Result<(), String>\n\nwhere\n\n T: Ord + Send\n\n{\n\n match *order {\n\n Asc => sort_by(x, &|a, b| a.cmp(b)),\n\n Desc => sort_by(x, &|a, b| b.cmp(a)),\n\n }\n\n}\n\n\n", "file_path": "jissen/1st/code3/bitonic-sorter/src/fourth.rs", "rank": 27, "score": 172064.80342981758 }, { "content": "pub fn sort<T: Ord>(x: &mut [T], order: &SortOrder) -> Result<(), String> {\n\n if x.len().is_power_of_two() {\n\n match *order {\n\n SortOrder::Ascending => do_sort(x, true),\n\n SortOrder::Descending => do_sort(x, false),\n\n };\n\n Ok(())\n\n } else {\n\n Err(format!(\"The length of x is not a power of two.(x.len(): {}\", x.len()))\n\n }\n\n}\n\n\n", "file_path": "jissen/2nd/code3/src/second.rs", "rank": 28, "score": 171296.56696955394 }, { "content": "pub fn sort<T: Ord>(x: &mut [T], order: &SortOrder) -> Result<(), String> {\n\n match *order {\n\n SortOrder::Ascending => sort_by(x, &|a, b| a.cmp(b)),\n\n SortOrder::Descending => sort_by(x, &|a, b| b.cmp(a)),\n\n }\n\n}\n\n\n", "file_path": "jissen/2nd/code3/src/third.rs", "rank": 29, "score": 171296.56696955394 }, { "content": "pub fn sort<T: Ord>(x: &mut [T], order: &SortOrder) -> Result<(), String> {\n\n if x.len().is_power_of_two() {\n\n match *order {\n\n SortOrder::Ascending => do_sort(x, true),\n\n SortOrder::Descending => do_sort(x, false),\n\n };\n\n Ok(())\n\n } else {\n\n Err(format!(\"The length of x is not a power of two.(x.len(): {}\", x.len()))\n\n }\n\n}\n\n\n", "file_path": "jissen/2nd/code3_1/src/second.rs", "rank": 30, "score": 171296.56696955394 }, { "content": "pub fn sort<T: Ord>(x: &mut [T], order: &SortOrder) -> Result<(), String> {\n\n match *order {\n\n SortOrder::Ascending => sort_by(x, &|a, b| a.cmp(b)),\n\n SortOrder::Descending => sort_by(x, &|a, b| b.cmp(a)),\n\n }\n\n}\n\n\n", "file_path": "jissen/2nd/code3_1/src/third.rs", "rank": 31, "score": 171296.56696955394 }, { "content": "fn f1(mut n: u32) {\n\n n = 1;\n\n println!(\"f1 n = {}\", n);\n\n}\n\n\n", "file_path": "jissen/sample1/src/main.rs", "rank": 32, "score": 171001.6125216566 }, { "content": "pub fn sort_by<T, F>(x: &mut[T], comparator: &F) -> Result<(), String>\n\nwhere\n\n T: Send,\n\n F: Sync + Fn(&T, &T) -> Ordering\n\n{\n\n if x.len().is_power_of_two() {\n\n do_sort(x, true, comparator);\n\n Ok(())\n\n } else {\n\n Err(format!(\"The length of x i not a power of two. (x.len(): {})\", x.len()))\n\n }\n\n}\n\n\n\n\n", "file_path": "jissen/1st/code3/bitonic-sorter/src/fourth.rs", "rank": 33, "score": 169748.45755719853 }, { "content": "pub fn sort_by<T, F>(x: &mut[T], comparator: &F) -> Result<(), String>\n\nwhere F: Fn(&T, &T) -> Ordering\n\n{\n\n if x.len().is_power_of_two() {\n\n do_sort(x, true, comparator);\n\n Ok(())\n\n } else {\n\n Err(format!(\"The length of x i not a power of two. (x.len(): {})\", x.len()))\n\n }\n\n}\n\n\n\n\n", "file_path": "jissen/1st/code3/bitonic-sorter/src/third.rs", "rank": 34, "score": 169748.45755719853 }, { "content": "fn f2(n_ptr: &mut u32) {\n\n println!(\"f2 n_ptr = {:p}\", n_ptr);\n\n *n_ptr = 2;\n\n // n_ptr = 2 expected &mut, find integer\n\n\n\n}\n", "file_path": "jissen/sample1/src/main.rs", "rank": 35, "score": 168962.96391792514 }, { "content": "fn f1(mut n: u32) {\n\n n += 1;\n\n println!(\"f1: n = {}\", n);\n\n}\n\n\n", "file_path": "jissen/1st/code4/src/main.rs", "rank": 36, "score": 168962.96391792514 }, { "content": "fn print_typename<T>(_: T) {\n\n println!(\"{}\", std::any::type_name::<T>());\n\n}", "file_path": "jissen/1st/code5/src/main.rs", "rank": 38, "score": 168869.02087964583 }, { "content": "fn print_typename<T>(_: T) {\n\n println!(\"{}\", std::any::type_name::<T>());\n\n}\n\n\n", "file_path": "jissen/1st/code4/src/main.rs", "rank": 39, "score": 168869.02087964586 }, { "content": "pub fn sort<T: Ord + Send>(x: &mut [T], order: &SortOrder) -> Result<(), String> {\n\n match *order {\n\n SortOrder::Ascending => sort_by(x, &|a, b| a.cmp(b)),\n\n SortOrder::Descending => sort_by(x, &|a, b| b.cmp(a)),\n\n }\n\n}\n\n\n", "file_path": "jissen/2nd/code3_1/src/fourth.rs", "rank": 40, "score": 167574.2298553785 }, { "content": "pub fn sort<T: Ord + Send>(x: &mut [T], order: &SortOrder) -> Result<(), String> {\n\n match *order {\n\n SortOrder::Ascending => sort_by(x, &|a, b| a.cmp(b)),\n\n SortOrder::Descending => sort_by(x, &|a, b| b.cmp(a)),\n\n }\n\n}\n\n\n", "file_path": "jissen/2nd/code3/src/fourth.rs", "rank": 41, "score": 167574.2298553785 }, { "content": "pub fn valify_signature(&self, channel_secret_key: &str, channel_token: &str) -> bool {\n\n // シークレットキーを暗号化\n\n\n\n\n\n}", "file_path": "rocket-webapi/src/main.rs", "rank": 42, "score": 167473.1801628378 }, { "content": "fn f4(slice: &mut [usize]) {\n\n let len = f3(slice);\n\n slice[0] = len;\n\n}\n\n\n", "file_path": "jissen/1st/code5/src/main.rs", "rank": 43, "score": 166996.0132881432 }, { "content": "fn f2(n_ptr: &mut u32) {\n\n println!(\"f2: n_ptr = {:p}\", n_ptr);\n\n *n_ptr = 2;\n\n println!(\"f2: *n_ptr = {}\", *n_ptr)\n\n}\n\n\n", "file_path": "jissen/1st/code4/src/main.rs", "rank": 44, "score": 166996.0132881432 }, { "content": "fn rpn(exp: &str) -> f64 {\n\n let mut stack = Vec::new();\n\n for token in exp.split_whitespace() {\n\n if let Ok(num) = token.parse::<f64>() {\n\n stack.push(num)\n\n } else {\n\n match token {\n\n \"+\" => apply2(&mut stack, |x, y| x + y),\n\n \"-\" => apply2(&mut stack, |x, y| x - y),\n\n \"*\" => apply2(&mut stack, |x, y| x * y),\n\n \"/\" => apply2(&mut stack, |x, y| x / y),\n\n _ => panic!(\"Unknown operator: {}\", token)\n\n }\n\n }\n\n }\n\n stack.pop().expect(\"stack underflow\")\n\n}\n\n\n", "file_path": "jissen/1st/code2/rpn/src/main.rs", "rank": 45, "score": 166626.91919791527 }, { "content": "fn f1<T>(v1: &Vec<T>) {\n\n println!(\"v1 len: {}, capacity: {}\", v1.len(), v1.capacity());\n\n}\n\n\n\n\n", "file_path": "jissen/1st/code5/src/main.rs", "rank": 46, "score": 164837.1270304467 }, { "content": "#[post(\"/todos\", data = \"<todo>\")]\n\npub fn new_todo(todo: Json<ToDo>) -> String {\n\n format!(\"Accepted post request! {:?}\", todo.0)\n\n}\n\n\n", "file_path": "rocket-webapi/src/routes.rs", "rank": 47, "score": 164750.52903056054 }, { "content": "pub fn get_user_id() -> String {\n\n let mut f = File::open(\"tests/common/config.json\").unwrap();\n\n let mut buffer = Vec::new();\n\n f.read_to_end(&mut buffer).unwrap();\n\n let config: Value = serde_json::from_slice(&buffer).unwrap();\n\n String::from(config[\"user_id\"].as_str().unwrap())\n\n}\n\n\n", "file_path": "rustapi/line-messaging-api-rust/tests/common/mod.rs", "rank": 48, "score": 164119.20615114557 }, { "content": "pub fn get_test_video() -> String {\n\n String::from(\n\n r#\"\n\n {\"events\":[{\"type\":\"message\",\"replyToken\":\"c03cce4ba8514cf49b2139c26c6c7486\",\"source\":{\"userId\":\"Ua2829b4c5a9b21984c091fc0b641fa8f\",\"type\":\"user\"},\"timestamp\":1528732060803,\"message\":{\"type\":\"video\",\"id\":\"8100347360051\"}}]}\n\n \"#\n\n )\n\n}\n\n\n", "file_path": "rustapi/line-messaging-api-rust/tests/common/mod.rs", "rank": 49, "score": 164119.20615114557 }, { "content": "pub fn get_test_text() -> String {\n\n String::from(\n\n r#\"\n\n {\"events\":[{\"type\":\"message\",\"replyToken\":\"63ca831b72f94011b38bde2676d7a6eb\",\"source\":{\"userId\":\"Ua2829b4c5a9b21984c091fc0b641fa8f\",\"type\":\"user\"},\"timestamp\":1528728227563,\"message\":{\"type\":\"text\",\"id\":\"8100074921758\",\"text\":\"はい\"}}]}\n\n \"#\n\n )\n\n}\n\n\n", "file_path": "rustapi/line-messaging-api-rust/tests/common/mod.rs", "rank": 50, "score": 164119.20615114557 }, { "content": "pub fn get_test_follow() -> String {\n\n String::from(\n\n r#\"\n\n {\"events\" :[{\"replyToken\": \"nHuyWiB7yP5Zw52FIkcQobQuGDXCTA\",\"type\": \"message\",\"timestamp\": 1462629479859,\"source\": {\"type\": \"user\",\"userId\": \"U4af4980629...\"},\"message\": {\"id\": \"325708\",\"type\": \"file\",\"fileName\": \"file.txt\",\"fileSize\": 2138}}]} \n\n \"#\n\n )\n\n}\n\n\n", "file_path": "rustapi/line-messaging-api-rust/tests/common/mod.rs", "rank": 51, "score": 164119.20615114557 }, { "content": "pub fn get_test_join() -> String {\n\n String::from(\n\n r#\"\n\n {\"events\": [{\"replyToken\": \"nHuyWiB7yP5Zw52FIkcQobQuGDXCTA\",\"type\": \"join\",\"timestamp\": 1462629479859,\"source\": {\"type\": \"group\",\"groupId\": \"C4af4980629...\"}}]}\n\n \"#\n\n )\n\n}\n\n\n", "file_path": "rustapi/line-messaging-api-rust/tests/common/mod.rs", "rank": 52, "score": 164119.20615114557 }, { "content": "pub fn get_test_leave() -> String {\n\n String::from(\n\n r#\"\n\n {\"events\":[{\"type\":\"leave\",\"source\":{\"groupId\":\"C4991a36653054b7525ac684337557e23\",\"type\":\"group\"},\"timestamp\":1528734310446}]}\n\n \"#\n\n )\n\n}\n\n\n", "file_path": "rustapi/line-messaging-api-rust/tests/common/mod.rs", "rank": 53, "score": 164119.20615114557 }, { "content": "pub fn get_test_unfollow() -> String {\n\n String::from(\n\n r#\"\n\n {\"events\": [{\"type\": \"unfollow\",\"timestamp\": 1462629479859,\"source\": {\"type\": \"user\",\"userId\": \"U4af4980629...\"}}]}\n\n \"#\n\n )\n\n}\n\n\n", "file_path": "rustapi/line-messaging-api-rust/tests/common/mod.rs", "rank": 54, "score": 164119.20615114557 }, { "content": "pub fn get_test_audio() -> String {\n\n String::from(\n\n r#\"\n\n {\"events\":[{\"type\":\"message\",\"replyToken\":\"e8355dc0634b4e6097d0365454610643\",\"source\":{\"userId\":\"Ua2829b4c5a9b21984c091fc0b641fa8f\",\"type\":\"user\"},\"timestamp\":1528732180464,\"message\":{\"type\":\"audio\",\"id\":\"8100354340055\"}}]}\n\n \"#\n\n )\n\n}\n\n\n", "file_path": "rustapi/line-messaging-api-rust/tests/common/mod.rs", "rank": 55, "score": 164119.20615114557 }, { "content": "pub fn get_test_postback() -> String {\n\n String::from(\n\n r#\"\n\n {\"events\": [{\"type\":\"postback\",\"replyToken\":\"b60d432864f44d079f6d8efe86cf404b\",\"source\":{\"userId\":\"U91eeaf62d...\",\"type\":\"user\"},\"timestamp\":1513669370317,\"postback\":{\"data\":\"storeId=12345\",\"params\":{\"datetime\":\"2017-12-25T01:00\"}}}]}\n\n \"#\n\n )\n\n}\n\n\n", "file_path": "rustapi/line-messaging-api-rust/tests/common/mod.rs", "rank": 56, "score": 164119.20615114557 }, { "content": "pub fn get_test_location() -> String {\n\n String::from(\n\n r#\"\n\n {\"events\" :[{\"replyToken\": \"nHuyWiB7yP5Zw52FIkcQobQuGDXCTA\",\"type\": \"message\",\"timestamp\": 1462629479859,\"source\": {\"type\": \"user\",\"userId\": \"U4af4980629...\"},\"message\": {\"id\": \"325708\",\"type\": \"file\",\"fileName\": \"file.txt\",\"fileSize\": 2138}}]} \n\n \"#\n\n )\n\n}\n\n\n", "file_path": "rustapi/line-messaging-api-rust/tests/common/mod.rs", "rank": 57, "score": 164119.20615114557 }, { "content": "pub fn get_test_image() -> String {\n\n String::from(\n\n r#\"\n\n {\"events\":[{\"type\":\"message\",\"replyToken\":\"378bcb8c251b43c79393aedf7418ff02\",\"source\":{\"userId\":\"Ua2829b4c5a9b21984c091fc0b641fa8f\",\"type\":\"user\"},\"timestamp\":1528728934280,\"message\":{\"type\":\"image\",\"id\":\"8100131723119\"}}]}\n\n \"#\n\n )\n\n}\n\n\n", "file_path": "rustapi/line-messaging-api-rust/tests/common/mod.rs", "rank": 58, "score": 164119.20615114557 }, { "content": "pub fn get_test_file() -> String {\n\n String::from(\n\n r#\"\n\n {\"events\" :[{\"replyToken\": \"nHuyWiB7yP5Zw52FIkcQobQuGDXCTA\",\"type\": \"message\",\"timestamp\": 1462629479859,\"source\": {\"type\": \"user\",\"userId\": \"U4af4980629...\"},\"message\": {\"id\": \"325708\",\"type\": \"file\",\"fileName\": \"file.txt\",\"fileSize\": 2138}}]}\n\n \"#\n\n )\n\n}\n\n\n", "file_path": "rustapi/line-messaging-api-rust/tests/common/mod.rs", "rank": 59, "score": 164119.20615114557 }, { "content": "pub fn get_test_beacon() -> String {\n\n String::from(\n\n r#\"\n\n {\"events\": [{\"replyToken\": \"nHuyWiB7yP5Zw52FIkcQobQuGDXCTA\",\"type\": \"beacon\",\"timestamp\": 1462629479859,\"source\": {\"type\": \"user\",\"userId\": \"U4af4980629...\"},\"beacon\": {\"hwid\": \"d41d8cd98f\",\"type\": \"enter\"}}]}\n\n \"#\n\n )\n\n}\n", "file_path": "rustapi/line-messaging-api-rust/tests/common/mod.rs", "rank": 60, "score": 164119.20615114557 }, { "content": "fn is_leap_year(year: i32) -> bool {\n\n year % 4 == 0 && !(year % 100 == 0 && year % 400 != 0)\n\n}\n", "file_path": "jissen/1st/code6/leap-year/src/main.rs", "rank": 61, "score": 163091.60035203825 }, { "content": "fn add(x: f64, y: f64) -> f64 {\n\n x + y\n\n}\n", "file_path": "jissen/1st/code2/hello/src/main.rs", "rank": 62, "score": 162725.02848748225 }, { "content": "pub fn get_test_account_link() -> String {\n\n String::from(\n\n r#\"\n\n {\"events\": [{\"type\": \"accountLink\",\"replyToken\": \"b60d432864f44d079f6d8efe86cf404b\",\"source\": {\"userId\": \"U91eeaf62d...\",\"type\": \"user\"},\"timestamp\": 1513669370317,\"link\": {\"result\": \"ok\",\"nonce\": \"xxxxxxxxxxxxxxx\"}}]}\n\n \"#\n\n )\n\n}", "file_path": "rustapi/line-messaging-api-rust/tests/common/mod.rs", "rank": 63, "score": 162437.98531628065 }, { "content": "pub fn is_replyable(data: &str) -> bool {\n\n let events: Value = match serde_json::from_str(data) {\n\n Ok(events) => events,\n\n Err(_) => return false,\n\n };\n\n\n\n serde_json::to_string(&events[\"events\"][0][\"replyToken\"]).unwrap() != \"null\"\n\n}\n\n\n", "file_path": "rustapi/line-messaging-api-rust/src/utils.rs", "rank": 64, "score": 161842.2151979027 }, { "content": "fn main() {\n\n // get user input\n\n println!(\"please input wating time\");\n\n let mut input = String::new();\n\n io::stdin()\n\n .read_line(&mut input)\n\n .expect(\"input number\");\n\n // parse string to num\n\n let mut input: u8 = match input.trim().parse() {\n\n Ok(num) => num,\n\n Err(_) => return,\n\n };\n\n\n\n if input > 60 {\n\n println!(\"too much\");\n\n return;\n\n }\n\n\n\n // stay time with input value\n\n // loopp\n", "file_path": "alarm/src/main.rs", "rank": 65, "score": 151697.7067688913 }, { "content": "fn main() {\n\n let table = Arc::new(Table {forks: vec! [\n\n Mutex::new(()),\n\n Mutex::new(()),\n\n Mutex::new(()),\n\n Mutex::new(()),\n\n Mutex::new(()),\n\n ]});\n\n\n\n let philosophers = vec![\n\n Philosopher::new(\"judith Butler\", 0, 1),\n\n Philosopher::new(\"Gilles Deleuze\", 1, 2),\n\n Philosopher::new(\"Karl Marx\", 2, 3),\n\n Philosopher::new(\"Emma Goldman\", 3, 4),\n\n Philosopher::new(\"Michel Foucault\", 0, 4),\n\n ];\n\n\n\n let handles: Vec<_> = philosophers.into_iter().map(|p| {\n\n let table = table.clone();\n\n\n", "file_path": "dining/src/main.rs", "rank": 66, "score": 151697.7067688913 }, { "content": "fn main() {\n\n rocket::ignite()\n\n .mount(\"/\", routes![index, todos, new_todo, todo_by_id])\n\n .launch();\n\n}\n\n\n", "file_path": "rocket-webapi/src/main.rs", "rank": 67, "score": 150429.2966387414 }, { "content": "fn main() {\n\n // f1(2);\n\n // fn3();\n\n f4_16();\n\n}\n\n\n", "file_path": "jissen/sample1/src/main.rs", "rank": 68, "score": 150429.2966387414 }, { "content": "fn main() {\n\n println!(\"Guess the number!\");\n\n\n\n let secret_number = rand::thread_rng().gen_range(1, 101);\n\n\n\n loop {\n\n println!(\"Please input your guess.\");\n\n\n\n let mut guess = String::new();\n\n\n\n io::stdin()\n\n .read_line(&mut guess)\n\n .expect(\"Failed to read line\");\n\n\n\n let guess: u32 = match guess.trim().parse() {\n\n Ok(num) => num,\n\n Err(_) => continue,\n\n };\n\n\n\n println!(\"You guessed: {}\", guess);\n", "file_path": "guessing_game/src/main.rs", "rank": 69, "score": 150429.2966387414 }, { "content": "fn apply2<F>(stack: &mut Vec<f64>, fun: F)\n\n where\n\n F: Fn(f64, f64) -> f64,\n\n {\n\n if let (Some(y), Some(x)) = (stack.pop(), stack.pop()) {\n\n let z = fun(x, y);\n\n stack.push(z);\n\n } else {\n\n panic!(\"Stack underflow\");\n\n }\n\n }", "file_path": "jissen/1st/code2/rpn/src/main.rs", "rank": 70, "score": 150127.33718198532 }, { "content": "// use std::collections::HashMap;\n\nfn main() {\n\n println!(\"Hello, world!\");\n\n let s = \"aa\";\n\n print_typename(s);\n\n\n\n // 5-2-1\n\n let t1 = (3, \"birds\".to_string());\n\n let mut b1 = Box::new(t1);\n\n (*b1).0 += 1;\n\n assert_eq!(*b1, (4, \"birds\".to_string()));\n\n\n\n // 5-2-2\n\n\n\n let _v1 = vec!(false, true, false);\n\n let v2 = vec!(0.0, -1.0, 1.0, 0.5);\n\n assert_eq!(v2.len(), 4);\n\n\n\n let v3 = vec!(0; 100);\n\n assert_eq!(v3.len(), 100);\n\n\n", "file_path": "jissen/1st/code5/src/main.rs", "rank": 71, "score": 149204.06547440283 }, { "content": "fn main() {\n\n let s1 = \"Hello, \";\n\n let s2 = \"World!\";\n\n let s3 = s1.to_string() + s2;\n\n assert_eq!(s3, \"Hello, World!\");\n\n\n\n hello();\n\n let ret = hello();\n\n assert_eq!(ret, ());\n\n\n\n let b1 = true;\n\n let b2 = !b1;\n\n assert_eq!(b2, false);\n\n\n\n let n1 = 8;\n\n let n2 = 12;\n\n let b3 = n1 >= 10;\n\n let b4 = n2 >= 10;\n\n let b5 = b4 && b3;\n\n let b6 = b3 || b4;\n", "file_path": "jissen/1st/code4/src/main.rs", "rank": 72, "score": 149200.1819049993 }, { "content": "fn main() {\n\n let n1 = 1200;\n\n let n2 = 1000;\n\n\n\n let child = thread::spawn(move || {\n\n heavy_calc(\"child\", n2)\n\n });\n\n\n\n let s1 = heavy_calc(\"main\", n1);\n\n\n\n // println!(\"{}\", child.join().unwrap());\n\n\n\n let _a = match child.join() {\n\n Ok(s2) => println!(\"{}, {}\", s1, s2),\n\n Err(e) => println!(\"error: {:?}\", e),\n\n };\n\n}\n\n\n", "file_path": "jissen/1st/code3/src/main.rs", "rank": 73, "score": 149200.1819049993 }, { "content": "fn main() {\n\n code7_3();\n\n code7_6();\n\n code7_7();\n\n code7_9();\n\n code7_9a();\n\n code7_10();\n\n if let Ok(()) = code7_11() {\n\n println!(\"end code 7-11\");\n\n };\n\n if let Ok(()) = code7_11a() {\n\n println!(\"end code 7-11a\");\n\n }\n\n code7_12();\n\n}\n\n\n", "file_path": "jissen/1st/code7/src/main.rs", "rank": 74, "score": 149200.1819049993 }, { "content": "fn main() {\n\n rocket::ignite().mount(\"/\", routes![webhook]).launch();\n\n}", "file_path": "rustapi/linebot-rust/src/main.rs", "rank": 75, "score": 149200.1819049993 }, { "content": "fn main() {\n\n code8_1();\n\n code8_2();\n\n code8_3();\n\n code8_4();\n\n code8_5();\n\n\n\n}\n\n\n", "file_path": "jissen/1st/code8/src/main.rs", "rank": 77, "score": 149200.1819049993 }, { "content": "fn main() {\n\n fn5_2();\n\n fn5_3();\n\n fn5_4();\n\n}\n\n\n", "file_path": "jissen/2nd/code5/src/main.rs", "rank": 78, "score": 149200.1819049993 }, { "content": "fn main() {\n\n let text = r#\"{\"name\": \"hoge\", \"age\": 18, \"phones\": [\"+44 1234567\", \"+44 0987654\"]}\"#;\n\n let structure = serde_json::from_str::<Person>(text);\n\n println!(\"{:?}\", structure);\n\n\n\n let person = Person {\n\n name: \"fuga\".to_string(),\n\n age: 14,\n\n phones: vec![\"+44 135790\".to_string(),],\n\n };\n\n let json = serde_json::to_string(&person);\n\n println!(\"{}\", json.unwrap());\n\n}\n\n\n", "file_path": "sample_code/serde/src/main.rs", "rank": 79, "score": 149200.1819049993 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "jissen/2nd/code3/src/main.rs", "rank": 80, "score": 149200.1819049993 }, { "content": "fn main() {\n\n let exp = \"6.1 5.2 4.3 * + 3.4 2.5 / 1.6 * -\";\n\n let ans = rpn(exp);\n\n debug_assert_eq!(\"26.2840\", format!(\"{:.4}\", ans));\n\n println!(\"{} = {:.4}\", exp, ans);\n\n}\n\n\n", "file_path": "jissen/1st/code2/rpn/src/main.rs", "rank": 81, "score": 148008.4687881754 }, { "content": "fn main() {\n\n let mut circle = Circle {radius: 10};\n\n println!(\"Circle's diameter: {}\", circle.diameter());\n\n let circle2: u32 = circle.diameter();\n\n println!(\"Circle's diameter: {}\", circle2);\n\n circle.double_diameter();\n\n println!(\"Circle's diameter: {}\", circle.diameter());\n\n let circle = Circle::small_circle();\n\n println!(\"Circle's diameter: {}\", circle.diameter());\n\n let mut s = \"aaa\".to_string();\n\n s.push_str(\"aaa\");\n\n // println!();\n\n\n\n let light = Light::Green;\n\n let action = match light {\n\n Light::Green => \"Go\",\n\n Light::_Yellow => \"Proceed with caution\",\n\n Light::_Red => \"Stop\",\n\n };\n\n println!(\"Green: {}\", action);\n", "file_path": "jissen/1st/code6/sample/src/main.rs", "rank": 82, "score": 148008.4687881754 }, { "content": "fn main() {\n\n rocket::ignite()\n\n .attach(LogsDbConn::fairing())\n\n .launch();\n\n}\n\n\n", "file_path": "sample_code/rocket_database/src/main.rs", "rank": 83, "score": 148008.4687881754 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "jissen/2nd/leap-year/src/main.rs", "rank": 84, "score": 148008.4687881754 }, { "content": "fn main() {\n\n println!(\"Hell world{}, {}\", \"aaaa\", \"aaaa\");\n\n \n\n}\n\n\n", "file_path": "jissen/1st/code2/hello/src/main.rs", "rank": 85, "score": 148008.4687881754 }, { "content": "fn main() {\n\n let mut year = String::new();\n\n print!(\"Please input a year to check if it is a leap year:\");\n\n io::stdout().flush().unwrap();\n\n io::stdin().read_line(&mut year).unwrap();\n\n let year = year.trim().parse::<i32>().unwrap();\n\n\n\n if is_leap_year(year) {\n\n println!(\"{} is a leap year\", year);\n\n } else {\n\n println!(\"{} is not a leap year\", year);\n\n }\n\n}\n\n\n", "file_path": "jissen/1st/code6/leap-year/src/main.rs", "rank": 86, "score": 146852.38811884832 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n\n\n\n\n\npub enum SortOrder {\n\n Ascending,\n\n Descending,\n\n}\n\n\n", "file_path": "jissen/2nd/code3_1/src/lib.rs", "rank": 89, "score": 141311.1760955921 }, { "content": "fn main() {\n\n if let Some(n) = env::args().nth(1) {\n\n let bits = u32::from_str(&n).expect(\"error: parsing argument\");\n\n run_sorts(bits);\n\n } else {\n\n eprintln!(\n\n \"Usage {} <number of elements in bits>\",\n\n env::args().nth(0).unwrap()\n\n );\n\n std::process::exit(1);\n\n }\n\n}\n\n\n\n\n", "file_path": "jissen/2nd/code3/examples/benchmark.rs", "rank": 90, "score": 141311.1760955921 }, { "content": "fn main() {\n\n if let Some(n) = env::args().nth(1) {\n\n let bits = u32::from_str(&n).expect(\"error: parsing argument\");\n\n run_sorts(bits);\n\n } else {\n\n eprintln!(\n\n \"Usage {} <number of elements in bits>\",\n\n env::args().nth(0).unwrap()\n\n );\n\n std::process::exit(1);\n\n }\n\n}\n\n\n\n\n", "file_path": "jissen/2nd/code3_1/examples/benchmark.rs", "rank": 91, "score": 141311.1760955921 }, { "content": "fn main() {\n\n if let Some(n) = env::args().nth(1) {\n\n let bits = u32::from_str(&n).expect(\"error parsing argument\");\n\n run_sorts(bits);\n\n \n\n run_sorts2(bits);\n\n } else {\n\n eprintln!(\n\n \"Usage {} <number of elements in bits>\",\n\n env::args().nth(0).unwrap()\n\n );\n\n std::process::exit(1);\n\n }\n\n}\n\n\n", "file_path": "jissen/1st/code3/bitonic-sorter/examples/benchmark.rs", "rank": 92, "score": 138612.0830933447 }, { "content": "fn _print_point3(p: impl Coordinates) {\n\n let p = p.to_cartesian();\n\n println!(\"x = {}, y = {}\", p.x, p.y);\n\n}\n\n\n", "file_path": "jissen/1st/code8/src/main.rs", "rank": 93, "score": 134202.93737528296 }, { "content": "fn _print_point4<P>(p: &P)\n\nwhere\n\n P: Coordinates + Clone,\n\n{\n\n let p = p.clone().to_cartesian();\n\n println!(\"x = {}, y = {}\", p.x, p.y);\n\n}\n", "file_path": "jissen/1st/code8/src/main.rs", "rank": 94, "score": 134202.93737528296 }, { "content": "fn _print_point2<P>(p: P)\n\nwhere\n\n P: Coordinates,\n\n{\n\n let p = p.to_cartesian();\n\n println!(\"x = {}, y = {}\", p.x, p.y);\n\n}\n\n\n", "file_path": "jissen/1st/code8/src/main.rs", "rank": 95, "score": 134202.93737528296 }, { "content": "#[get(\"/\")]\n\npub fn index() -> &'static str {\n\n \"Hello, world\"\n\n}\n\n\n", "file_path": "rocket-webapi/src/routes.rs", "rank": 96, "score": 134021.89506065063 }, { "content": "fn print_point<P: Coordinates>(p: P) {\n\n let p = p.to_cartesian();\n\n println!(\"x = {}, y = {}\", p.x, p.y);\n\n}\n\n\n", "file_path": "jissen/1st/code8/src/main.rs", "rank": 97, "score": 131366.92757300846 }, { "content": "fn main() -> Result<(), Box<std::error::Error>> {\n\n let tarou = Person { name: \"太郎\".to_string(), age: 18 };\n\n let json = serde_json::to_string(&tarou)?;\n\n println!(\"{}\", json);\n\n\n\n Ok(())\n\n}", "file_path": "rustapi/sample/json/src/main.rs", "rank": 98, "score": 131190.66007252224 }, { "content": "struct Vertex(i32, i32);\n\n\n\n// struct UniqueValue;\n\n\n", "file_path": "jissen/1st/code5/src/main.rs", "rank": 99, "score": 129014.6198589866 } ]
Rust
src/lt.rs
Others/fountain_codes
798ea7240b1714958a5cf7e5af0f0c2048ce011e
use std::cmp; use std::collections::{HashMap, HashSet}; use std::fmt::{self, Debug, Formatter}; use std::hash::{Hash, Hasher}; use std::io::{self, Cursor}; use std::ops::{BitXor, BitXorAssign, Index}; use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt}; use super::{Client, CreationError, Data, Decoder, Encoder, Metadata, Packet, PartialEncoder, Source}; use super::distributions::{Distribution, RobustSolitonDistribution}; const DEFAULT_FAILURE_PROBABILITY: f64 = 0.1; const DEFAULT_HINT_CONSTANT: f64 = 0.3; pub struct LtSource { blocks: Vec<Block>, distribution: Distribution } impl Source<LtPacket> for LtSource { fn new(metadata: Metadata, data: Data) -> Result<Self, CreationError> { let data_bytes = metadata.data_bytes(); if data_bytes == 0 { return Err(CreationError::DataZeroBytes); } if data_bytes != data.len() as u64 { return Err(CreationError::InvalidMetadata); } let extra_block = cmp::min((data_bytes % BLOCK_BYTES as u64), 1); let block_count = (data_bytes / (BLOCK_BYTES as u64)) + extra_block; if block_count > (u32::max_value() as u64) { return Err(CreationError::DataTooBig) } let mut blocks: Vec<Block> = Vec::with_capacity(block_count as usize); for chunk in data.chunks(BLOCK_BYTES) { let mut block = [0; BLOCK_BYTES]; for i in 0..chunk.len() { block[i] = chunk[i]; } blocks.push(Block::from_data(block)); } let density_function = RobustSolitonDistribution::new_using_heuristic(DEFAULT_FAILURE_PROBABILITY, DEFAULT_HINT_CONSTANT); let distribution = Distribution::new(&density_function, block_count as u32).map_err(|e| CreationError::RandomInitializationError(e))?; Ok(LtSource{ blocks: blocks, distribution: distribution }) } } fn choose_blocks_to_combine(distribution: &Distribution, blocks: &mut Vec<u32>) { let blocks_to_combine = cmp::min(blocks.len(), distribution.query() as usize); for i in 0..blocks_to_combine { let j = distribution.query_interior_rng_usize(i, blocks.len()); blocks.swap(i, j); } blocks.truncate(blocks_to_combine as usize); } impl Encoder<LtPacket> for LtSource { fn create_packet(&self) -> LtPacket { let block_count = self.blocks.len(); let mut blocks: Vec<u32> = Vec::with_capacity(block_count); for i in 0..block_count{ blocks.push(i as u32); } choose_blocks_to_combine(&self.distribution, &mut blocks); let mut new_block = Block::new(); for block_id in &blocks { new_block ^= self.blocks.index(*block_id as usize); } LtPacket::new(blocks, new_block) } } #[derive(Debug)] pub struct LtClient { metadata: Metadata, block_count: u32, distribution: Distribution, decoded_blocks: HashMap<u32, Block>, stale_packets: HashSet<LtPacket> } impl Client<LtPacket> for LtClient { fn new(metadata: Metadata) -> Result<Self, CreationError> { let data_bytes = metadata.data_bytes(); if data_bytes == 0 { return Err(CreationError::DataZeroBytes) } let extra_block = cmp::min((data_bytes % BLOCK_BYTES as u64), 1); let block_count = (data_bytes / (BLOCK_BYTES as u64)) + extra_block; if block_count > (u32::max_value() as u64) { return Err(CreationError::DataTooBig) } let density_function = RobustSolitonDistribution::new_using_heuristic(DEFAULT_FAILURE_PROBABILITY, DEFAULT_HINT_CONSTANT); let distribution = Distribution::new(&density_function, block_count as u32).map_err(|e| CreationError::RandomInitializationError(e))?; Ok(LtClient { metadata: metadata, block_count: block_count as u32, distribution: distribution, decoded_blocks: HashMap::new(), stale_packets: HashSet::new() }) } } impl PartialEncoder<LtPacket> for LtClient { fn try_create_packet(&self) -> Option<LtPacket> { let mut blocks: Vec<u32> = Vec::with_capacity(self.decoded_blocks.len()); for &key in self.decoded_blocks.keys() { blocks.push(key); } if blocks.len() == 0 { return None; } choose_blocks_to_combine(&self.distribution, &mut blocks); let mut new_block = Block::new(); for block_id in &blocks { new_block = new_block ^ self.decoded_blocks.index(block_id); } return Some(LtPacket::new(blocks, new_block)); } } impl Decoder<LtPacket> for LtClient { fn receive_packet(&mut self, packet: LtPacket) { let mut fresh_packets: Vec<LtPacket> = vec![packet]; while let Some(packet) = fresh_packets.pop() { let mut xor: Vec<u32> = Vec::with_capacity(packet.combined_blocks.len()); let mut multiple_remaining = false; let mut remainder: Option<u32> = None; for block_id in &packet.combined_blocks { if self.decoded_blocks.contains_key(&block_id) { xor.push(*block_id); } else { remainder = match remainder { Option::None => { Some(*block_id) } Option::Some(remainder) => { multiple_remaining = true; Some(remainder) } }; if multiple_remaining { break; } } } if multiple_remaining || remainder.is_none(){ self.stale_packets.insert(packet); }else { let block_id = remainder.unwrap(); if !self.decoded_blocks.contains_key(&block_id) { let mut data = packet.data; for block_id in xor { data = data ^ self.decoded_blocks.get(&block_id).expect("Blocks selected to be xor'd must exist"); } self.decoded_blocks.insert(block_id, data); let mut refreshed_packets: Vec<LtPacket> = Vec::new(); for stale_packet in &self.stale_packets { if stale_packet.combined_blocks.contains(&block_id) { refreshed_packets.push(stale_packet.clone()); } } for packet in refreshed_packets { self.stale_packets.remove(&packet); fresh_packets.push(packet); } } } } } fn get_result(&self) -> Option<Data> { if self.decoded_blocks.len() < self.block_count as usize { return None; } let mut block_bytes: Vec<u8> = Vec::with_capacity(self.metadata.data_bytes() as usize); for i in 0..self.block_count { let block_option = self.decoded_blocks.get(&i); if block_option.is_none() { return None; } block_bytes.extend_from_slice(block_option.unwrap().data()); } block_bytes.truncate(self.metadata.data_bytes() as usize); Some(block_bytes) } fn decoding_progress(&self) -> f64 { (self.decoded_blocks.len() as f64) / (self.block_count as f64) } } const BLOCK_BYTES: usize = 1024; struct Block { data: [u8; BLOCK_BYTES] } impl Block { fn new() -> Block { Block { data: [0; BLOCK_BYTES] } } fn from_data(data: [u8; BLOCK_BYTES]) -> Block { Block { data: data } } fn data(&self) -> &[u8] { &self.data[..] } } impl<'a> BitXorAssign<&'a Block> for Block { fn bitxor_assign(&mut self, rhs: &'a Block) { for i in 0..BLOCK_BYTES { self.data[i] ^= rhs.data[i] } } } impl<'a> BitXor<&'a Block> for Block { type Output = Self; fn bitxor(self, rhs: &'a Block) -> Self { let mut result = self; result ^= rhs; return result; } } impl Clone for Block { fn clone(&self) -> Self { Block { data: self.data } } } impl Debug for Block { fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { fmt.write_str(&format!("{:?}", &self.data[..])) } } impl PartialEq for Block { fn eq(&self, other: &Self) -> bool { &self.data[..] == &other.data[..] } } impl Eq for Block {} impl Hash for Block { fn hash<H: Hasher>(&self, state: &mut H) { state.write(&self.data[..]) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct LtPacket { combined_blocks: Vec<u32>, data: Block } impl LtPacket { fn new(combined_blocks: Vec<u32>, data: Block) -> LtPacket { LtPacket { combined_blocks: combined_blocks, data: data } } } impl Packet for LtPacket { fn from_bytes(bytes: Vec<u8>) -> io::Result<LtPacket> { let mut rdr = Cursor::new(bytes); let block_count = rdr.read_u32::<BigEndian>()?; let mut combined_blocks = Vec::new(); for _ in 0..block_count { let block = rdr.read_u32::<BigEndian>()?; combined_blocks.push(block); } let mut block_data = [0; BLOCK_BYTES]; for i in 0..BLOCK_BYTES { block_data[i] = rdr.read_u8()?; } let block = Block::from_data(block_data); Ok(LtPacket::new(combined_blocks, block)) } fn to_bytes(&self) -> io::Result<Vec<u8>> { let mut dest = Vec::new(); dest.write_u32::<BigEndian>(self.combined_blocks.len() as u32)?; for block in &self.combined_blocks { dest.write_u32::<BigEndian>(*block)?; } for byte in self.data.data() { dest.write_u8(*byte)?; } Ok(dest) } } #[cfg(test)] mod tests { use super::super::Packet; use super::{BLOCK_BYTES, Block, LtPacket}; #[test] fn block_equals() { assert_eq!(Block::new() ^ &Block::new(), Block::new()); let one_block = Block::from_data([1; BLOCK_BYTES]); assert_eq!(one_block.clone() ^ &Block::new(), one_block); } #[test] fn packet_round_trips() { let combined_blocks = vec![1, 2, 3, 4, 5]; let block_data = [0; BLOCK_BYTES]; let packet = LtPacket::new(combined_blocks.clone(), Block::from_data(block_data).clone()); let bytes = packet.clone().to_bytes().unwrap(); assert_eq!(LtPacket::from_bytes(bytes).unwrap(), packet); } }
use std::cmp; use std::collections::{HashMap, HashSet}; use std::fmt::{self, Debug, Formatter}; use std::hash::{Hash, Hasher}; use std::io::{self, Cursor}; use std::ops::{BitXor, BitXorAssign, Index}; use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt}; use super::{Client, CreationError, Data, Decoder, Encoder, Metadata, Packet, PartialEncoder, Source}; use super::distributions::{Distribution, RobustSolitonDistribution}; const DEFAULT_FAILURE_PROBABILITY: f64 = 0.1; const DEFAULT_HINT_CONSTANT: f64 = 0.3; pub struct LtSource { blocks: Vec<Block>, distribution: Distribution } impl Source<LtPacket> for LtSource { fn new(metadata: Metadata, data: Data) -> Result<Self, CreationError> { let data_bytes = metadata.data_bytes(); if data_bytes == 0 { return Err(CreationError::DataZeroBytes); } if data_bytes != data.len() as u64 { return Err(CreationError::InvalidMetadata); } let extra_block = cmp::min((data_bytes % BLOCK_BYTES as u64), 1); let block_count = (data_bytes / (BLOCK_BYTES as u64)) + extra_block; if block_count > (u32::max_value() as u64) { return Err(CreationError::DataTooBig) } let mut blocks: Vec<Block> = Vec::with_capacity(block_count as usize); for chunk in data.chunks(BLOCK_BYTES) { let mut block = [0; BLOCK_BYTES]; for i in 0..chunk.len() { block[i] = chunk[i]; } blocks.push(Block::from_data(block)); } let density_function = RobustSolitonDistribution::new_using_heuristic(DEFAULT_FAILURE_PROBABILITY, DEFAULT_HINT_CONSTANT); let distribution = Distribution::new(&density_function, block_count as u32).map_err(|e| CreationError::RandomInitializationError(e))?; Ok(LtSource{ blocks: blocks, distribution: distribution }) } } fn choose_blocks_to_combine(distribution: &Distribution, blocks: &mut Vec<u32>) { let blocks_to_combine = cmp::min(blocks.len(), distribution.query() as usize); fo
impl Encoder<LtPacket> for LtSource { fn create_packet(&self) -> LtPacket { let block_count = self.blocks.len(); let mut blocks: Vec<u32> = Vec::with_capacity(block_count); for i in 0..block_count{ blocks.push(i as u32); } choose_blocks_to_combine(&self.distribution, &mut blocks); let mut new_block = Block::new(); for block_id in &blocks { new_block ^= self.blocks.index(*block_id as usize); } LtPacket::new(blocks, new_block) } } #[derive(Debug)] pub struct LtClient { metadata: Metadata, block_count: u32, distribution: Distribution, decoded_blocks: HashMap<u32, Block>, stale_packets: HashSet<LtPacket> } impl Client<LtPacket> for LtClient { fn new(metadata: Metadata) -> Result<Self, CreationError> { let data_bytes = metadata.data_bytes(); if data_bytes == 0 { return Err(CreationError::DataZeroBytes) } let extra_block = cmp::min((data_bytes % BLOCK_BYTES as u64), 1); let block_count = (data_bytes / (BLOCK_BYTES as u64)) + extra_block; if block_count > (u32::max_value() as u64) { return Err(CreationError::DataTooBig) } let density_function = RobustSolitonDistribution::new_using_heuristic(DEFAULT_FAILURE_PROBABILITY, DEFAULT_HINT_CONSTANT); let distribution = Distribution::new(&density_function, block_count as u32).map_err(|e| CreationError::RandomInitializationError(e))?; Ok(LtClient { metadata: metadata, block_count: block_count as u32, distribution: distribution, decoded_blocks: HashMap::new(), stale_packets: HashSet::new() }) } } impl PartialEncoder<LtPacket> for LtClient { fn try_create_packet(&self) -> Option<LtPacket> { let mut blocks: Vec<u32> = Vec::with_capacity(self.decoded_blocks.len()); for &key in self.decoded_blocks.keys() { blocks.push(key); } if blocks.len() == 0 { return None; } choose_blocks_to_combine(&self.distribution, &mut blocks); let mut new_block = Block::new(); for block_id in &blocks { new_block = new_block ^ self.decoded_blocks.index(block_id); } return Some(LtPacket::new(blocks, new_block)); } } impl Decoder<LtPacket> for LtClient { fn receive_packet(&mut self, packet: LtPacket) { let mut fresh_packets: Vec<LtPacket> = vec![packet]; while let Some(packet) = fresh_packets.pop() { let mut xor: Vec<u32> = Vec::with_capacity(packet.combined_blocks.len()); let mut multiple_remaining = false; let mut remainder: Option<u32> = None; for block_id in &packet.combined_blocks { if self.decoded_blocks.contains_key(&block_id) { xor.push(*block_id); } else { remainder = match remainder { Option::None => { Some(*block_id) } Option::Some(remainder) => { multiple_remaining = true; Some(remainder) } }; if multiple_remaining { break; } } } if multiple_remaining || remainder.is_none(){ self.stale_packets.insert(packet); }else { let block_id = remainder.unwrap(); if !self.decoded_blocks.contains_key(&block_id) { let mut data = packet.data; for block_id in xor { data = data ^ self.decoded_blocks.get(&block_id).expect("Blocks selected to be xor'd must exist"); } self.decoded_blocks.insert(block_id, data); let mut refreshed_packets: Vec<LtPacket> = Vec::new(); for stale_packet in &self.stale_packets { if stale_packet.combined_blocks.contains(&block_id) { refreshed_packets.push(stale_packet.clone()); } } for packet in refreshed_packets { self.stale_packets.remove(&packet); fresh_packets.push(packet); } } } } } fn get_result(&self) -> Option<Data> { if self.decoded_blocks.len() < self.block_count as usize { return None; } let mut block_bytes: Vec<u8> = Vec::with_capacity(self.metadata.data_bytes() as usize); for i in 0..self.block_count { let block_option = self.decoded_blocks.get(&i); if block_option.is_none() { return None; } block_bytes.extend_from_slice(block_option.unwrap().data()); } block_bytes.truncate(self.metadata.data_bytes() as usize); Some(block_bytes) } fn decoding_progress(&self) -> f64 { (self.decoded_blocks.len() as f64) / (self.block_count as f64) } } const BLOCK_BYTES: usize = 1024; struct Block { data: [u8; BLOCK_BYTES] } impl Block { fn new() -> Block { Block { data: [0; BLOCK_BYTES] } } fn from_data(data: [u8; BLOCK_BYTES]) -> Block { Block { data: data } } fn data(&self) -> &[u8] { &self.data[..] } } impl<'a> BitXorAssign<&'a Block> for Block { fn bitxor_assign(&mut self, rhs: &'a Block) { for i in 0..BLOCK_BYTES { self.data[i] ^= rhs.data[i] } } } impl<'a> BitXor<&'a Block> for Block { type Output = Self; fn bitxor(self, rhs: &'a Block) -> Self { let mut result = self; result ^= rhs; return result; } } impl Clone for Block { fn clone(&self) -> Self { Block { data: self.data } } } impl Debug for Block { fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { fmt.write_str(&format!("{:?}", &self.data[..])) } } impl PartialEq for Block { fn eq(&self, other: &Self) -> bool { &self.data[..] == &other.data[..] } } impl Eq for Block {} impl Hash for Block { fn hash<H: Hasher>(&self, state: &mut H) { state.write(&self.data[..]) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct LtPacket { combined_blocks: Vec<u32>, data: Block } impl LtPacket { fn new(combined_blocks: Vec<u32>, data: Block) -> LtPacket { LtPacket { combined_blocks: combined_blocks, data: data } } } impl Packet for LtPacket { fn from_bytes(bytes: Vec<u8>) -> io::Result<LtPacket> { let mut rdr = Cursor::new(bytes); let block_count = rdr.read_u32::<BigEndian>()?; let mut combined_blocks = Vec::new(); for _ in 0..block_count { let block = rdr.read_u32::<BigEndian>()?; combined_blocks.push(block); } let mut block_data = [0; BLOCK_BYTES]; for i in 0..BLOCK_BYTES { block_data[i] = rdr.read_u8()?; } let block = Block::from_data(block_data); Ok(LtPacket::new(combined_blocks, block)) } fn to_bytes(&self) -> io::Result<Vec<u8>> { let mut dest = Vec::new(); dest.write_u32::<BigEndian>(self.combined_blocks.len() as u32)?; for block in &self.combined_blocks { dest.write_u32::<BigEndian>(*block)?; } for byte in self.data.data() { dest.write_u8(*byte)?; } Ok(dest) } } #[cfg(test)] mod tests { use super::super::Packet; use super::{BLOCK_BYTES, Block, LtPacket}; #[test] fn block_equals() { assert_eq!(Block::new() ^ &Block::new(), Block::new()); let one_block = Block::from_data([1; BLOCK_BYTES]); assert_eq!(one_block.clone() ^ &Block::new(), one_block); } #[test] fn packet_round_trips() { let combined_blocks = vec![1, 2, 3, 4, 5]; let block_data = [0; BLOCK_BYTES]; let packet = LtPacket::new(combined_blocks.clone(), Block::from_data(block_data).clone()); let bytes = packet.clone().to_bytes().unwrap(); assert_eq!(LtPacket::from_bytes(bytes).unwrap(), packet); } }
r i in 0..blocks_to_combine { let j = distribution.query_interior_rng_usize(i, blocks.len()); blocks.swap(i, j); } blocks.truncate(blocks_to_combine as usize); }
function_block-function_prefixed
[ { "content": "pub trait Source<P: Packet> : Encoder<P> + Sized {\n\n fn new(metadata: Metadata, data: Data) -> Result<Self, CreationError>;\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 2, "score": 67255.24559277261 }, { "content": "pub trait Decoder<P: Packet> {\n\n fn receive_packet(&mut self, packet: P);\n\n\n\n fn decoding_progress(&self) -> f64;\n\n\n\n fn get_result(&self) -> Option<Data>;\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 3, "score": 65225.4612614593 }, { "content": "pub trait Encoder<P: Packet> {\n\n fn create_packet(&self) -> P;\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 4, "score": 65027.77068084592 }, { "content": "pub trait PartialEncoder<P: Packet> {\n\n fn try_create_packet(&self) -> Option<P>;\n\n}\n\n\n\nimpl<P: Packet> PartialEncoder<P> for Encoder<P> {\n\n fn try_create_packet(&self) -> Option<P> {\n\n Some(self.create_packet())\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 5, "score": 62517.72185856068 }, { "content": "// TODO: Figure out if Clients should be generic over some sort of \"parameter\" type\n\npub trait Client<P: Packet> : Decoder<P> + PartialEncoder<P> + Sized {\n\n fn new(metadata: Metadata) -> Result<Self, CreationError>;\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum CreationError {\n\n DataZeroBytes,\n\n DataTooBig,\n\n InvalidMetadata,\n\n RandomInitializationError(io::Error)\n\n}", "file_path": "src/lib.rs", "rank": 6, "score": 58500.61267048745 }, { "content": "pub trait Packet: Sized {\n\n fn from_bytes(bytes: Vec<u8>) -> io::Result<Self>;\n\n\n\n fn to_bytes(&self) -> io::Result<Vec<u8>>;\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 7, "score": 53605.78385996136 }, { "content": "// Define various ProbabilityDensityFunctions\n\npub trait ProbabilityDensityFunction {\n\n fn density(&self, point: u32, limit: u32) -> f64;\n\n}\n\n\n\npub struct IdealSolitonDistribution;\n\n\n\nimpl ProbabilityDensityFunction for IdealSolitonDistribution {\n\n fn density(&self, point: u32, limit: u32) -> f64 {\n\n if point == 0 || point > limit {\n\n panic!(\"Point must be in the range (0, limit], but was really {}! (the limit was {})\", point, limit);\n\n }else if point == 1 {\n\n 1.0 / (limit as f64)\n\n } else {\n\n 1.0 / ((point as f64) * (point as f64 - 1.0))\n\n }\n\n }\n\n}\n\n\n\npub struct RobustSolitonDistribution {\n\n failure_probability: f64,\n", "file_path": "src/distributions.rs", "rank": 8, "score": 41185.80807719772 }, { "content": "fn random_bytes(byte_count: usize) -> Vec<u8> {\n\n let mut result: Vec<u8> = Vec::with_capacity(byte_count);\n\n while result.len() < byte_count {\n\n result.push(rand::random());\n\n }\n\n result\n\n}", "file_path": "tests/lt_coding_test.rs", "rank": 9, "score": 33716.620038223846 }, { "content": "// TODO: Make Data more generic\n\ntype Data = Vec<u8>;\n\n\n", "file_path": "src/lib.rs", "rank": 10, "score": 29402.544079463347 }, { "content": "#[test]\n\nfn test_lt_coding_medium() {\n\n let byte_count: usize = 15 * 1024 * 1024;\n\n\n\n let metadata = Metadata::new(byte_count as u64);\n\n let data = random_bytes(byte_count);\n\n\n\n let source: LtSource = LtSource::new(metadata, data).unwrap();\n\n let mut client: LtClient = LtClient::new(metadata).unwrap();\n\n\n\n // Going over a 100000 packets means the decoding almost certainly failed\n\n for _ in 0..100000 {\n\n let packet = source.create_packet();\n\n client.receive_packet(packet);\n\n\n\n println!(\"Decoding progress {}\", client.decoding_progress());\n\n if client.get_result().is_some() {\n\n return;\n\n }\n\n }\n\n assert!(client.get_result().is_some());\n\n}\n\n\n\n\n", "file_path": "tests/lt_coding_test.rs", "rank": 11, "score": 28490.853292799144 }, { "content": "#[test]\n\nfn test_lt_coding_small() {\n\n let byte_count: usize = 100;\n\n\n\n let metadata = Metadata::new(byte_count as u64);\n\n let data = random_bytes(byte_count);\n\n\n\n let source: LtSource = LtSource::new(metadata, data.clone()).unwrap();\n\n let mut client: LtClient = LtClient::new(metadata).unwrap();\n\n\n\n let packet = source.create_packet();\n\n println!(\"Packet {:?}\", packet);\n\n\n\n println!(\"Client pre-packet {:?}\", client);\n\n client.receive_packet(packet);\n\n println!(\"Client post-packet {:?}\", client);\n\n\n\n let result = client.get_result().expect(\"One packet should be enough to transmit a small ammount of data...\");\n\n assert_eq!(result, data);\n\n}\n\n\n\n// Bench:\n\n// When DEFAULT_FAILURE_PROBABILITY = 0.01 & DEFAULT_HINT_CONSTANT = 0.3\n\n// Finished after 21339 iterations\n\n// When DEFAULT_FAILURE_PROBABILITY = 0.1 & DEFAULT_HINT_CONSTANT = 0.3\n\n// Finished after 19533, 19680 iterations\n", "file_path": "tests/lt_coding_test.rs", "rank": 12, "score": 28490.853292799144 }, { "content": "// TODO: Add fingerprint to Metadata\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Metadata {\n\n data_bytes: u64\n\n}\n\n\n\nimpl Metadata {\n\n pub fn new(data_bytes: u64) -> Metadata {\n\n Metadata {\n\n data_bytes: data_bytes\n\n }\n\n }\n\n\n\n pub fn data_bytes(&self) -> u64 {\n\n self.data_bytes\n\n }\n\n}", "file_path": "src/metadata.rs", "rank": 13, "score": 20360.071865749513 }, { "content": "use std::cell::Cell;\n\nuse std::fmt::{self, Debug, Formatter};\n\nuse std::io;\n\n\n\nuse rand::{Rng, StdRng};\n\n\n\npub struct Distribution {\n\n limit: u32,\n\n // TODO: Figure out how to get rid of interior mutability\n\n rng: Cell<StdRng>,\n\n // TODO: Decide if there should be a limit to the size of the table, so we don't use a massive amount of memory on large limits\n\n cumulative_probability_table: Vec<f64>\n\n}\n\n\n\nimpl Distribution {\n\n pub fn new(density_function: &ProbabilityDensityFunction, limit: u32) -> io::Result<Distribution> {\n\n let rng = StdRng::new()?;\n\n\n\n let mut lookup_table: Vec<f64> = Vec::with_capacity(limit as usize);\n\n lookup_table.push(0.0);\n", "file_path": "src/distributions.rs", "rank": 14, "score": 18829.295618726283 }, { "content": " result\n\n }\n\n}\n\n\n\nimpl Debug for Distribution {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n\n fmt.debug_struct(\"Distribution\")\n\n .field(\"limit\", &self.limit)\n\n .field(\"rng\", &\"StdRng\")\n\n .field(\"cumulative_probability_table\", &self.cumulative_probability_table)\n\n .finish()\n\n }\n\n}\n\n\n\n// Define various ProbabilityDensityFunctions\n", "file_path": "src/distributions.rs", "rank": 15, "score": 18828.065158645666 }, { "content": " expected_ripple_size: ExpectedRippleSize\n\n}\n\n\n\nimpl RobustSolitonDistribution {\n\n // TODO: Remove this allow\n\n #[allow(dead_code)]\n\n pub fn new(failure_probability: f64, expected_ripple_size: f64) -> RobustSolitonDistribution {\n\n RobustSolitonDistribution {\n\n failure_probability: failure_probability,\n\n expected_ripple_size: ExpectedRippleSize::Exactly(expected_ripple_size)\n\n }\n\n }\n\n\n\n pub fn new_using_heuristic(failure_probability: f64, hint_constant: f64) -> RobustSolitonDistribution {\n\n RobustSolitonDistribution {\n\n failure_probability: failure_probability,\n\n expected_ripple_size: ExpectedRippleSize::BasedOnHeuristic(hint_constant)\n\n }\n\n }\n\n\n", "file_path": "src/distributions.rs", "rank": 16, "score": 18825.874633889933 }, { "content": " }\n\n }\n\n\n\n panic!(\"Cumulative probabilities don't sum to 1! (limit is {}, probability table is {:?})\", self.limit, self.cumulative_probability_table)\n\n }\n\n\n\n // TODO: Exposing this method is an ugly hack that should be removed\n\n pub fn query_interior_rng_usize(&self, start: usize, end: usize) -> usize {\n\n let mut rng = self.rng.get();\n\n let result = rng.gen_range(start, end);\n\n self.rng.set(rng);\n\n\n\n result\n\n }\n\n\n\n fn query_interior_rng_float(&self) -> f64 {\n\n let mut rng = self.rng.get();\n\n let result = rng.next_f64();\n\n self.rng.set(rng);\n\n\n", "file_path": "src/distributions.rs", "rank": 17, "score": 18825.194156038782 }, { "content": "\n\n let mut cumulative_probability = 0.0;\n\n for i in 1..(limit + 1) {\n\n cumulative_probability += density_function.density(i, limit);\n\n lookup_table.push(cumulative_probability);\n\n }\n\n\n\n Ok(Distribution {\n\n limit: limit,\n\n rng: Cell::new(rng),\n\n cumulative_probability_table: lookup_table\n\n })\n\n }\n\n\n\n pub fn query(&self) -> u32 {\n\n let selector = self.query_interior_rng_float();\n\n\n\n for i in 1..(self.limit + 1) {\n\n if selector < self.cumulative_probability_table[i as usize] {\n\n return i;\n", "file_path": "src/distributions.rs", "rank": 18, "score": 18823.560715370415 }, { "content": " }else if point == switch_point {\n\n (expected_ripple_size * (expected_ripple_size / failure_probability).ln()) / (limit as f64)\n\n }else {\n\n 0.0\n\n }\n\n }\n\n}\n\n\n\nimpl ProbabilityDensityFunction for RobustSolitonDistribution {\n\n fn density(&self, point: u32, limit: u32) -> f64 {\n\n if point == 0 || point > limit {\n\n panic!(\"Point must be in the range (0, limit], but was really {}! (the limit was {})\", point, limit);\n\n }\n\n // Special case this to prevent normally good values of expected_ripple_size from failing\n\n if limit == 1 {\n\n 1.0\n\n } else {\n\n (IdealSolitonDistribution.density(point, limit) +\n\n self.robustness_probability_to_add(point, limit)\n\n ) / self.normalization_factor(limit)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/distributions.rs", "rank": 19, "score": 18822.673028832676 }, { "content": " // Helper methods for the density calculation\n\n fn normalization_factor(&self, limit: u32) -> f64{\n\n let mut normalization_factor = 0.0;\n\n for i in 1..(limit + 1) {\n\n normalization_factor += IdealSolitonDistribution.density(i, limit);\n\n normalization_factor += self.robustness_probability_to_add(i, limit);\n\n }\n\n normalization_factor\n\n }\n\n\n\n fn robustness_probability_to_add(&self, point: u32, limit: u32) -> f64{\n\n let failure_probability = self.failure_probability;\n\n let expected_ripple_size = self.expected_ripple_size.get(limit, self.failure_probability);\n\n\n\n let switch_point = (limit as f64 / expected_ripple_size) as u32;\n\n\n\n if point == 0 || point > limit {\n\n panic!(\"Point must be in the range (0, limit], but was really {}! (the limit was {})\", point, limit);\n\n }else if point < switch_point {\n\n expected_ripple_size / ((point * limit) as f64)\n", "file_path": "src/distributions.rs", "rank": 20, "score": 18822.334115266753 }, { "content": "\n\n\n\n// TODO: Replace the distribution tests\n\n//#[cfg(test)]\n\n//mod test {\n\n// use super::{ideal_soliton_probability_density, robust_soliton_probability_density, expected_ripple_size_heuristic};\n\n//\n\n// #[test]\n\n// fn check_ideal_soliton_for_small_values() {\n\n// assert_eq!(ideal_soliton_probability_density(1, 10), 0.1);\n\n//\n\n// assert_eq!(ideal_soliton_probability_density(2, 10), 0.5);\n\n//\n\n// assert_eq!(ideal_soliton_probability_density(3, 10), 1.0/6.0);\n\n// }\n\n//\n\n// #[test]\n\n// fn robust_soliton_sanity_test() {\n\n// let limit = 100;\n\n// let failure_probability = 0.1;\n", "file_path": "src/distributions.rs", "rank": 21, "score": 18820.867838578193 }, { "content": "// let hint_constant = 0.1;\n\n//\n\n// let expected_ripple_size = expected_ripple_size_heuristic(limit, failure_probability, hint_constant);\n\n//\n\n// let mut cumulative_probability = 0.0;\n\n// for i in 1..20 {\n\n// cumulative_probability += robust_soliton_probability_density(i, limit, failure_probability, expected_ripple_size);\n\n// }\n\n// println!(\"Cumulative probability is {}\", cumulative_probability);\n\n// assert!(cumulative_probability > 0.9);\n\n// }\n\n//}", "file_path": "src/distributions.rs", "rank": 22, "score": 18820.05172792996 }, { "content": "enum ExpectedRippleSize {\n\n // TODO: Remove this allow\n\n #[allow(dead_code)]\n\n Exactly(f64),\n\n BasedOnHeuristic(f64)\n\n}\n\n\n\nimpl ExpectedRippleSize {\n\n fn get(&self, limit: u32, failure_probability: f64) -> f64 {\n\n match self {\n\n &ExpectedRippleSize::Exactly(val) => {\n\n val\n\n }\n\n // TODO: Figure out if the hint_constant can sensibly be bigger than 1\n\n &ExpectedRippleSize::BasedOnHeuristic(hint_constant) => {\n\n hint_constant * (limit as f64 / failure_probability).ln() * (limit as f64).sqrt()\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/distributions.rs", "rank": 23, "score": 15301.726467387462 }, { "content": "extern crate byteorder;\n\nextern crate rand;\n\n\n\nuse std::io;\n\n\n\nmod metadata;\n\npub use metadata::Metadata;\n\n\n\npub mod lt;\n\npub use lt::{LtClient, LtSource};\n\n\n\nmod distributions;\n\n\n\n// TODO: Make Data more generic\n", "file_path": "src/lib.rs", "rank": 32, "score": 11.021118787338438 }, { "content": "extern crate fountain_codes;\n\nextern crate rand;\n\n\n\nuse fountain_codes::{Metadata, Client, Source, Encoder, Decoder, LtSource, LtClient};\n\n\n\n#[test]\n", "file_path": "tests/lt_coding_test.rs", "rank": 39, "score": 9.120885437072664 } ]
Rust
src/xcb/xconn.rs
psychon/penrose
69912708f7982b7d6a61b9fa549040fb4c3625fb
/*! * API wrapper for talking to the X server using XCB * * The crate used by penrose for talking to the X server is rust-xcb, which * is a set of bindings for the C level XCB library that are autogenerated * from an XML spec. The XML files can be found * [here](https://github.com/rtbo/rust-xcb/tree/master/xml) and are useful * as reference for how the API works. Sections have been converted and added * to the documentation of the method calls and enums present in this module. * * [EWMH](https://specifications.freedesktop.org/wm-spec/wm-spec-1.3.html) * [Xlib manual](https://tronche.com/gui/x/xlib/) */ use crate::{ core::{ bindings::{KeyBindings, MouseBindings}, data_types::{Point, PropVal, Region, WinAttr, WinConfig, WinId, WinType}, manager::WindowManager, screen::Screen, xconnection::{ Atom, XConn, XEvent, AUTO_FLOAT_WINDOW_TYPES, EWMH_SUPPORTED_ATOMS, UNMANAGED_WINDOW_TYPES, }, }, xcb::{Api, XcbApi}, Result, }; use std::{collections::HashMap, str::FromStr}; const WM_NAME: &str = "penrose"; /** * Handles communication with an X server via the XCB library. * * XcbConnection is a minimal implementation that does not make use of the full asyc capabilities * of the underlying C XCB library. **/ #[derive(Debug)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct XcbConnection { api: Api, check_win: WinId, auto_float_types: Vec<u32>, dont_manage_types: Vec<u32>, } impl XcbConnection { pub fn new() -> Result<Self> { let api = Api::new()?; let auto_float_types: Vec<u32> = AUTO_FLOAT_WINDOW_TYPES .iter() .map(|a| api.known_atom(*a)) .collect(); let dont_manage_types: Vec<u32> = UNMANAGED_WINDOW_TYPES .iter() .map(|a| api.known_atom(*a)) .collect(); api.set_randr_notify_mask()?; let check_win = api.create_window(WinType::CheckWin, Region::new(0, 0, 1, 1), false)?; Ok(Self { api, check_win, auto_float_types, dont_manage_types, }) } fn window_has_type_in(&self, id: WinId, win_types: &[u32]) -> bool { if let Ok(atom) = self.api.get_atom_prop(id, Atom::NetWmWindowType) { return win_types.contains(&atom); } false } pub fn xcb_connection(&self) -> &xcb::Connection { &self.api.conn() } pub fn api(&self) -> &Api { &self.api } pub fn api_mut(&mut self) -> &mut Api { &mut self.api } pub fn known_atoms(&self) -> &HashMap<Atom, u32> { &self.api.known_atoms() } } impl WindowManager<XcbConnection> { pub fn xcb_connection(&self) -> &xcb::Connection { &self.conn().xcb_connection() } pub fn known_atoms(&self) -> &HashMap<Atom, u32> { &self.conn().known_atoms() } } impl XConn for XcbConnection { #[cfg(feature = "serde")] fn hydrate(&mut self) -> Result<()> { Ok(self.api.hydrate()?) } fn flush(&self) -> bool { self.api.flush() } fn wait_for_event(&self) -> Result<XEvent> { Ok(self.api.wait_for_event()?) } fn current_outputs(&self) -> Vec<Screen> { match self.api.current_screens() { Ok(screens) => screens, Err(e) => panic!("{}", e), } } fn cursor_position(&self) -> Point { self.api.cursor_position() } fn position_window(&self, id: WinId, reg: Region, border: u32, stack_above: bool) { let mut data = vec![WinConfig::Position(reg), WinConfig::BorderPx(border)]; if stack_above { data.push(WinConfig::StackAbove); } self.api.configure_window(id, &data) } fn raise_window(&self, id: WinId) { self.api.configure_window(id, &[WinConfig::StackAbove]) } fn mark_new_window(&self, id: WinId) { let data = &[WinAttr::ClientEventMask]; self.api.set_window_attributes(id, data) } fn map_window(&self, id: WinId) { self.api.map_window(id); } fn unmap_window(&self, id: WinId) { self.api.unmap_window(id); } fn send_client_event(&self, id: WinId, atom_name: &str) -> Result<()> { Ok(self.api.send_client_event(id, atom_name)?) } fn focused_client(&self) -> WinId { self.api.focused_client().unwrap_or(0) } fn focus_client(&self, id: WinId) { self.api.mark_focused_window(id); } fn set_client_border_color(&self, id: WinId, color: u32) { let data = &[WinAttr::BorderColor(color)]; self.api.set_window_attributes(id, data); } fn toggle_client_fullscreen(&self, id: WinId, client_is_fullscreen: bool) { let data = if client_is_fullscreen { 0 } else { self.api.known_atom(Atom::NetWmStateFullscreen) }; self.api .replace_prop(id, Atom::NetWmState, PropVal::Atom(&[data])); } fn grab_keys(&self, key_bindings: &KeyBindings<Self>, mouse_bindings: &MouseBindings<Self>) { self.api.grab_keys(&key_bindings.keys().collect::<Vec<_>>()); self.api.grab_mouse_buttons( &mouse_bindings .keys() .map(|(_, state)| state) .collect::<Vec<_>>(), ); let data = &[WinAttr::RootEventMask]; self.api.set_window_attributes(self.api.root(), data); self.flush(); } fn set_wm_properties(&self, workspaces: &[&str]) { let root = self.api.root(); for &win in &[self.check_win, root] { self.api.replace_prop( win, Atom::NetSupportingWmCheck, PropVal::Window(&[self.check_win]), ); let val = PropVal::Str(WM_NAME); self.api.replace_prop(win, Atom::WmName, val); } let supported = EWMH_SUPPORTED_ATOMS .iter() .map(|a| self.api.known_atom(*a)) .collect::<Vec<u32>>(); let prop = PropVal::Atom(&supported); self.api.replace_prop(root, Atom::NetSupported, prop); self.update_desktops(workspaces); self.api.delete_prop(root, Atom::NetClientList); } fn update_desktops(&self, workspaces: &[&str]) { let root = self.api.root(); self.api.replace_prop( root, Atom::NetNumberOfDesktops, PropVal::Cardinal(&[workspaces.len() as u32]), ); self.api.replace_prop( root, Atom::NetDesktopNames, PropVal::Str(&workspaces.join("\0")), ); } fn update_known_clients(&self, clients: &[WinId]) { self.api.replace_prop( self.api.root(), Atom::NetClientList, PropVal::Window(clients), ); self.api.replace_prop( self.api.root(), Atom::NetClientListStacking, PropVal::Window(clients), ); } fn set_current_workspace(&self, wix: usize) { self.api.replace_prop( self.api.root(), Atom::NetCurrentDesktop, PropVal::Cardinal(&[wix as u32]), ); } fn set_root_window_name(&self, root_name: &str) { self.api .replace_prop(self.api.root(), Atom::WmName, PropVal::Str(root_name)); } fn set_client_workspace(&self, id: WinId, workspace: usize) { self.api.replace_prop( id, Atom::NetWmDesktop, PropVal::Cardinal(&[workspace as u32]), ); } fn window_should_float(&self, id: WinId, floating_classes: &[&str]) -> bool { if let Ok(s) = self.str_prop(id, Atom::WmClass.as_ref()) { if s.split('\0').any(|c| floating_classes.contains(&c)) { return true; } } self.window_has_type_in(id, &self.auto_float_types) } fn is_managed_window(&self, id: WinId) -> bool { !self.window_has_type_in(id, &self.dont_manage_types) } fn window_geometry(&self, id: WinId) -> Result<Region> { Ok(self.api.window_geometry(id)?) } fn warp_cursor(&self, win_id: Option<WinId>, screen: &Screen) { let (x, y, id) = match win_id { Some(id) => { let (_, _, w, h) = match self.window_geometry(id) { Ok(region) => region.values(), Err(e) => { error!("error fetching window details while warping cursor: {}", e); return; } }; ((w / 2), (h / 2), id) } None => { let (x, y, w, h) = screen.region(true).values(); ((x + w / 2), (y + h / 2), self.api.root()) } }; self.api.warp_cursor(id, x as usize, y as usize); } fn query_for_active_windows(&self) -> Vec<WinId> { match self.api.current_clients() { Err(_) => Vec::new(), Ok(ids) => ids .iter() .filter(|&id| !self.window_has_type_in(*id, &self.dont_manage_types)) .cloned() .collect(), } } fn str_prop(&self, id: u32, name: &str) -> Result<String> { Ok(self.api.get_str_prop(id, name)?) } fn atom_prop(&self, id: u32, name: &str) -> Result<u32> { Ok(self.api.get_atom_prop(id, Atom::from_str(name)?)?) } fn intern_atom(&self, atom: &str) -> Result<u32> { Ok(self.api.atom(atom)?) } fn cleanup(&self) { self.api.ungrab_keys(); self.api.ungrab_mouse_buttons(); self.api.destroy_window(self.check_win); self.api.delete_prop(self.api.root(), Atom::NetActiveWindow); } }
/*! * API wrapper for talking to the X server using XCB * * The crate used by penrose for talking to the X server is rust-xcb, which * is a set of bindings for the C level XCB library that are autogenerated * from an XML spec. The XML files can be found * [here](https://github.com/rtbo/rust-xcb/tree/master/xml) and are useful * as reference for how the API works. Sections have been converted and added * to the documentation of the method calls and enums present in this module. * * [EWMH](https://specifications.freedesktop.org/wm-spec/wm-spec-1.3.html) * [Xlib manual](https://tronche.com/gui/x/xlib/) */ use crate::{ core::{ bindings::{KeyBindings, MouseBindings}, data_types::{Point, PropVal, Region, WinAttr, WinConfig, WinId, WinType}, manager::WindowManager, screen::Screen, xconnection::{ Atom, XConn, XEvent, AUTO_FLOAT_WINDOW_TYPES, EWMH_SUPPORTED_ATOMS, UNMANAGED_WINDOW_TYPES, }, }, xcb::{Api, XcbApi}, Result, }; use std::{collections::HashMap, str::FromStr}; const WM_NAME: &str = "penrose"; /** * Handles communication with an X server via the XCB library. * * XcbConnection is a minimal implementation that does not make use of the full asyc capabilities * of the underlying C XCB library. **/ #[derive(Debug)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct XcbConnection { api: Api, check_win: WinId, auto_float_types: Vec<u32>, dont_manage_types: Vec<u32>, } impl XcbConnection { pub fn new() -> Result<Self> { let api = Api::new()?; let auto_float_types: Vec<u32> = AUTO_FLOAT_WINDOW_TYPES .iter() .map(|a| api.known_atom(*a)) .collect(); let dont_manage_types: Vec<u32> = UNMANAGED_WINDOW_TYPES .iter() .map(|a| api.known_atom(*a)) .collect(); api.set_randr_notify_mask()?; let check_win = api.create_window(WinType::CheckWin, Region::new(0, 0, 1, 1), false)?; Ok(Self { api, check_win, auto_float_types, dont_manage_types, }) } fn window_has_type_in(&self, id: WinId, win_types: &[u32]) -> bool { if let Ok(atom) = self.api.get_atom_prop(id, Atom::NetWmWindowType) { return win_types.contains(&atom); } false } pub fn xcb_connection(&self) -> &xcb::Connection { &self.api.conn() } pub fn api(&self) -> &Api { &self.api } pub fn api_mut(&mut self) -> &mut Api { &mut self.api } pub fn known_atoms(&self) -> &HashMap<Atom, u32> { &self.api.known_atoms() } } impl WindowManager<XcbConnection> { pub fn xcb_connection(&self) -> &xcb::Connection { &self.conn().xcb_connection() } pub fn known_atoms(&self) -> &HashMap<Atom, u32> { &self.conn().known_atoms() } } impl XConn for XcbConnection { #[cfg(feature = "serde")] fn hydrate(&mut self) -> Result<()> { Ok(self.api.hydrate()?) } fn flush(&self) -> bool { self.api.flush() } fn wait_for_event(&self) -> Result<XEvent> { Ok(self.api.wait_for_event()?) } fn current_outputs(&self) -> Vec<Screen> { match self.api.current_screens() { Ok(screens) => screens, Err(e) => panic!("{}", e), } } fn cursor_position(&self) -> Point { self.api.cursor_position() } fn position_window(&self, id: WinId, reg: Region, border: u32, stack_above: bool) { let mut data = vec![WinConfig::Position(reg), WinConfig::BorderPx(border)]; if stack_above { data.push(WinConfig::StackAbove); } self.api.configure_window(id, &data) } fn raise_window(&self, id: WinId) { self.api.configure_window(id, &[WinConfig::StackAbove]) } fn mark_new_window(&self, id: WinId) { let data = &[WinAttr::ClientEventMask]; self.api.set_window_attributes(id, data) } fn map_window(&self, id: WinId) { self.api.map_window(id); } fn unmap_window(&self, id: WinId) { self.api.unmap_window(id); } fn send_client_event(&self, id: WinId, atom_name: &str) -> Result<()> { Ok(self.api.send_client_event(id, atom_name)?) } fn focused_client(&self) -> WinId { self.api.focused_client().unwrap_or(0) } fn focus_client(&self, id: WinId) { self.api.mark_focused_window(id); } fn set_client_border_color(&self, id: WinId, color: u32) { let data = &[WinAttr::BorderColor(color)]; self.api.set_window_attributes(id, data); } fn toggle_client_fullscreen(&self, id: WinId, client_is_fullscreen: bool) { let data = if client_is_fullscreen { 0 } else { self.api.known_atom(Atom::NetWmStateFullscreen) }; self.api .replace_prop(id, Atom::NetWmState, PropVal::Atom(&[data])); } fn grab_keys(&self, key_bindings: &KeyBindings<Self>, mouse_bindings: &MouseBindings<Self>) { self.api.grab_keys(&key_bindings.keys().collect::<Vec<_>>()); self.api.grab_mouse_buttons( &mouse_bindings .keys() .map(|(_, state)| state) .collect::<Vec<_>>(), ); let data = &[WinAttr::RootEventMask]; self.api.set_window_attributes(self.api.root(), data); self.flush(); } fn set_wm_properties(&self, workspaces: &[&str]) { let root = self.api.root(); for &win in &[self.check_win, root] { self.api.replace_prop( win, Atom::NetSupportingWmCheck, PropVal::Window(&[self.check_win]), ); let val = PropVal::Str(WM_NAME); self.api.replace_prop(win, Atom::WmName, val); } let supported = EWMH_SUPPORTED_ATOMS .iter() .map(|a| self.api.known_atom(*a)) .collect::<Vec<u32>>(); let prop = PropVal::Atom(&supported); self.api.replace_prop(root, Atom::NetSupported, prop); self.update_desktops(workspaces); self.api.delete_prop(root, Atom::NetClientList); } fn update_desktops(&self, workspaces: &[&str]) { let roo
fn update_known_clients(&self, clients: &[WinId]) { self.api.replace_prop( self.api.root(), Atom::NetClientList, PropVal::Window(clients), ); self.api.replace_prop( self.api.root(), Atom::NetClientListStacking, PropVal::Window(clients), ); } fn set_current_workspace(&self, wix: usize) { self.api.replace_prop( self.api.root(), Atom::NetCurrentDesktop, PropVal::Cardinal(&[wix as u32]), ); } fn set_root_window_name(&self, root_name: &str) { self.api .replace_prop(self.api.root(), Atom::WmName, PropVal::Str(root_name)); } fn set_client_workspace(&self, id: WinId, workspace: usize) { self.api.replace_prop( id, Atom::NetWmDesktop, PropVal::Cardinal(&[workspace as u32]), ); } fn window_should_float(&self, id: WinId, floating_classes: &[&str]) -> bool { if let Ok(s) = self.str_prop(id, Atom::WmClass.as_ref()) { if s.split('\0').any(|c| floating_classes.contains(&c)) { return true; } } self.window_has_type_in(id, &self.auto_float_types) } fn is_managed_window(&self, id: WinId) -> bool { !self.window_has_type_in(id, &self.dont_manage_types) } fn window_geometry(&self, id: WinId) -> Result<Region> { Ok(self.api.window_geometry(id)?) } fn warp_cursor(&self, win_id: Option<WinId>, screen: &Screen) { let (x, y, id) = match win_id { Some(id) => { let (_, _, w, h) = match self.window_geometry(id) { Ok(region) => region.values(), Err(e) => { error!("error fetching window details while warping cursor: {}", e); return; } }; ((w / 2), (h / 2), id) } None => { let (x, y, w, h) = screen.region(true).values(); ((x + w / 2), (y + h / 2), self.api.root()) } }; self.api.warp_cursor(id, x as usize, y as usize); } fn query_for_active_windows(&self) -> Vec<WinId> { match self.api.current_clients() { Err(_) => Vec::new(), Ok(ids) => ids .iter() .filter(|&id| !self.window_has_type_in(*id, &self.dont_manage_types)) .cloned() .collect(), } } fn str_prop(&self, id: u32, name: &str) -> Result<String> { Ok(self.api.get_str_prop(id, name)?) } fn atom_prop(&self, id: u32, name: &str) -> Result<u32> { Ok(self.api.get_atom_prop(id, Atom::from_str(name)?)?) } fn intern_atom(&self, atom: &str) -> Result<u32> { Ok(self.api.atom(atom)?) } fn cleanup(&self) { self.api.ungrab_keys(); self.api.ungrab_mouse_buttons(); self.api.destroy_window(self.check_win); self.api.delete_prop(self.api.root(), Atom::NetActiveWindow); } }
t = self.api.root(); self.api.replace_prop( root, Atom::NetNumberOfDesktops, PropVal::Cardinal(&[workspaces.len() as u32]), ); self.api.replace_prop( root, Atom::NetDesktopNames, PropVal::Str(&workspaces.join("\0")), ); }
function_block-function_prefixed
[ { "content": "fn process_property_notify(id: WinId, atom: String, is_root: bool) -> Vec<EventAction> {\n\n match Atom::from_str(&atom) {\n\n Ok(a) if [Atom::WmName, Atom::NetWmName].contains(&a) => {\n\n vec![EventAction::ClientNameChanged(id, is_root)]\n\n }\n\n _ => vec![EventAction::UnknownPropertyChange(id, atom, is_root)],\n\n }\n\n}\n", "file_path": "src/core/manager/event.rs", "rank": 0, "score": 266645.135895197 }, { "content": "pub fn test_bindings<X: XConn>() -> KeyBindings<X> {\n\n let mut bindings = HashMap::new();\n\n bindings.insert(\n\n EXIT_CODE,\n\n Box::new(|wm: &mut WindowManager<X>| wm.exit()) as KeyEventHandler<X>,\n\n );\n\n bindings.insert(\n\n LAYOUT_CHANGE_CODE,\n\n Box::new(|wm: &mut WindowManager<X>| wm.cycle_layout(Forward)) as KeyEventHandler<X>,\n\n );\n\n bindings.insert(\n\n WORKSPACE_CHANGE_CODE,\n\n Box::new(|wm: &mut WindowManager<X>| wm.focus_workspace(&Selector::Index(1)))\n\n as KeyEventHandler<X>,\n\n );\n\n bindings.insert(\n\n ADD_WORKSPACE_CODE,\n\n Box::new(|wm: &mut WindowManager<X>| wm.push_workspace(Workspace::new(\"new\", layouts())))\n\n as KeyEventHandler<X>,\n\n );\n", "file_path": "tests/common/mod.rs", "rank": 1, "score": 254552.51402660477 }, { "content": "fn process_enter_notify(state: WmState<'_>, id: WinId, rpt: Point) -> Vec<EventAction> {\n\n let mut actions = vec![\n\n EventAction::ClientFocusGained(id),\n\n EventAction::SetScreenFromPoint(Some(rpt)),\n\n ];\n\n\n\n if let Some(current) = state.focused_client {\n\n if current != id {\n\n actions.insert(0, EventAction::ClientFocusLost(current));\n\n }\n\n }\n\n\n\n actions\n\n}\n\n\n", "file_path": "src/core/manager/event.rs", "rank": 2, "score": 244835.99333391024 }, { "content": "fn process_map_request(state: WmState<'_>, id: WinId, ignore: bool) -> Vec<EventAction> {\n\n if ignore || state.client_map.contains_key(&id) {\n\n vec![]\n\n } else {\n\n vec![EventAction::MapWindow(id)]\n\n }\n\n}\n\n\n", "file_path": "src/core/manager/event.rs", "rank": 3, "score": 244577.34452223178 }, { "content": "/// A no-op floating layout that simply satisfies the type required for Layout\n\npub fn floating(_: &[&Client], _: Option<WinId>, _: &Region, _: u32, _: f32) -> Vec<ResizeAction> {\n\n vec![]\n\n}\n\n\n\nimpl Layout {\n\n /// Create a new Layout for a specific monitor\n\n pub fn new(\n\n symbol: impl Into<String>,\n\n conf: LayoutConf,\n\n f: LayoutFunc,\n\n max_main: u32,\n\n ratio: f32,\n\n ) -> Self {\n\n Self {\n\n symbol: symbol.into(),\n\n conf,\n\n max_main,\n\n ratio,\n\n f: Some(f),\n\n }\n", "file_path": "src/core/layout.rs", "rank": 4, "score": 244472.66386625596 }, { "content": "pub fn example_workspace(name: impl Into<String>, n_clients: u32) -> Workspace {\n\n let mut ws = Workspace::new(name, example_layouts());\n\n (0..n_clients).for_each(|n| ws.add_client(n, &InsertPoint::Last).unwrap());\n\n\n\n ws\n\n}\n\n\n", "file_path": "src/__example_helpers.rs", "rank": 5, "score": 218237.3030058707 }, { "content": "pub fn create_or_switch_to_workspace<X: XConn>(\n\n get_name: fn() -> Option<String>,\n\n layouts: Vec<Layout>,\n\n) -> KeyEventHandler<X> {\n\n Box::new(move |wm: &mut WindowManager<X>| {\n\n if let Some(s) = get_name() {\n\n let name = &s;\n\n let cond = |ws: &Workspace| ws.name() == name;\n\n let sel = Selector::Condition(&cond);\n\n if wm.workspace(&sel).is_none() {\n\n wm.push_workspace(Workspace::new(name, layouts.clone()))?;\n\n }\n\n wm.focus_workspace(&sel)\n\n } else {\n\n Ok(())\n\n }\n\n })\n\n}\n\n\n\n/**\n\n * Focus a [Client] with the given class as `WM_CLASS` or spawn the program with the given command\n\n * if no such Client exists.\n\n *\n\n * This is useful for key bindings that are based on the program you want to work with rather than\n\n * having to remember where things are running.\n\n */\n", "file_path": "src/contrib/actions.rs", "rank": 6, "score": 217369.88437904615 }, { "content": "pub fn example_screens(n: u32) -> Vec<Screen> {\n\n (0..n)\n\n .map(|i| Screen::new(Region::new(1080 * n, 800 * n, 1080, 800), i as usize))\n\n .collect()\n\n}\n\n\n", "file_path": "src/__example_helpers.rs", "rank": 7, "score": 211992.56686535297 }, { "content": "pub fn code_map_from_xmodmap() -> Result<ReverseCodeMap> {\n\n let output = match spawn_for_output(\"xmodmap -pke\") {\n\n Ok(s) => s,\n\n Err(e) => return Err(XcbError::Raw(e.to_string())), // failed to spawn\n\n };\n\n Ok(output\n\n .lines()\n\n .flat_map(|l| {\n\n let mut words = l.split_whitespace(); // keycode <code> = <names ...>\n\n let key_code: u8 = match words.nth(1) {\n\n Some(word) => match word.parse() {\n\n Ok(val) => val,\n\n Err(e) => panic!(\"{}\", e),\n\n },\n\n None => panic!(\"unexpected output format from xmodmap -pke\"),\n\n };\n\n vec![\n\n words.nth(1).map(move |name| ((0, key_code), name.into())),\n\n words.next().map(move |name| ((1, key_code), name.into())),\n\n ]\n", "file_path": "src/xcb/api.rs", "rank": 8, "score": 209258.35071859267 }, { "content": "fn dynamic_workspaces<X: XConn>() -> KeyEventHandler<X> {\n\n create_or_switch_to_workspace(\n\n || {\n\n let options = vec![\"1term\", \"2term\", \"3term\", \"web\", \"files\"];\n\n let menu = DMenu::new(\"WS-SELECT: \", options, DMenuConfig::default());\n\n if let Ok(MenuMatch::Line(_, choice)) = menu.run(0) {\n\n Some(choice)\n\n } else {\n\n None\n\n }\n\n },\n\n my_layouts(),\n\n )\n\n}\n\n\n", "file_path": "examples/dynamic_workspaces/main.rs", "rank": 9, "score": 208017.7631731393 }, { "content": "/// Create a default dwm style status bar that displays content pulled from the\n\n/// WM_NAME property of the root window.\n\npub fn dwm_bar<C, D, X>(\n\n drw: D,\n\n height: usize,\n\n style: &TextStyle,\n\n highlight: impl Into<Color>,\n\n empty_ws: impl Into<Color>,\n\n workspaces: Vec<impl Into<String>>,\n\n) -> Result<StatusBar<C, D, X>>\n\nwhere\n\n C: DrawContext + 'static,\n\n D: Draw<Ctx = C>,\n\n X: XConn,\n\n{\n\n let highlight = highlight.into();\n\n let workspaces: Vec<String> = workspaces.into_iter().map(|w| w.into()).collect();\n\n\n\n Ok(StatusBar::try_new(\n\n drw,\n\n Position::Top,\n\n height,\n", "file_path": "src/draw/bar.rs", "rank": 10, "score": 205322.4316817281 }, { "content": "pub fn example_windowmanager(n_screens: u32, events: Vec<XEvent>) -> ExampleWM {\n\n let conn = ExampleXConn::new(n_screens, events, vec![]);\n\n let conf = Config {\n\n layouts: example_layouts(),\n\n ..Default::default()\n\n };\n\n let mut wm = WindowManager::new(conf, conn, vec![], logging_error_handler());\n\n wm.init().unwrap();\n\n\n\n wm\n\n}\n\n\n", "file_path": "src/__example_helpers.rs", "rank": 11, "score": 201636.3716400133 }, { "content": "pub fn n_clients(n: u32) -> Vec<XEvent> {\n\n (0..n)\n\n .map(|id| XEvent::MapRequest { id, ignore: false })\n\n .collect()\n\n}\n\n\n", "file_path": "src/__example_helpers.rs", "rank": 12, "score": 200550.51208276278 }, { "content": "fn report_error(msg: impl AsRef<str>, b: &Binding) {\n\n panic!(\n\n \"'{}' is an invalid key binding: {}\\n\\\n\n Key bindings should be of the form <modifiers>-<key name> e.g: M-j, M-S-slash, M-C-Up\",\n\n b.raw,\n\n msg.as_ref()\n\n )\n\n}\n\n\n\n/// This is an internal macro that is used as part of `gen_keybindings` to validate user provided\n\n/// key bindings at compile time using xmodmap.\n\n///\n\n/// It is not intended for use outside of that context and may be modified and updated without\n\n/// announcing breaking API changes.\n\n///\n\n/// ```no_run\n\n/// validate_user_bindings!(\n\n/// ( \"M-a\", ... )\n\n/// (\n\n/// ( ( \"M-{}\", \"M-S-{}\" ) ( \"1\", \"2\", \"3\" ) )\n\n/// ...\n\n/// )\n\n/// );\n\n/// ```\n", "file_path": "crates/penrose_proc/src/lib.rs", "rank": 13, "score": 197012.64218785695 }, { "content": "pub fn spawn_with_args<S: Into<String>>(cmd: S, args: &[&str]) -> Result<()> {\n\n let result = Command::new(cmd.into())\n\n .args(args)\n\n .stdout(Stdio::null())\n\n .stderr(Stdio::null())\n\n .spawn();\n\n\n\n match result {\n\n Ok(_) => Ok(()),\n\n Err(e) => Err(e.into()),\n\n }\n\n}\n\n\n\n/**\n\n * Run an external command and return its output.\n\n *\n\n * NOTE: std::process::Command::output will not work within penrose due to the\n\n * way that signal handling is set up. Use this function if you need to access the\n\n * output of a process that you spawn.\n\n */\n", "file_path": "src/core/helpers.rs", "rank": 14, "score": 196837.46133296008 }, { "content": "pub fn process_next_event(event: XEvent, state: WmState<'_>) -> Vec<EventAction> {\n\n match event {\n\n // Direct 1-n mappings of XEvents -> EventActions\n\n XEvent::Destroy { id } => vec![EventAction::DestroyClient(id)],\n\n XEvent::KeyPress(code) => vec![EventAction::RunKeyBinding(code)],\n\n XEvent::Leave { id, rpt, .. } => vec![\n\n EventAction::ClientFocusLost(id),\n\n EventAction::SetScreenFromPoint(Some(rpt)),\n\n ],\n\n XEvent::MouseEvent(evt) => vec![EventAction::RunMouseBinding(evt)],\n\n XEvent::RandrNotify => vec![EventAction::DetectScreens],\n\n XEvent::ScreenChange => vec![EventAction::SetScreenFromPoint(None)],\n\n\n\n // Require processing based on current WindowManager state\n\n XEvent::ClientMessage { id, dtype, data } => {\n\n process_client_message(state, id, &dtype, &data)\n\n }\n\n XEvent::ConfigureNotify { is_root, .. } => process_configure_notify(is_root),\n\n XEvent::Enter { id, rpt, .. } => process_enter_notify(state, id, rpt),\n\n XEvent::MapRequest { id, ignore } => process_map_request(state, id, ignore),\n\n XEvent::PropertyNotify { id, atom, is_root } => process_property_notify(id, atom, is_root),\n\n }\n\n}\n\n\n", "file_path": "src/core/manager/event.rs", "rank": 15, "score": 196721.63814399933 }, { "content": "/// A handle on a running X11 connection that we can use for issuing X requests.\n\n///\n\n/// XConn is intended as an abstraction layer to allow for communication with the underlying\n\n/// display system (assumed to be X) using whatever mechanism the implementer wishes. In theory, it\n\n/// should be possible to write an implementation that allows penrose to run on systems not using X\n\n/// as the windowing system but X idioms and high level event types / client interations are\n\n/// assumed.\n\npub trait XConn {\n\n /// Hydrate this XConn to restore internal state following serde deserialization\n\n #[cfg(feature = \"serde\")]\n\n fn hydrate(&mut self) -> Result<()>;\n\n\n\n /// Flush pending actions to the X event loop\n\n fn flush(&self) -> bool;\n\n\n\n /// Wait for the next event from the X server and return it as an [XEvent]\n\n fn wait_for_event(&self) -> Result<XEvent>;\n\n\n\n /// Determine the currently connected CRTCs and return their details\n\n fn current_outputs(&self) -> Vec<Screen>;\n\n\n\n /// Determine the current (x,y) position of the cursor relative to the root window.\n\n fn cursor_position(&self) -> Point;\n\n\n\n /// Reposition the window identified by 'id' to the specifed region\n\n fn position_window(&self, id: WinId, r: Region, border: u32, stack_above: bool);\n\n\n", "file_path": "src/core/xconnection.rs", "rank": 16, "score": 190228.4070399406 }, { "content": "pub fn example_key_bindings() -> ExampleKeyBindings {\n\n map! {\n\n EXIT_CODE =>\n\n Box::new(|wm: &mut ExampleWM| wm.exit()) as ExampleKeyHandler,\n\n LAYOUT_CHANGE_CODE =>\n\n Box::new(|wm| wm.cycle_layout(Forward)),\n\n WORKSPACE_CHANGE_CODE =>\n\n Box::new(|wm| wm.focus_workspace(&Selector::Index(1))),\n\n ADD_WORKSPACE_CODE =>\n\n Box::new(|wm| wm.push_workspace(Workspace::new(\"new\", example_layouts()))),\n\n SCREEN_CHANGE_CODE =>\n\n Box::new(|wm| wm.cycle_screen(Forward)),\n\n FOCUS_CHANGE_CODE =>\n\n Box::new(|wm| wm.cycle_client(Forward)),\n\n KILL_CLIENT_CODE =>\n\n Box::new(|wm| wm.kill_client()),\n\n CLIENT_TO_WORKSPACE_CODE =>\n\n Box::new(|wm| wm.client_to_workspace(&Selector::Index(1))),\n\n }\n\n}\n\n\n", "file_path": "src/__example_helpers.rs", "rank": 17, "score": 187003.79285915804 }, { "content": "/// number of clients for the main area vs secondary\n\npub fn client_breakdown<T>(clients: &[T], n_main: u32) -> (u32, u32) {\n\n let n = clients.len() as u32;\n\n if n <= n_main {\n\n (n, 0)\n\n } else {\n\n (n_main, n - n_main)\n\n }\n\n}\n\n\n\n/*\n\n * Layout functions\n\n *\n\n * Each of the following is a layout function that can be passed to Layout::new.\n\n * No checks are carried out to ensure that clients are tiled correctly (i.e. that\n\n * they are non-overlapping) so when adding additional layout functions you are\n\n * free to tile them however you wish. Xmonad for example has a 'circle' layout\n\n * that deliberately overlaps clients under the main window.\n\n */\n\n\n\n// ignore paramas and return pairs of window ID and index in the client vec\n", "file_path": "src/core/layout.rs", "rank": 18, "score": 184770.81194255967 }, { "content": "pub fn focus_or_spawn<X: XConn>(class: String, command: String) -> KeyEventHandler<X> {\n\n Box::new(move |wm: &mut WindowManager<X>| {\n\n let cond = |c: &Client| c.class() == class;\n\n if let Some(client) = wm.client(&Selector::Condition(&cond)) {\n\n let workspace = client.workspace();\n\n wm.focus_workspace(&Selector::Index(workspace))\n\n } else {\n\n spawn(&command)\n\n }\n\n })\n\n}\n\n\n\n/**\n\n * Detect the current monitor set up and arrange the monitors if needed using [xrandr][1].\n\n *\n\n * NOTE\n\n * - Primary monitor will be set to `primary`\n\n * - Monitor resolution is set using the --auto flag in xrandr\n\n * - Only supports one and two monitor setups.\n\n *\n\n * [1]: https://wiki.archlinux.org/index.php/Xrandr\n\n */\n", "file_path": "src/contrib/actions.rs", "rank": 19, "score": 184079.90736107476 }, { "content": "/// Construct a new [StatusBar] using the default [dwm_bar] configuration, backed by [XcbDraw]\n\npub fn new_xcb_backed_status_bar(\n\n height: usize,\n\n style: &TextStyle,\n\n highlight: impl Into<Color>,\n\n empty_ws: impl Into<Color>,\n\n workspaces: Vec<impl Into<String>>,\n\n) -> crate::draw::Result<StatusBar<XcbDrawContext, XcbDraw, XcbConnection>> {\n\n dwm_bar(\n\n XcbDraw::new()?,\n\n height,\n\n style,\n\n highlight,\n\n empty_ws,\n\n workspaces,\n\n )\n\n}\n\n\n\n/// Enum to store the various ways that operations can fail inside of the\n\n/// XCB implementations of penrose traits.\n\n#[derive(thiserror::Error, Debug)]\n", "file_path": "src/xcb/mod.rs", "rank": 20, "score": 182422.19113115748 }, { "content": "/// Construct a penrose [WindowManager] backed by the default [xcb][crate::xcb] backend.\n\npub fn new_xcb_backed_window_manager(\n\n config: Config,\n\n hooks: Vec<Box<dyn Hook<XcbConnection>>>,\n\n error_handler: ErrorHandler,\n\n) -> crate::Result<WindowManager<XcbConnection>> {\n\n let conn = XcbConnection::new()?;\n\n let mut wm = WindowManager::new(config, conn, hooks, error_handler);\n\n wm.init()?;\n\n\n\n Ok(wm)\n\n}\n\n\n", "file_path": "src/xcb/mod.rs", "rank": 21, "score": 182419.00550690605 }, { "content": "fn has_valid_modifiers(binding: &Binding) -> bool {\n\n !binding.mods.is_empty()\n\n && binding\n\n .mods\n\n .iter()\n\n .all(|s| VALID_MODIFIERS.contains(&s.as_ref()))\n\n}\n\n\n", "file_path": "crates/penrose_proc/src/lib.rs", "rank": 22, "score": 174594.33568637606 }, { "content": "/// User defined functionality triggered by [WindowManager] actions.\n\n///\n\n/// impls of [Hook] can be registered to receive events during [WindowManager] operation. Each hook\n\n/// point is documented as individual methods detailing when and how they will be called. All\n\n/// registered hooks will be called for each trigger so the required methods all provide a no-op\n\n/// default implementation that must be overriden to provide functionality. Hooks may subscribe to\n\n/// multiple triggers to implement more complex behaviours and may store additional state.\n\n///\n\n/// *Care should be taken when writing [Hook] impls to ensure that infinite loops are not created by\n\n/// nested triggers and that, where possible, support for other hooks running from the same triggers\n\n/// is possible.*\n\n///\n\n///\n\n/// # Implementing Hook\n\n///\n\n/// For an example of how to write Hooks, please see the [module level][1] documentation.\n\n///\n\n/// Note that you only need to implement the methods for triggers you intended to respond to: all\n\n/// hook methods have a default empty implementation that is ignored by the `WindowManager`.\n\n///\n\n/// [1]: crate::core::hooks\n\npub trait Hook<X: XConn> {\n\n /// # Trigger Point\n\n ///\n\n /// Called once at [WindowManager] startup in [grab_keys_and_run][1] after setting up signal handlers\n\n /// and grabbing key / mouse bindings but before entering the main event loop that polls for\n\n /// [XEvents][2].\n\n ///\n\n /// # Example Uses\n\n ///\n\n /// When this trigger is reached, the `WindowManager` will have initialised all of its internal\n\n /// state, including setting up [Workspaces][3] and [Screens][4] so any set up logic for Hooks\n\n /// that requires access to this should be placed in a `startup` hook as opposed to being\n\n /// attempted in the `new` method of the hook itself.\n\n ///\n\n /// [1]: crate::core::manager::WindowManager::grab_keys_and_run\n\n /// [2]: crate::core::xconnection::XEvent\n\n /// [3]: crate::core::workspace::Workspace\n\n /// [4]: crate::core::screen::Screen\n\n #[allow(unused_variables)]\n\n fn startup(&mut self, wm: &mut WindowManager<X>) -> Result<()> {\n", "file_path": "src/core/hooks.rs", "rank": 23, "score": 173447.2158522883 }, { "content": "pub fn parse_key_binding(pattern: String, known_codes: &CodeMap) -> Option<KeyCode> {\n\n let mut parts: Vec<&str> = pattern.split('-').collect();\n\n match known_codes.get(parts.remove(parts.len() - 1)) {\n\n Some(code) => {\n\n let mask = parts\n\n .iter()\n\n .map(|&s| match s {\n\n \"A\" => xcb::MOD_MASK_1,\n\n \"M\" => xcb::MOD_MASK_4,\n\n \"S\" => xcb::MOD_MASK_SHIFT,\n\n \"C\" => xcb::MOD_MASK_CONTROL,\n\n _ => panic!(\"invalid key binding prefix: {}\", s),\n\n })\n\n .fold(0, |acc, v| acc | v);\n\n\n\n debug!(\"binding '{}' as [{}, {}]\", pattern, mask, code);\n\n Some(KeyCode {\n\n mask: mask as u16,\n\n code: *code,\n\n })\n\n }\n\n None => None,\n\n }\n\n}\n", "file_path": "src/xcb/helpers.rs", "rank": 24, "score": 170041.80870653383 }, { "content": "pub fn spawn<S: Into<String>>(cmd: S) -> Result<()> {\n\n let s = cmd.into();\n\n let parts: Vec<&str> = s.split_whitespace().collect();\n\n let result = if parts.len() > 1 {\n\n Command::new(parts[0])\n\n .args(&parts[1..])\n\n .stdout(Stdio::null())\n\n .stderr(Stdio::null())\n\n .spawn()\n\n } else {\n\n Command::new(parts[0])\n\n .stdout(Stdio::null())\n\n .stderr(Stdio::null())\n\n .spawn()\n\n };\n\n\n\n match result {\n\n Ok(_) => Ok(()),\n\n Err(e) => Err(e.into()),\n\n }\n\n}\n\n\n\n/**\n\n * Run an external command with the specified command line arguments\n\n *\n\n * This redirects the process stdout and stderr to /dev/null.\n\n */\n", "file_path": "src/core/helpers.rs", "rank": 25, "score": 167933.96325653535 }, { "content": "pub trait XcbApi {\n\n /// Hydrate this XcbApi to restore internal state following serde deserialization\n\n #[cfg(feature = \"serde\")]\n\n fn hydrate(&mut self) -> Result<()>;\n\n\n\n /**\n\n * Intern an atom by name, returning the corresponding id.\n\n *\n\n * Can fail if the atom name is not a known X atom or if there\n\n * is an issue with communicating with the X server. For known\n\n * atoms that are included in the [Atom] enum,\n\n * the [XcbApi::known_atom] method should be used instead.\n\n */\n\n fn atom(&self, name: &str) -> Result<u32>;\n\n\n\n /**\n\n * Fetch the id value of a known [Atom] variant.\n\n *\n\n * This operation is expected to always succeed as known atoms should\n\n * either be interned on init of the implementing struct or statically\n", "file_path": "src/xcb/mod.rs", "rank": 26, "score": 167609.22996614297 }, { "content": "pub fn simple_screen(n: usize) -> Screen {\n\n Screen::new(\n\n Region::new(\n\n n as u32 * SCREEN_WIDTH,\n\n n as u32 * SCREEN_HEIGHT,\n\n SCREEN_WIDTH,\n\n SCREEN_HEIGHT,\n\n ),\n\n n,\n\n )\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 27, "score": 166624.52420064088 }, { "content": "#[cfg(feature = \"serde\")]\n\nfn default_hooks<X: XConn>() -> Cell<Hooks<X>> {\n\n Cell::new(Vec::new())\n\n}\n\n\n\n/// WindowManager is the primary struct / owner of the event loop for penrose.\n\n///\n\n/// It handles most (if not all) of the communication with the underlying [XConn], responding to\n\n/// [XEvent][crate::core::xconnection::XEvent]s emitted by it. User key / mouse bindings are parsed\n\n/// and bound on the call to `grab_keys_and_run` and then triggered when corresponding `XEvent`\n\n/// instances come through in the main event loop.\n\n///\n\n/// # A note on examples\n\n///\n\n/// The examples provided for each of the `WindowManager` methods are written using an example\n\n/// implementation of [XConn] that mocks out calls to the X server. In each case, it is assumed\n\n/// that you have an initialised `WindowManager` struct as demonstrated in the full examples for\n\n/// `new` and `init`.\n\n///\n\n/// For full examples of how to configure the `WindowManager`, please see the [examples][1]\n\n/// directory in the Penrose repo.\n", "file_path": "src/core/manager/mod.rs", "rank": 28, "score": 164654.43087441218 }, { "content": "fn is_valid_keyname(binding: &Binding, names: &[String]) -> bool {\n\n if let Some(ref k) = binding.keyname {\n\n names.contains(&k)\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "crates/penrose_proc/src/lib.rs", "rank": 29, "score": 162174.68251131914 }, { "content": "/// A really simple stub implementation of [XConn] to simplify setting up test cases.\n\n///\n\n/// Intended use is to override the mock_* methods that you need for running your test case in order\n\n/// to inject behaviour into a WindowManager instance which is driven by X server state.\n\n/// [StubXConn] will then implement [XConn] and call through to your overwritten methods or the\n\n/// provided default.\n\n///\n\n/// This is being done to avoid providing broken default methods on the real XConn trait that would\n\n/// make writing real impls more error prone if and when new methods are added to the trait.\n\npub trait StubXConn {\n\n /// Mocked version of hydrate\n\n #[cfg(feature = \"serde\")]\n\n fn mock_hydrate(&mut self) -> Result<()> {\n\n Ok(())\n\n }\n\n\n\n /// Mocked version of flush\n\n fn mock_flush(&self) -> bool {\n\n true\n\n }\n\n\n\n /// Mocked version of wait_for_event\n\n fn mock_wait_for_event(&self) -> Result<XEvent> {\n\n Err(PenroseError::Raw(\"mock impl\".into()))\n\n }\n\n\n\n /// Mocked version of current_outputs\n\n fn mock_current_outputs(&self) -> Vec<Screen> {\n\n vec![]\n", "file_path": "src/core/xconnection.rs", "rank": 30, "score": 159024.30842042106 }, { "content": "pub fn spawn_for_output<S: Into<String>>(cmd: S) -> Result<String> {\n\n let s = cmd.into();\n\n let parts: Vec<&str> = s.split_whitespace().collect();\n\n let result = if parts.len() > 1 {\n\n Command::new(parts[0])\n\n .stdout(std::process::Stdio::piped())\n\n .args(&parts[1..])\n\n .spawn()\n\n } else {\n\n Command::new(parts[0])\n\n .stdout(std::process::Stdio::piped())\n\n .spawn()\n\n };\n\n\n\n let child = result?;\n\n let mut buff = String::new();\n\n Ok(child\n\n .stdout\n\n .ok_or(PenroseError::SpawnProc(s))?\n\n .read_to_string(&mut buff)\n", "file_path": "src/core/helpers.rs", "rank": 31, "score": 157998.3002863917 }, { "content": "fn main() -> Result<()> {\n\n let config = Config::default();\n\n let hooks = vec![];\n\n\n\n let key_bindings = gen_keybindings! {\n\n \"M-j\" => run_internal!(cycle_client, Forward);\n\n \"M-k\" => run_internal!(cycle_client, Backward);\n\n \"M-S-j\" => run_internal!(drag_client, Forward);\n\n \"M-S-k\" => run_internal!(drag_client, Backward);\n\n \"M-S-q\" => run_internal!(kill_client);\n\n \"M-Tab\" => run_internal!(toggle_workspace);\n\n \"M-bracketright\" => run_internal!(cycle_screen, Forward);\n\n \"M-bracketleft\" => run_internal!(cycle_screen, Backward);\n\n \"M-S-bracketright\" => run_internal!(drag_workspace, Forward);\n\n \"M-S-bracketleft\" => run_internal!(drag_workspace, Backward);\n\n \"M-grave\" => run_internal!(cycle_layout, Forward);\n\n \"M-S-grave\" => run_internal!(cycle_layout, Backward);\n\n \"M-A-Up\" => run_internal!(update_max_main, More);\n\n \"M-A-Down\" => run_internal!(update_max_main, Less);\n\n \"M-A-Right\" => run_internal!(update_main_ratio, More);\n", "file_path": "examples/minimal/main.rs", "rank": 32, "score": 157457.80336437197 }, { "content": "#[cfg(feature = \"serde\")]\n\nfn default_conn() -> xcb::Connection {\n\n let (conn, _) = xcb::Connection::connect(None).expect(\"unable to connect using XCB\");\n\n conn\n\n}\n\n\n\n/**\n\n * Use `xmodmap -pke` to determine the user's current keymap to allow for mapping X KeySym values\n\n * to their string representation on the user's system.\n\n *\n\n * # Panics\n\n * This function will panic if it is unable to fetch keycodes using the xmodmap\n\n * binary on your system or if the output of `xmodmap -pke` is not valid\n\n */\n", "file_path": "src/xcb/api.rs", "rank": 33, "score": 156613.4107018389 }, { "content": "fn main() -> Result<()> {\n\n SimpleLogger::init(LevelFilter::Debug, simplelog::Config::default())\n\n .expect(\"failed to init logging\");\n\n\n\n let mut config_builder = Config::default().builder();\n\n let config = config_builder\n\n .workspaces(vec![\"main\"])\n\n .layouts(my_layouts())\n\n .build()\n\n .unwrap();\n\n\n\n let sp = Scratchpad::new(\"st\", 0.8, 0.8);\n\n\n\n let hooks: Hooks<_> = vec![\n\n LayoutSymbolAsRootName::new(),\n\n RemoveEmptyWorkspaces::new(config.workspaces().clone()),\n\n DefaultWorkspace::new(\"1term\", \"[side]\", vec![\"st\"]),\n\n DefaultWorkspace::new(\"2term\", \"[botm]\", vec![\"st\", \"st\"]),\n\n DefaultWorkspace::new(\"3term\", \"[side]\", vec![\"st\", \"st\", \"st\"]),\n\n DefaultWorkspace::new(\"web\", \"[papr]\", vec![\"firefox\"]),\n", "file_path": "examples/dynamic_workspaces/main.rs", "rank": 34, "score": 152990.52933496714 }, { "content": "fn process_configure_notify(is_root: bool) -> Vec<EventAction> {\n\n if is_root {\n\n vec![EventAction::DetectScreens]\n\n } else {\n\n vec![]\n\n }\n\n}\n\n\n", "file_path": "src/core/manager/event.rs", "rank": 35, "score": 151512.90879809923 }, { "content": "/// A simple monolve layout that places uses the maximum available space for the focused client and\n\n/// unmaps all other windows.\n\npub fn monocle(\n\n clients: &[&Client],\n\n focused: Option<WinId>,\n\n monitor_region: &Region,\n\n _: u32,\n\n _: f32,\n\n) -> Vec<ResizeAction> {\n\n if let Some(fid) = focused {\n\n let (mx, my, mw, mh) = monitor_region.values();\n\n clients\n\n .iter()\n\n .map(|c| {\n\n let cid = c.id();\n\n if cid == fid {\n\n (cid, Some(Region::new(mx, my, mw, mh)))\n\n } else {\n\n (cid, None)\n\n }\n\n })\n\n .collect()\n\n } else {\n\n Vec::new()\n\n }\n\n}\n", "file_path": "src/core/layout.rs", "rank": 36, "score": 151386.4568693073 }, { "content": "#[cfg(feature = \"serde\")]\n\n#[test]\n\nfn serde_hydrate_and_init_works_with_serialized_state() {\n\n let mut wm = get_seeded_wm(true);\n\n wm.grab_keys_and_run(common::test_bindings(), HashMap::new())\n\n .unwrap();\n\n let as_json = serde_json::to_string(&wm).unwrap();\n\n let mut unchecked_wm: WindowManager<EarlyExitConn> = serde_json::from_str(&as_json).unwrap();\n\n\n\n let res = unchecked_wm.hydrate_and_init(vec![], logging_error_handler(), layout_funcs());\n\n assert!(res.is_ok());\n\n}\n\n\n", "file_path": "tests/serialization_tests.rs", "rank": 37, "score": 150716.6601469854 }, { "content": "/// A simple layout that places the main region at the top of the screen and tiles\n\n/// remaining windows in a single row underneath.\n\npub fn bottom_stack(\n\n clients: &[&Client],\n\n _: Option<WinId>,\n\n monitor_region: &Region,\n\n max_main: u32,\n\n ratio: f32,\n\n) -> Vec<ResizeAction> {\n\n let n = clients.len() as u32;\n\n\n\n if n <= max_main || max_main == 0 {\n\n return monitor_region\n\n .as_columns(n)\n\n .iter()\n\n .zip(clients)\n\n .map(|(r, c)| (c.id(), Some(*r)))\n\n .collect();\n\n }\n\n\n\n let split = ((monitor_region.h as f32) * ratio) as u32;\n\n let (main, stack) = monitor_region.split_at_height(split).unwrap();\n\n\n\n main.as_columns(max_main)\n\n .into_iter()\n\n .chain(stack.as_columns(n.saturating_sub(max_main)))\n\n .zip(clients)\n\n .map(|(r, c)| (c.id(), Some(r)))\n\n .collect()\n\n}\n\n\n", "file_path": "src/core/layout.rs", "rank": 38, "score": 147272.18947642442 }, { "content": "/// A simple layout that places the main region on the left and tiles remaining\n\n/// windows in a single column to the right.\n\npub fn side_stack(\n\n clients: &[&Client],\n\n _: Option<WinId>,\n\n monitor_region: &Region,\n\n max_main: u32,\n\n ratio: f32,\n\n) -> Vec<ResizeAction> {\n\n let n = clients.len() as u32;\n\n\n\n if n <= max_main || max_main == 0 {\n\n return monitor_region\n\n .as_rows(n)\n\n .iter()\n\n .zip(clients)\n\n .map(|(r, c)| (c.id(), Some(*r)))\n\n .collect();\n\n }\n\n\n\n let split = ((monitor_region.w as f32) * ratio) as u32;\n\n let (main, stack) = monitor_region.split_at_width(split).unwrap();\n\n\n\n main.as_rows(max_main)\n\n .into_iter()\n\n .chain(stack.as_rows(n.saturating_sub(max_main)))\n\n .zip(clients)\n\n .map(|(r, c)| (c.id(), Some(r)))\n\n .collect()\n\n}\n\n\n", "file_path": "src/core/layout.rs", "rank": 39, "score": 147267.9386535453 }, { "content": "pub fn example_mouse_bindings() -> ExampleMouseBindings {\n\n map! {}\n\n}\n\n\n\n#[cfg_attr(feature = \"serde\", derive(Serialize, Deserialize))]\n\npub struct ExampleXConn {\n\n #[cfg_attr(feature = \"serde\", serde(skip))]\n\n events: Cell<Vec<XEvent>>,\n\n focused: Cell<WinId>,\n\n n_screens: Cell<u32>,\n\n unmanaged_ids: Vec<WinId>,\n\n}\n\n\n\nimpl fmt::Debug for ExampleXConn {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"ExampleXConn\")\n\n .field(\"n_screens\", &self.n_screens.get())\n\n .field(\"remaining_events\", &self.remaining_events())\n\n .field(\"focused\", &self.focused.get())\n\n .field(\"unmanaged_ids\", &self.unmanaged_ids)\n", "file_path": "src/__example_helpers.rs", "rank": 40, "score": 144172.5303659534 }, { "content": "#[proc_macro]\n\npub fn validate_user_bindings(input: TokenStream) -> TokenStream {\n\n let BindingsInput(mut bindings) = parse_macro_input!(input as BindingsInput);\n\n let names: Vec<String> = XKeySym::iter().map(|x| x.as_ref().to_string()).collect();\n\n let mut seen = HashSet::new();\n\n\n\n for b in bindings.iter_mut() {\n\n if seen.contains(&b.raw) {\n\n panic!(\"'{}' is bound as a keybinding more than once\", b.raw);\n\n } else {\n\n seen.insert(&b.raw);\n\n }\n\n\n\n if b.keyname.is_none() {\n\n report_error(\"no key name specified\", b)\n\n }\n\n\n\n if !is_valid_keyname(b, &names) {\n\n report_error(\n\n format!(\n\n \"'{}' is not a known key: run 'xmodmap -pke' to see valid key names\",\n", "file_path": "crates/penrose_proc/src/lib.rs", "rank": 41, "score": 143576.44664644322 }, { "content": "pub fn update_monitors_via_xrandr(\n\n primary: &str,\n\n secondary: &str,\n\n position: RelativePosition,\n\n) -> Result<()> {\n\n let raw = spawn_for_output(\"xrandr\")?;\n\n let secondary_line = raw\n\n .lines()\n\n .find(|line| line.starts_with(secondary))\n\n .ok_or_else(|| {\n\n PenroseError::Raw(\"unable to find secondary monitor in xrandr output\".into())\n\n })?;\n\n let status = secondary_line\n\n .split(' ')\n\n .nth(1)\n\n .ok_or_else(|| PenroseError::Raw(\"unexpected xrandr output\".into()))?;\n\n\n\n let position_flag = match position {\n\n RelativePosition::Left => \"--left-of\",\n\n RelativePosition::Right => \"--right-of\",\n", "file_path": "src/contrib/actions.rs", "rank": 42, "score": 142741.12226455772 }, { "content": "pub fn keycodes_from_xmodmap() -> CodeMap {\n\n match Command::new(\"xmodmap\").arg(\"-pke\").output() {\n\n Err(e) => panic!(\"unable to fetch keycodes via xmodmap: {}\", e),\n\n Ok(o) => match String::from_utf8(o.stdout) {\n\n Err(e) => panic!(\"invalid utf8 from xmodmap: {}\", e),\n\n Ok(s) => s\n\n .lines()\n\n .flat_map(|l| {\n\n let mut words = l.split_whitespace(); // keycode <code> = <names ...>\n\n let key_code: u8 = match words.nth(1) {\n\n Some(word) => match word.parse() {\n\n Ok(val) => val,\n\n Err(e) => panic!(\"{}\", e),\n\n },\n\n None => panic!(\"unexpected output format from xmodmap -pke\"),\n\n };\n\n words.skip(1).map(move |name| (name.into(), key_code))\n\n })\n\n .collect::<CodeMap>(),\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/core/helpers.rs", "rank": 43, "score": 136641.59424526332 }, { "content": "struct Binding {\n\n raw: String,\n\n mods: Vec<String>,\n\n keyname: Option<String>,\n\n}\n\n\n", "file_path": "crates/penrose_proc/src/lib.rs", "rank": 44, "score": 135691.4829637852 }, { "content": "/// A simple error handler that just logs the error to the penrose log stream\n\npub fn logging_error_handler() -> ErrorHandler {\n\n Box::new(|e: PenroseError| error!(\"{}\", e))\n\n}\n", "file_path": "src/core/helpers.rs", "rank": 45, "score": 133333.0718773338 }, { "content": "#[allow(clippy::type_complexity)]\n\nfn data_from_event(\n\n raw: XcbGenericEvent,\n\n) -> Result<(u8, u16, u32, i16, i16, i16, i16, MouseEventKind)> {\n\n Ok(match raw.response_type() {\n\n xcb::BUTTON_PRESS => {\n\n let e: &xcb::ButtonPressEvent = unsafe { xcb::cast_event(&raw) };\n\n (\n\n e.detail(),\n\n e.state(),\n\n e.event(),\n\n e.root_x(),\n\n e.root_y(),\n\n e.event_x(),\n\n e.event_y(),\n\n MouseEventKind::Press,\n\n )\n\n }\n\n\n\n xcb::BUTTON_RELEASE => {\n\n let e: &xcb::ButtonReleaseEvent = unsafe { xcb::cast_event(&raw) };\n", "file_path": "src/xcb/conversions.rs", "rank": 46, "score": 132757.11447953785 }, { "content": "struct BindingsInput(Vec<Binding>);\n\n\n\nimpl Parse for BindingsInput {\n\n fn parse(input: ParseStream) -> Result<Self> {\n\n let mut bindings = as_bindings(comma_sep_strs(input)?);\n\n\n\n let templated_content;\n\n parenthesized!(templated_content in input);\n\n\n\n while !templated_content.is_empty() {\n\n let content;\n\n parenthesized!(content in templated_content);\n\n bindings.extend(expand_templates(\n\n comma_sep_strs(&content)?,\n\n comma_sep_strs(&content)?,\n\n ));\n\n }\n\n\n\n Ok(Self(bindings))\n\n }\n\n}\n\n\n", "file_path": "crates/penrose_proc/src/lib.rs", "rank": 47, "score": 131720.5616476545 }, { "content": "#[cfg(feature = \"serde\")]\n\n#[test]\n\nfn serde_windowmanager_can_be_deserialized() {\n\n let wm = get_seeded_wm(true);\n\n let as_json = serde_json::to_string(&wm).unwrap();\n\n let unchecked_wm: Result<WindowManager<EarlyExitConn>, serde_json::Error> =\n\n serde_json::from_str(&as_json);\n\n assert!(unchecked_wm.is_ok());\n\n}\n\n\n\n#[cfg(feature = \"serde\")]\n\n#[test]\n\n#[should_panic(\n\n expected = \"'hydrate_and_init' must be called before 'grab_keys_and_run' when restoring from serialised state\"\n\n)]\n", "file_path": "tests/serialization_tests.rs", "rank": 48, "score": 125332.09337046856 }, { "content": "#[cfg(feature = \"serde\")]\n\n#[test]\n\nfn serde_running_after_hydration_works() {\n\n let mut wm = get_seeded_wm(true);\n\n wm.grab_keys_and_run(common::test_bindings(), HashMap::new())\n\n .unwrap();\n\n let as_json = serde_json::to_string(&wm).unwrap();\n\n let mut unchecked_wm: WindowManager<EarlyExitConn> = serde_json::from_str(&as_json).unwrap();\n\n\n\n unchecked_wm\n\n .hydrate_and_init(vec![], logging_error_handler(), layout_funcs())\n\n .unwrap();\n\n\n\n unchecked_wm\n\n .grab_keys_and_run(common::test_bindings(), HashMap::new())\n\n .unwrap();\n\n}\n", "file_path": "tests/serialization_tests.rs", "rank": 49, "score": 125325.19206866046 }, { "content": "fn pango_layout(ctx: &cairo::Context) -> Result<pango::Layout> {\n\n Ok(create_layout(ctx).ok_or_else(|| XcbError::Pango(\"unable to create layout\".into()))?)\n\n}\n\n\n\n/// An XCB based [Draw] implementation backed by pango and cairo\n\n#[derive(Debug)]\n\npub struct XcbDraw {\n\n api: Api,\n\n fonts: HashMap<String, pango::FontDescription>,\n\n surfaces: HashMap<WinId, cairo::XCBSurface>,\n\n}\n\n\n\nimpl XcbDraw {\n\n /// Create a new empty [XcbDraw]. Fails if unable to connect to the X server\n\n pub fn new() -> Result<Self> {\n\n Ok(Self {\n\n api: Api::new()?,\n\n fonts: HashMap::new(),\n\n surfaces: HashMap::new(),\n\n })\n", "file_path": "src/xcb/draw.rs", "rank": 50, "score": 122260.77700132335 }, { "content": "#[cfg(feature = \"serde\")]\n\n#[test]\n\n#[should_panic(expected = \"Need to call 'hydrate_and_init' when restoring from serialised state\")]\n\nfn serde_running_init_directly_panics() {\n\n let wm = get_seeded_wm(true);\n\n let as_json = serde_json::to_string(&wm).unwrap();\n\n let mut unchecked_wm: WindowManager<EarlyExitConn> = serde_json::from_str(&as_json).unwrap();\n\n\n\n // Should panic due to self.hydrated being false\n\n unchecked_wm.init().unwrap();\n\n}\n\n\n", "file_path": "tests/serialization_tests.rs", "rank": 51, "score": 121676.6078299781 }, { "content": "fn serde_running_without_hydrating_panics() {\n\n let wm = get_seeded_wm(true);\n\n let as_json = serde_json::to_string(&wm).unwrap();\n\n let mut unchecked_wm: WindowManager<EarlyExitConn> = serde_json::from_str(&as_json).unwrap();\n\n\n\n // Should panic due to self.hydrated being false\n\n unchecked_wm\n\n .grab_keys_and_run(common::test_bindings(), HashMap::new())\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/serialization_tests.rs", "rank": 52, "score": 121658.9086560174 }, { "content": "#[cfg(feature = \"serde\")]\n\n#[test]\n\nfn serde_hydrating_when_x_state_is_wrong_errors() {\n\n let mut wm = get_seeded_wm(false);\n\n wm.grab_keys_and_run(common::test_bindings(), HashMap::new())\n\n .unwrap();\n\n let as_json = serde_json::to_string(&wm).unwrap();\n\n let mut unchecked_wm: WindowManager<EarlyExitConn> = serde_json::from_str(&as_json).unwrap();\n\n let res = unchecked_wm.hydrate_and_init(vec![], logging_error_handler(), layout_funcs());\n\n\n\n match res {\n\n Ok(_) => panic!(\"this should have returned an error\"),\n\n Err(e) => match e {\n\n PenroseError::MissingClientIds(ids) => assert_eq!(&ids, &[1, 2, 3]),\n\n _ => panic!(\"unexpected Error type from hydration\"),\n\n },\n\n }\n\n}\n\n\n", "file_path": "tests/serialization_tests.rs", "rank": 53, "score": 121645.45945689955 }, { "content": "fn main() -> Result<()> {\n\n simple_draw()?;\n\n bar_draw()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/draw/main.rs", "rank": 54, "score": 119872.28771356058 }, { "content": "fn bar_draw() -> Result<()> {\n\n let workspaces = vec![\"1\", \"2\", \"3\", \"4\", \"5\", \"6\"];\n\n let style = TextStyle {\n\n font: PROFONT.to_string(),\n\n point_size: 11,\n\n fg: WHITE.into(),\n\n bg: Some(BLACK.into()),\n\n padding: (2.0, 2.0),\n\n };\n\n let highlight = BLUE;\n\n let empty_ws = GREY;\n\n let mut bar = dwm_bar(\n\n XcbDraw::new()?,\n\n HEIGHT,\n\n &style,\n\n highlight,\n\n empty_ws,\n\n workspaces,\n\n )?;\n\n\n", "file_path": "examples/draw/main.rs", "rank": 55, "score": 117047.63170267353 }, { "content": "fn simple_draw() -> Result<()> {\n\n let mut drw = XcbDraw::new()?;\n\n let (_, _, w, _) = drw.screen_sizes()?[0].values();\n\n let id = drw.new_window(\n\n WinType::InputOutput(Atom::NetWindowTypeNormal),\n\n Region::new(0, 0, w, HEIGHT as u32),\n\n false,\n\n )?;\n\n drw.register_font(PROFONT);\n\n drw.register_font(SERIF);\n\n drw.register_font(FIRA);\n\n\n\n let mut ctx = drw.context_for(id)?;\n\n\n\n ctx.color(&WHITE.into());\n\n ctx.rectangle(0.0, 0.0, w as f64, HEIGHT as f64);\n\n ctx.translate(1.0, 1.0);\n\n\n\n ctx.color(&BLACK.into());\n\n ctx.font(PROFONT, 12)?;\n", "file_path": "examples/draw/main.rs", "rank": 56, "score": 117047.63170267353 }, { "content": "pub fn paper(\n\n clients: &[&Client],\n\n focused: Option<WinId>,\n\n monitor_region: &Region,\n\n _: u32,\n\n ratio: f32,\n\n) -> Vec<ResizeAction> {\n\n let n = clients.len();\n\n if n == 1 {\n\n return vec![(clients[0].id(), Some(*monitor_region))];\n\n }\n\n\n\n let (mx, my, mw, mh) = monitor_region.values();\n\n let min_w = 0.5; // clamp client width at 50% screen size (we're effectively fancy monocle)\n\n let cw = (mw as f32 * if ratio > min_w { ratio } else { min_w }) as u32;\n\n let step = (mw - cw) / (n - 1) as u32;\n\n\n\n let fid = focused.unwrap(); // we know we have at least one client now\n\n let mut after_focused = false;\n\n clients\n", "file_path": "src/contrib/layouts.rs", "rank": 57, "score": 116963.67298475312 }, { "content": "pub trait HookableWidget<X>: Hook<X> + Widget\n\nwhere\n\n X: XConn,\n\n{\n\n}\n\n\n\n// Blanket implementation for anything that implements both Hook and Widget\n\nimpl<X, T> HookableWidget<X> for T\n\nwhere\n\n X: XConn,\n\n T: Hook<X> + Widget,\n\n{\n\n}\n\n\n", "file_path": "src/draw/widget/mod.rs", "rank": 58, "score": 116591.57024055929 }, { "content": "fn comma_sep_strs(input: ParseStream) -> Result<Vec<String>> {\n\n let content;\n\n parenthesized!(content in input);\n\n Ok(Punctuated::<LitStr, Token![,]>::parse_terminated(&content)?\n\n .iter()\n\n .map(LitStr::value)\n\n .collect())\n\n}\n\n\n", "file_path": "crates/penrose_proc/src/lib.rs", "rank": 59, "score": 115981.93263316924 }, { "content": "fn as_bindings(raw: Vec<String>) -> Vec<Binding> {\n\n raw.iter()\n\n .map(|s| {\n\n let mut parts: Vec<&str> = s.split('-').collect();\n\n let (keyname, mods) = if parts.len() <= 1 {\n\n (None, vec![s.clone()])\n\n } else {\n\n (\n\n parts.pop().map(String::from),\n\n parts.into_iter().map(String::from).collect(),\n\n )\n\n };\n\n\n\n Binding {\n\n raw: s.clone(),\n\n keyname,\n\n mods,\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "crates/penrose_proc/src/lib.rs", "rank": 60, "score": 115866.93341984136 }, { "content": "fn main() -> Result<()> {\n\n // penrose will log useful information about the current state of the WindowManager during\n\n // normal operation that can be used to drive scripts and related programs. Additional debug\n\n // output can be helpful if you are hitting issues.\n\n SimpleLogger::init(LevelFilter::Debug, simplelog::Config::default())\n\n .expect(\"failed to init logging\");\n\n\n\n // Created at startup. See keybindings below for how to access them\n\n let mut config_builder = Config::default().builder();\n\n config_builder\n\n .workspaces(vec![\"1\", \"2\", \"3\", \"4\", \"5\", \"6\", \"7\", \"8\", \"9\"])\n\n // Windows with a matching WM_CLASS will always float\n\n .floating_classes(vec![\"dmenu\", \"dunst\", \"polybar\"])\n\n // Client border colors are set based on X focus\n\n .focused_border(0xcc241d) // #cc241d\n\n .unfocused_border(0x3c3836); // #3c3836\n\n\n\n // When specifying a layout, most of the time you will want LayoutConf::default() as shown\n\n // below, which will honour gap settings and will not be run on focus changes (only when\n\n // clients are added/removed). To customise when/how each layout is applied you can create a\n", "file_path": "examples/simple_config_with_hooks/main.rs", "rank": 61, "score": 114428.92418918805 }, { "content": "pub fn row_layout(\n\n clients: &[&Client],\n\n _focused: Option<WinId>,\n\n monitor_region: &Region,\n\n _max_main: u32,\n\n _ratio: f32,\n\n) -> Vec<ResizeAction> {\n\n monitor_region\n\n .as_rows(clients.len() as u32)\n\n .iter()\n\n .zip(clients)\n\n .map(|(r, c)| (c.id(), Some(*r)))\n\n .collect()\n\n}\n\n\n", "file_path": "src/__example_helpers.rs", "rank": 62, "score": 114147.23639532589 }, { "content": "/// Create a Vec of index selectors for the given input slice\n\npub fn index_selectors<'a, T>(len: usize) -> Vec<Selector<'a, T>> {\n\n (0..len).map(Selector::Index).collect()\n\n}\n\n\n", "file_path": "src/core/helpers.rs", "rank": 63, "score": 110934.48337226012 }, { "content": "/// Send a notification using the `notify-send` command line program\n\n///\n\n/// # Example\n\n/// ```no_run\n\n/// # use penrose::{contrib::extensions::notify_send::*};\n\n/// # fn example() -> penrose::Result<()> {\n\n/// notify_send(\"My Notification\", \"hello from penrose!\", NotifyConfig::default())?;\n\n///\n\n/// // equivalent to the following on the command line:\n\n/// // $ notify-send 'My Notification' 'hello from penrose!' -u normal -t 5000\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn notify_send(\n\n title: impl Into<String>,\n\n body: impl Into<String>,\n\n config: NotifyConfig,\n\n) -> Result<()> {\n\n spawn_with_args(\n\n \"notify-send\",\n\n &[\n\n \"-u\",\n\n &config.level.to_string(),\n\n \"-t\",\n\n &config.duration.to_string(),\n\n &title.into(),\n\n &body.into(),\n\n ],\n\n )\n\n}\n\n\n", "file_path": "src/contrib/extensions/notify_send.rs", "rank": 64, "score": 109119.9888467672 }, { "content": "// Hleper for formatting lines with optional line numbers\n\nfn fmt_lines(lines: &[(usize, &String)], show_line_numbers: bool) -> Vec<String> {\n\n lines\n\n .iter()\n\n .map(|(i, line)| {\n\n if show_line_numbers {\n\n format!(\"{:<3} {}\", i, line)\n\n } else {\n\n line.to_string()\n\n }\n\n })\n\n .collect()\n\n}\n", "file_path": "crates/penrose_menu/src/lib.rs", "rank": 65, "score": 107962.57410913319 }, { "content": "pub fn example_layouts() -> Vec<Layout> {\n\n vec![\n\n Layout::new(\"first\", LayoutConf::default(), row_layout, 1, 0.6),\n\n Layout::new(\"second\", LayoutConf::default(), row_layout, 1, 0.6),\n\n ]\n\n}\n\n\n", "file_path": "src/__example_helpers.rs", "rank": 66, "score": 103063.66532007098 }, { "content": "/// A simple error handler that uses 'notify-send' to display a dialog window with the error\n\n/// message.\n\npub fn notify_send_error_handler() -> ErrorHandler {\n\n Box::new(|e: PenroseError| {\n\n if notify_send(\n\n \"Unhandled Error\",\n\n e.to_string(),\n\n NotifyConfig {\n\n level: NotifyLevel::Critical,\n\n duration: 10000,\n\n },\n\n )\n\n .is_err()\n\n {\n\n error!(\"Unable to display error via notify-send. Error was: {}\", e);\n\n }\n\n })\n\n}\n", "file_path": "src/contrib/extensions/notify_send.rs", "rank": 67, "score": 97646.60452176639 }, { "content": "fn layout_funcs() -> HashMap<&'static str, LayoutFunc> {\n\n map! {\n\n \"[side]\" => side_stack as LayoutFunc,\n\n \"[----]\" => floating as LayoutFunc,\n\n }\n\n}\n\n\n", "file_path": "tests/serialization_tests.rs", "rank": 68, "score": 97133.04653972635 }, { "content": "fn expand_templates(templates: Vec<String>, keynames: Vec<String>) -> Vec<Binding> {\n\n templates\n\n .iter()\n\n .flat_map(|t| {\n\n let mut parts: Vec<&str> = t.split('-').collect();\n\n if parts.pop() != Some(\"{}\") {\n\n panic!(\n\n \"'{}' is an invalid template: expected '<Modifiers>-{{}}'\",\n\n t\n\n )\n\n };\n\n keynames\n\n .iter()\n\n .map(|k| Binding {\n\n raw: format!(\"{}-{}\", parts.join(\"-\"), k),\n\n mods: parts.iter().map(|m| m.to_string()).collect(),\n\n keyname: Some(k.into()),\n\n })\n\n .collect::<Vec<Binding>>()\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "crates/penrose_proc/src/lib.rs", "rank": 69, "score": 91969.32293843248 }, { "content": "fn get_seeded_wm(valid_clients: bool) -> WindowManager<EarlyExitConn> {\n\n // Seeding the MockXConn with events so that we should end up with:\n\n // - clients 1 on workspace 0\n\n // - client 2 & 3 on workspace 1\n\n // - focus on client 2\n\n // - screen 0 holding workspace 1\n\n // - screen 1 holding workspace 0\n\n let conn = EarlyExitConn::new(\n\n valid_clients,\n\n vec![\n\n XEvent::MapRequest {\n\n id: 1,\n\n ignore: false,\n\n },\n\n XEvent::KeyPress(common::WORKSPACE_CHANGE_CODE),\n\n XEvent::MapRequest {\n\n id: 2,\n\n ignore: false,\n\n },\n\n XEvent::MapRequest {\n", "file_path": "tests/serialization_tests.rs", "rank": 70, "score": 91642.97917226925 }, { "content": "#[cfg(feature = \"serde\")]\n\n#[test]\n\nfn serde_windowmanager_can_be_serialized() {\n\n let wm = get_seeded_wm(true);\n\n let as_json = serde_json::to_string(&wm);\n\n assert!(as_json.is_ok());\n\n}\n\n\n", "file_path": "tests/serialization_tests.rs", "rank": 71, "score": 90473.62832324322 }, { "content": "/// A [Draw] that can return the [KeyPress] events from the user for its windows\n\npub trait KeyPressDraw: Draw {\n\n /// Attempt to parse the next [XEvent] from an underlying connection as a [KeyPress] if there\n\n /// is one.\n\n ///\n\n /// Should return Ok(None) if no events are currently available.\n\n fn next_keypress(&self) -> Result<Option<KeyPressParseAttempt>>;\n\n\n\n /// Wait for the next [XEvent] from an underlying connection as a [KeyPress] and attempt to\n\n /// parse it as a [KeyPress].\n\n fn next_keypress_blocking(&self) -> Result<KeyPressParseAttempt>;\n\n}\n\n\n", "file_path": "src/draw/mod.rs", "rank": 72, "score": 88275.8139189464 }, { "content": "fn process_client_message(\n\n state: WmState<'_>,\n\n id: WinId,\n\n dtype: &str,\n\n data: &[usize],\n\n) -> Vec<EventAction> {\n\n let is_full_screen = [data.get(1), data.get(2)].contains(&Some(&state.full_screen_atom));\n\n match Atom::from_str(&dtype) {\n\n Ok(Atom::NetWmState) if is_full_screen => {\n\n // _NET_WM_STATE_ADD == 1, _NET_WM_STATE_TOGGLE == 2\n\n let should_fullscreen = [1, 2].contains(&data[0]);\n\n vec![EventAction::ToggleClientFullScreen(id, should_fullscreen)]\n\n }\n\n _ => vec![],\n\n }\n\n}\n\n\n", "file_path": "src/core/manager/event.rs", "rank": 73, "score": 87557.59028870084 }, { "content": "fn my_layouts() -> Vec<Layout> {\n\n let n_main = 1;\n\n let ratio = 0.6;\n\n let follow_focus_conf = LayoutConf {\n\n floating: false,\n\n gapless: true,\n\n follow_focus: true,\n\n allow_wrapping: false,\n\n };\n\n\n\n vec![\n\n Layout::new(\"[side]\", LayoutConf::default(), side_stack, n_main, ratio),\n\n Layout::new(\"[botm]\", LayoutConf::default(), bottom_stack, n_main, ratio),\n\n Layout::new(\"[papr]\", follow_focus_conf, paper, n_main, ratio),\n\n ]\n\n}\n\n\n", "file_path": "examples/dynamic_workspaces/main.rs", "rank": 74, "score": 87033.16339529134 }, { "content": "//! A wrapper around the underlying xcb api layer that only exposes Penrose types\n\nuse crate::{\n\n core::{\n\n bindings::{KeyCode, KeyCodeMask, KeyCodeValue, MouseEvent, MouseState},\n\n data_types::{Point, PropVal, Region, WinAttr, WinConfig, WinId, WinType},\n\n helpers::spawn_for_output,\n\n screen::Screen,\n\n xconnection::{Atom, XEvent},\n\n },\n\n xcb::{Result, XcbApi, XcbError, XcbGenericEvent},\n\n};\n\nuse strum::*;\n\n\n\nuse std::{collections::HashMap, convert::TryFrom, fmt, str::FromStr};\n\n\n\n#[cfg(feature = \"keysyms\")]\n\nuse crate::{core::bindings::KeyPress, draw::KeyPressParseAttempt};\n\n#[cfg(feature = \"keysyms\")]\n\nuse penrose_keysyms::XKeySym;\n\n\n\n/// A reverse lookup of KeyCode mask and value to key as a String using XKeySym mappings\n\npub type ReverseCodeMap = HashMap<(KeyCodeMask, KeyCodeValue), String>;\n\n\n\n#[cfg(feature = \"serde\")]\n", "file_path": "src/xcb/api.rs", "rank": 92, "score": 80694.38828044629 }, { "content": "\n\n let (ty, data) = match val {\n\n PropVal::Atom(data) => (xcb::xproto::ATOM_ATOM, data),\n\n PropVal::Cardinal(data) => (xcb::xproto::ATOM_CARDINAL, data),\n\n PropVal::Window(data) => (xcb::xproto::ATOM_WINDOW, data),\n\n PropVal::Str(s) => {\n\n let (ty, data) = (xcb::xproto::ATOM_STRING, s.as_bytes());\n\n xcb::change_property(&self.conn, mode, id, a, ty, 8, data);\n\n return;\n\n }\n\n };\n\n\n\n xcb::change_property(&self.conn, mode, id, a, ty, 32, data);\n\n }\n\n\n\n fn create_window(&self, ty: WinType, reg: Region, managed: bool) -> Result<WinId> {\n\n let (ty, mut data, class, root, depth, visual_id) = match ty {\n\n WinType::CheckWin => (\n\n None,\n\n Vec::new(),\n", "file_path": "src/xcb/api.rs", "rank": 93, "score": 80677.80438630449 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"XCB Api\")\n\n .field(\"root\", &self.root)\n\n .field(\"randr_base\", &self.randr_base)\n\n .field(\"atoms\", &self.atoms)\n\n .finish()\n\n }\n\n}\n\n\n\nimpl Drop for Api {\n\n fn drop(&mut self) {\n\n self.destroy_window(self.check_win)\n\n }\n\n}\n\n\n\nimpl Api {\n\n /// Connect to the X server using the [XCB API][1]\n\n ///\n\n /// Each [Api] contains and embedded [xcb Connection][2] which is used for making\n\n /// all api calls through to the X server. Some state is cached in the Api itself\n", "file_path": "src/xcb/api.rs", "rank": 94, "score": 80677.36878087533 }, { "content": " .into_iter()\n\n .flatten()\n\n })\n\n .collect::<HashMap<(u16, u8), String>>())\n\n}\n\n\n\n/// A connection to the X server using the XCB C API\n\n#[cfg_attr(feature = \"serde\", derive(Serialize, Deserialize))]\n\npub struct Api {\n\n #[cfg_attr(feature = \"serde\", serde(skip, default = \"default_conn\"))]\n\n conn: xcb::Connection,\n\n root: WinId,\n\n check_win: WinId,\n\n randr_base: u8,\n\n atoms: HashMap<Atom, u32>,\n\n #[cfg(feature = \"keysyms\")]\n\n code_map: ReverseCodeMap,\n\n}\n\n\n\nimpl fmt::Debug for Api {\n", "file_path": "src/xcb/api.rs", "rank": 95, "score": 80676.69644297619 }, { "content": " /// in order to prevent redundant calls through to the X server.\n\n ///\n\n /// Creating a new [Api] instance will establish the underlying connection and if\n\n /// the `keysyms` feature is enabled, pull [KeyCode] mappings from the user\n\n /// system using `xmodmap`.\n\n ///\n\n /// [1]: http://rtbo.github.io/rust-xcb\n\n /// [2]: http://rtbo.github.io/rust-xcb/xcb/base/struct.Connection.html\n\n pub fn new() -> Result<Self> {\n\n let (conn, _) = xcb::Connection::connect(None)?;\n\n let mut api = Self {\n\n conn,\n\n root: 0,\n\n check_win: 0,\n\n randr_base: 0,\n\n atoms: HashMap::new(),\n\n #[cfg(feature = \"keysyms\")]\n\n code_map: code_map_from_xmodmap()?,\n\n };\n\n api.init()?;\n", "file_path": "src/xcb/api.rs", "rank": 96, "score": 80668.82948415155 }, { "content": " id, // window to focus\n\n 0, // event time (0 == current time)\n\n );\n\n\n\n self.replace_prop(id, Atom::NetActiveWindow, PropVal::Window(&[id]));\n\n }\n\n\n\n fn send_client_event(&self, id: WinId, atom_name: &str) -> Result<()> {\n\n let atom = self.atom(atom_name)?;\n\n let wm_protocols = self.known_atom(Atom::WmProtocols);\n\n let data = xcb::ClientMessageData::from_data32([atom, xcb::CURRENT_TIME, 0, 0, 0]);\n\n let event = xcb::ClientMessageEvent::new(32, id, wm_protocols, data);\n\n\n\n xcb::send_event(&self.conn, false, id, xcb::EVENT_MASK_NO_EVENT, &event);\n\n Ok(())\n\n }\n\n\n\n fn set_window_attributes(&self, id: WinId, attrs: &[WinAttr]) {\n\n let data: Vec<(u32, u32)> = attrs.iter().flat_map::<Vec<_>, _>(|c| c.into()).collect();\n\n xcb::change_window_attributes(&self.conn, id, &data);\n", "file_path": "src/xcb/api.rs", "rank": 97, "score": 80667.20295541626 }, { "content": " }\n\n\n\n pub(crate) fn known_atoms(&self) -> &HashMap<Atom, u32> {\n\n &self.atoms\n\n }\n\n\n\n pub(crate) fn conn(&self) -> &xcb::Connection {\n\n &self.conn\n\n }\n\n\n\n pub(crate) fn screen(&self, ix: usize) -> Result<xcb::Screen<'_>> {\n\n let mut roots: Vec<_> = self.conn.get_setup().roots().collect();\n\n let len = roots.len();\n\n if ix >= len {\n\n Err(XcbError::UnknownScreen(ix, len - 1))\n\n } else {\n\n Ok(roots.remove(ix))\n\n }\n\n }\n\n\n", "file_path": "src/xcb/api.rs", "rank": 98, "score": 80664.57964558275 }, { "content": " })\n\n .collect())\n\n }\n\n\n\n fn screen_sizes(&self) -> Result<Vec<Region>> {\n\n self.current_screens()\n\n .map(|screens| screens.iter().map(|s| s.region(false)).collect())\n\n }\n\n\n\n fn current_clients(&self) -> Result<Vec<WinId>> {\n\n Ok(xcb::query_tree(&self.conn, self.root)\n\n .get_reply()\n\n .map(|reply| reply.children().into())?)\n\n }\n\n\n\n fn cursor_position(&self) -> Point {\n\n xcb::query_pointer(&self.conn, self.root)\n\n .get_reply()\n\n .map_or_else(\n\n |_| Point::new(0, 0),\n", "file_path": "src/xcb/api.rs", "rank": 99, "score": 80662.62811185722 } ]
Rust
src/fcfg1/config_if_adc.rs
luojia65/cc2640r2f
03a5bfca3e739dbb8310e2b0dabb07a8ca572fe5
#[doc = r" Value read from the register"] pub struct R { bits: u32, } #[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::CONFIG_IF_ADC { #[doc = r" Modifies the contents of the register"] #[inline] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline] pub fn reset(&self) { self.write(|w| w) } } #[doc = r" Value of the field"] pub struct FF2ADJR { bits: u8, } impl FF2ADJR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct FF3ADJR { bits: u8, } impl FF3ADJR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct INT3ADJR { bits: u8, } impl INT3ADJR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct FF1ADJR { bits: u8, } impl FF1ADJR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct AAFCAPR { bits: u8, } impl AAFCAPR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct INT2ADJR { bits: u8, } impl INT2ADJR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct IFDIGLDO_TRIM_OUTPUTR { bits: u8, } impl IFDIGLDO_TRIM_OUTPUTR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct IFANALDO_TRIM_OUTPUTR { bits: u8, } impl IFANALDO_TRIM_OUTPUTR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Proxy"] pub struct _FF2ADJW<'a> { w: &'a mut W, } impl<'a> _FF2ADJW<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 15; const OFFSET: u8 = 28; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FF3ADJW<'a> { w: &'a mut W, } impl<'a> _FF3ADJW<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 15; const OFFSET: u8 = 24; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _INT3ADJW<'a> { w: &'a mut W, } impl<'a> _INT3ADJW<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 15; const OFFSET: u8 = 20; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FF1ADJW<'a> { w: &'a mut W, } impl<'a> _FF1ADJW<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 15; const OFFSET: u8 = 16; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _AAFCAPW<'a> { w: &'a mut W, } impl<'a> _AAFCAPW<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 14; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _INT2ADJW<'a> { w: &'a mut W, } impl<'a> _INT2ADJW<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 15; const OFFSET: u8 = 10; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _IFDIGLDO_TRIM_OUTPUTW<'a> { w: &'a mut W, } impl<'a> _IFDIGLDO_TRIM_OUTPUTW<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 31; const OFFSET: u8 = 5; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _IFANALDO_TRIM_OUTPUTW<'a> { w: &'a mut W, } impl<'a> _IFANALDO_TRIM_OUTPUTW<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 31; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bits 28:31 - 31:28\\] Internal. Only to be used through TI provided API."] #[inline] pub fn ff2adj(&self) -> FF2ADJR { let bits = { const MASK: u8 = 15; const OFFSET: u8 = 28; ((self.bits >> OFFSET) & MASK as u32) as u8 }; FF2ADJR { bits } } #[doc = "Bits 24:27 - 27:24\\] Internal. Only to be used through TI provided API."] #[inline] pub fn ff3adj(&self) -> FF3ADJR { let bits = { const MASK: u8 = 15; const OFFSET: u8 = 24; ((self.bits >> OFFSET) & MASK as u32) as u8 }; FF3ADJR { bits } } #[doc = "Bits 20:23 - 23:20\\] Internal. Only to be used through TI provided API."] #[inline] pub fn int3adj(&self) -> INT3ADJR { let bits = { const MASK: u8 = 15; const OFFSET: u8 = 20; ((self.bits >> OFFSET) & MASK as u32) as u8 }; INT3ADJR { bits } } #[doc = "Bits 16:19 - 19:16\\] Internal. Only to be used through TI provided API."] #[inline] pub fn ff1adj(&self) -> FF1ADJR { let bits = { const MASK: u8 = 15; const OFFSET: u8 = 16; ((self.bits >> OFFSET) & MASK as u32) as u8 }; FF1ADJR { bits } } #[doc = "Bits 14:15 - 15:14\\] Internal. Only to be used through TI provided API."] #[inline] pub fn aafcap(&self) -> AAFCAPR { let bits = { const MASK: u8 = 3; const OFFSET: u8 = 14; ((self.bits >> OFFSET) & MASK as u32) as u8 }; AAFCAPR { bits } } #[doc = "Bits 10:13 - 13:10\\] Internal. Only to be used through TI provided API."] #[inline] pub fn int2adj(&self) -> INT2ADJR { let bits = { const MASK: u8 = 15; const OFFSET: u8 = 10; ((self.bits >> OFFSET) & MASK as u32) as u8 }; INT2ADJR { bits } } #[doc = "Bits 5:9 - 9:5\\] Internal. Only to be used through TI provided API."] #[inline] pub fn ifdigldo_trim_output(&self) -> IFDIGLDO_TRIM_OUTPUTR { let bits = { const MASK: u8 = 31; const OFFSET: u8 = 5; ((self.bits >> OFFSET) & MASK as u32) as u8 }; IFDIGLDO_TRIM_OUTPUTR { bits } } #[doc = "Bits 0:4 - 4:0\\] Internal. Only to be used through TI provided API."] #[inline] pub fn ifanaldo_trim_output(&self) -> IFANALDO_TRIM_OUTPUTR { let bits = { const MASK: u8 = 31; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u32) as u8 }; IFANALDO_TRIM_OUTPUTR { bits } } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 878769152 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bits 28:31 - 31:28\\] Internal. Only to be used through TI provided API."] #[inline] pub fn ff2adj(&mut self) -> _FF2ADJW { _FF2ADJW { w: self } } #[doc = "Bits 24:27 - 27:24\\] Internal. Only to be used through TI provided API."] #[inline] pub fn ff3adj(&mut self) -> _FF3ADJW { _FF3ADJW { w: self } } #[doc = "Bits 20:23 - 23:20\\] Internal. Only to be used through TI provided API."] #[inline] pub fn int3adj(&mut self) -> _INT3ADJW { _INT3ADJW { w: self } } #[doc = "Bits 16:19 - 19:16\\] Internal. Only to be used through TI provided API."] #[inline] pub fn ff1adj(&mut self) -> _FF1ADJW { _FF1ADJW { w: self } } #[doc = "Bits 14:15 - 15:14\\] Internal. Only to be used through TI provided API."] #[inline] pub fn aafcap(&mut self) -> _AAFCAPW { _AAFCAPW { w: self } } #[doc = "Bits 10:13 - 13:10\\] Internal. Only to be used through TI provided API."] #[inline] pub fn int2adj(&mut self) -> _INT2ADJW { _INT2ADJW { w: self } } #[doc = "Bits 5:9 - 9:5\\] Internal. Only to be used through TI provided API."] #[inline] pub fn ifdigldo_trim_output(&mut self) -> _IFDIGLDO_TRIM_OUTPUTW { _IFDIGLDO_TRIM_OUTPUTW { w: self } } #[doc = "Bits 0:4 - 4:0\\] Internal. Only to be used through TI provided API."] #[inline] pub fn ifanaldo_trim_output(&mut self) -> _IFANALDO_TRIM_OUTPUTW { _IFANALDO_TRIM_OUTPUTW { w: self } } }
#[doc = r" Value read from the register"] pub struct R { bits: u32, } #[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::CONFIG_IF_ADC { #[doc = r" Modifies the contents of the register"] #[inline] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline] pub fn reset(&self) { self.write(|w| w) } } #[doc = r" Value of the field"] pub struct FF2ADJR { bits: u8, } impl FF2ADJR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct FF3ADJR { bits: u8, } impl FF3ADJR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct INT3ADJR { bits: u8, } impl INT3ADJR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct FF1ADJR { bits: u8, } impl FF1ADJR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct AAFCAPR { bits: u8, } impl AAFCAPR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct INT2ADJR { bits: u8, } impl INT2ADJR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct IFDIGLDO_TRIM_OUTPUTR { bits: u8, } impl IFDIGLDO_TRIM_OUTPUTR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct IFANALDO_TRIM_OUTPUTR { bits: u8, } impl IFANALDO_TRIM_OUTPUTR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Proxy"] pub struct _FF2ADJW<'a> { w: &'a mut W, } impl<'a> _FF2ADJW<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 15; const OFFSET: u8 = 28; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FF3ADJW<'a> { w: &'a mut W, } impl<'a> _FF3ADJW<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 15; const OFFSET: u8 = 24; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _INT3ADJW<'a> { w: &'a mut W, } impl<'a> _INT3ADJW<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 15; const OFFSET: u8 = 20; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _FF1ADJW<'a> { w: &'a mut W, } impl<'a> _FF1ADJW<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 15; const OFFSET: u8 = 16; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _AAFCAPW<'a> { w: &'a mut W, } impl<'a> _AAFCAPW<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 14; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _INT2ADJW<'a> { w: &'a mut W, } impl<'a> _INT2ADJW<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 15; const OFFSET: u8 = 10; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _IFDIGLDO_TRIM_OUTPUTW<'a> { w: &'a mut W, } impl<'a> _IFDIGLDO_TRIM_OUTPUTW<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 31; const OFFSET: u8 = 5; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _IFANALDO_TRIM_OUTPUTW<'a> { w: &'a mut W, } impl<'a> _IFANALDO_TRIM_OUTPUTW<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 31; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bits 28:31 - 31:28\\] Internal. Only to be used through TI provided API."] #[inline] pub fn ff2adj(&self) -> FF2ADJR { let bits = { const MASK: u8 = 15; const OFFSET: u8 = 28; ((self.bits >> OFFSET) & MASK as u32) as u8 }; FF2ADJR { bits } } #[doc = "Bits 24:27 - 27:24\\] Internal. Only to be used through TI provided API."] #[inline] pub fn ff3adj(&self) -> FF3ADJR { let bits = { const MASK: u8 = 15; const OFFSET: u8 = 24; ((self.bits >> OFFSET) & MASK as u32) as u8 }; FF3ADJR { bits } } #[doc = "Bits 20:23 - 23:20\\] Internal. Only to be used through TI provided API."] #[inline] pub fn int3adj(&self) -> INT3ADJR {
INT3ADJR { bits } } #[doc = "Bits 16:19 - 19:16\\] Internal. Only to be used through TI provided API."] #[inline] pub fn ff1adj(&self) -> FF1ADJR { let bits = { const MASK: u8 = 15; const OFFSET: u8 = 16; ((self.bits >> OFFSET) & MASK as u32) as u8 }; FF1ADJR { bits } } #[doc = "Bits 14:15 - 15:14\\] Internal. Only to be used through TI provided API."] #[inline] pub fn aafcap(&self) -> AAFCAPR { let bits = { const MASK: u8 = 3; const OFFSET: u8 = 14; ((self.bits >> OFFSET) & MASK as u32) as u8 }; AAFCAPR { bits } } #[doc = "Bits 10:13 - 13:10\\] Internal. Only to be used through TI provided API."] #[inline] pub fn int2adj(&self) -> INT2ADJR { let bits = { const MASK: u8 = 15; const OFFSET: u8 = 10; ((self.bits >> OFFSET) & MASK as u32) as u8 }; INT2ADJR { bits } } #[doc = "Bits 5:9 - 9:5\\] Internal. Only to be used through TI provided API."] #[inline] pub fn ifdigldo_trim_output(&self) -> IFDIGLDO_TRIM_OUTPUTR { let bits = { const MASK: u8 = 31; const OFFSET: u8 = 5; ((self.bits >> OFFSET) & MASK as u32) as u8 }; IFDIGLDO_TRIM_OUTPUTR { bits } } #[doc = "Bits 0:4 - 4:0\\] Internal. Only to be used through TI provided API."] #[inline] pub fn ifanaldo_trim_output(&self) -> IFANALDO_TRIM_OUTPUTR { let bits = { const MASK: u8 = 31; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u32) as u8 }; IFANALDO_TRIM_OUTPUTR { bits } } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 878769152 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bits 28:31 - 31:28\\] Internal. Only to be used through TI provided API."] #[inline] pub fn ff2adj(&mut self) -> _FF2ADJW { _FF2ADJW { w: self } } #[doc = "Bits 24:27 - 27:24\\] Internal. Only to be used through TI provided API."] #[inline] pub fn ff3adj(&mut self) -> _FF3ADJW { _FF3ADJW { w: self } } #[doc = "Bits 20:23 - 23:20\\] Internal. Only to be used through TI provided API."] #[inline] pub fn int3adj(&mut self) -> _INT3ADJW { _INT3ADJW { w: self } } #[doc = "Bits 16:19 - 19:16\\] Internal. Only to be used through TI provided API."] #[inline] pub fn ff1adj(&mut self) -> _FF1ADJW { _FF1ADJW { w: self } } #[doc = "Bits 14:15 - 15:14\\] Internal. Only to be used through TI provided API."] #[inline] pub fn aafcap(&mut self) -> _AAFCAPW { _AAFCAPW { w: self } } #[doc = "Bits 10:13 - 13:10\\] Internal. Only to be used through TI provided API."] #[inline] pub fn int2adj(&mut self) -> _INT2ADJW { _INT2ADJW { w: self } } #[doc = "Bits 5:9 - 9:5\\] Internal. Only to be used through TI provided API."] #[inline] pub fn ifdigldo_trim_output(&mut self) -> _IFDIGLDO_TRIM_OUTPUTW { _IFDIGLDO_TRIM_OUTPUTW { w: self } } #[doc = "Bits 0:4 - 4:0\\] Internal. Only to be used through TI provided API."] #[inline] pub fn ifanaldo_trim_output(&mut self) -> _IFANALDO_TRIM_OUTPUTW { _IFANALDO_TRIM_OUTPUTW { w: self } } }
let bits = { const MASK: u8 = 15; const OFFSET: u8 = 20; ((self.bits >> OFFSET) & MASK as u32) as u8 };
assignment_statement
[ { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "build.rs", "rank": 0, "score": 66112.13639546047 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::VALUE {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/wdt/value.rs", "rank": 1, "score": 62802.82347385055 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::FR {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/uart0/fr.rs", "rank": 2, "score": 62801.546004497635 }, { "content": " const MASK: u32 = 4294967295;\n\n const OFFSET: u8 = 0;\n\n ((self.bits >> OFFSET) & MASK as u32) as u32\n\n };\n\n WDTVALUER { bits }\n\n }\n\n}\n\nimpl W {\n\n #[doc = r\" Reset value of the register\"]\n\n #[inline]\n\n pub fn reset_value() -> W {\n\n W { bits: 4294967295 }\n\n }\n\n #[doc = r\" Writes raw bits to the register\"]\n\n #[inline]\n\n pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {\n\n self.bits = bits;\n\n self\n\n }\n\n #[doc = \"Bits 0:31 - 31:0\\\\] This register contains the current count value of the timer.\"]\n\n #[inline]\n\n pub fn wdtvalue(&mut self) -> _WDTVALUEW {\n\n _WDTVALUEW { w: self }\n\n }\n\n}\n", "file_path": "src/wdt/value.rs", "rank": 3, "score": 62799.58684985877 }, { "content": " #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u32) -> &'a mut W {\n\n const MASK: u32 = 4294967295;\n\n const OFFSET: u8 = 0;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = r\" Value of the register as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u32 {\n\n self.bits\n\n }\n\n #[doc = \"Bits 0:31 - 31:0\\\\] This register contains the current count value of the timer.\"]\n\n #[inline]\n\n pub fn wdtvalue(&self) -> WDTVALUER {\n\n let bits = {\n", "file_path": "src/wdt/value.rs", "rank": 4, "score": 62797.70661828424 }, { "content": " }\n\n #[doc = r\" Reads the contents of the register\"]\n\n #[inline]\n\n pub fn read(&self) -> R {\n\n R {\n\n bits: self.register.get(),\n\n }\n\n }\n\n #[doc = r\" Writes to the register\"]\n\n #[inline]\n\n pub fn write<F>(&self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W::reset_value();\n\n f(&mut w);\n\n self.register.set(w.bits);\n\n }\n\n #[doc = r\" Writes the reset value to the register\"]\n\n #[inline]\n", "file_path": "src/wdt/value.rs", "rank": 5, "score": 62796.37280598504 }, { "content": " const MASK: bool = true;\n\n const OFFSET: u8 = 0;\n\n ((self.bits >> OFFSET) & MASK as u32) != 0\n\n };\n\n CTSR { bits }\n\n }\n\n}\n\nimpl W {\n\n #[doc = r\" Reset value of the register\"]\n\n #[inline]\n\n pub fn reset_value() -> W {\n\n W { bits: 144 }\n\n }\n\n #[doc = r\" Writes raw bits to the register\"]\n\n #[inline]\n\n pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {\n\n self.bits = bits;\n\n self\n\n }\n\n #[doc = \"Bits 8:31 - 31:8\\\\] Software should not rely on the value of a reserved. Writing any other value than the reset value may result in undefined behavior.\"]\n", "file_path": "src/uart0/fr.rs", "rank": 6, "score": 62792.81011427449 }, { "content": " }\n\n}\n\n#[doc = r\" Proxy\"]\n\npub struct _RESERVED0W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> _RESERVED0W<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n const MASK: u8 = 3;\n\n const OFFSET: u8 = 1;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\n#[doc = r\" Proxy\"]\n\npub struct _CTSW<'a> {\n\n w: &'a mut W,\n", "file_path": "src/uart0/fr.rs", "rank": 7, "score": 62791.527749898385 }, { "content": " }\n\n #[doc = r\" Reads the contents of the register\"]\n\n #[inline]\n\n pub fn read(&self) -> R {\n\n R {\n\n bits: self.register.get(),\n\n }\n\n }\n\n #[doc = r\" Writes to the register\"]\n\n #[inline]\n\n pub fn write<F>(&self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W::reset_value();\n\n f(&mut w);\n\n self.register.set(w.bits);\n\n }\n\n #[doc = r\" Writes the reset value to the register\"]\n\n #[inline]\n", "file_path": "src/uart0/fr.rs", "rank": 8, "score": 62790.51155721158 }, { "content": " pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n const MASK: bool = true;\n\n const OFFSET: u8 = 6;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\n#[doc = r\" Proxy\"]\n\npub struct _TXFFW<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> _TXFFW<'a> {\n\n #[doc = r\" Sets the field bit\"]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "src/uart0/fr.rs", "rank": 9, "score": 62780.20058045505 }, { "content": " #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n const MASK: bool = true;\n\n const OFFSET: u8 = 7;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\n#[doc = r\" Proxy\"]\n\npub struct _RXFFW<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> _RXFFW<'a> {\n\n #[doc = r\" Sets the field bit\"]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\" Clears the field bit\"]\n", "file_path": "src/uart0/fr.rs", "rank": 10, "score": 62779.759135968205 }, { "content": "impl<'a> _RXFEW<'a> {\n\n #[doc = r\" Sets the field bit\"]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\" Clears the field bit\"]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n const MASK: bool = true;\n\n const OFFSET: u8 = 4;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\n#[doc = r\" Proxy\"]\n", "file_path": "src/uart0/fr.rs", "rank": 11, "score": 62778.7949850279 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\" Clears the field bit\"]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n const MASK: bool = true;\n\n const OFFSET: u8 = 5;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\n#[doc = r\" Proxy\"]\n\npub struct _RXFEW<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/uart0/fr.rs", "rank": 12, "score": 62775.95730517481 }, { "content": " pub fn reset(&self) {\n\n self.write(|w| w)\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n\npub struct WDTVALUER {\n\n bits: u32,\n\n}\n\nimpl WDTVALUER {\n\n #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u32 {\n\n self.bits\n\n }\n\n}\n\n#[doc = r\" Proxy\"]\n\npub struct _WDTVALUEW<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> _WDTVALUEW<'a> {\n", "file_path": "src/wdt/value.rs", "rank": 13, "score": 62774.80805675678 }, { "content": "}\n\nimpl<'a> _CTSW<'a> {\n\n #[doc = r\" Sets the field bit\"]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\" Clears the field bit\"]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n const MASK: bool = true;\n\n const OFFSET: u8 = 0;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n", "file_path": "src/uart0/fr.rs", "rank": 14, "score": 62774.06930388973 }, { "content": "pub struct _BUSYW<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> _BUSYW<'a> {\n\n #[doc = r\" Sets the field bit\"]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\" Clears the field bit\"]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n const MASK: bool = true;\n\n const OFFSET: u8 = 3;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n", "file_path": "src/uart0/fr.rs", "rank": 15, "score": 62773.98782221788 }, { "content": "impl R {\n\n #[doc = r\" Value of the register as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u32 {\n\n self.bits\n\n }\n\n #[doc = \"Bits 8:31 - 31:8\\\\] Software should not rely on the value of a reserved. Writing any other value than the reset value may result in undefined behavior.\"]\n\n #[inline]\n\n pub fn reserved1(&self) -> RESERVED1R {\n\n let bits = {\n\n const MASK: u32 = 16777215;\n\n const OFFSET: u8 = 8;\n\n ((self.bits >> OFFSET) & MASK as u32) as u32\n\n };\n\n RESERVED1R { bits }\n\n }\n\n #[doc = \"Bit 7 - 7:7\\\\] UART Transmit FIFO Empty: The meaning of this bit depends on the state of LCRH.FEN . - If the FIFO is disabled, this bit is set when the transmit holding register is empty. - If the FIFO is enabled, this bit is set when the transmit FIFO is empty. This bit does not indicate if there is data in the transmit shift register.\"]\n\n #[inline]\n\n pub fn txfe(&self) -> TXFER {\n\n let bits = {\n", "file_path": "src/uart0/fr.rs", "rank": 16, "score": 62770.45531899199 }, { "content": " const MASK: u32 = 16777215;\n\n const OFFSET: u8 = 8;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\n#[doc = r\" Proxy\"]\n\npub struct _TXFEW<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> _TXFEW<'a> {\n\n #[doc = r\" Sets the field bit\"]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\" Clears the field bit\"]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n", "file_path": "src/uart0/fr.rs", "rank": 17, "score": 62766.806376532484 }, { "content": " }\n\n #[doc = r\" Returns `true` if the bit is clear (0)\"]\n\n #[inline]\n\n pub fn bit_is_clear(&self) -> bool {\n\n !self.bit()\n\n }\n\n #[doc = r\" Returns `true` if the bit is set (1)\"]\n\n #[inline]\n\n pub fn bit_is_set(&self) -> bool {\n\n self.bit()\n\n }\n\n}\n\n#[doc = r\" Proxy\"]\n\npub struct _RESERVED1W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> _RESERVED1W<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u32) -> &'a mut W {\n", "file_path": "src/uart0/fr.rs", "rank": 18, "score": 62765.13382181016 }, { "content": " const MASK: bool = true;\n\n const OFFSET: u8 = 3;\n\n ((self.bits >> OFFSET) & MASK as u32) != 0\n\n };\n\n BUSYR { bits }\n\n }\n\n #[doc = \"Bits 1:2 - 2:1\\\\] Software should not rely on the value of a reserved. Writing any other value than the reset value may result in undefined behavior.\"]\n\n #[inline]\n\n pub fn reserved0(&self) -> RESERVED0R {\n\n let bits = {\n\n const MASK: u8 = 3;\n\n const OFFSET: u8 = 1;\n\n ((self.bits >> OFFSET) & MASK as u32) as u8\n\n };\n\n RESERVED0R { bits }\n\n }\n\n #[doc = \"Bit 0 - 0:0\\\\] Clear To Send: This bit is the complement of the active-low UART CTS input pin. That is, the bit is 1 when CTS input pin is LOW.\"]\n\n #[inline]\n\n pub fn cts(&self) -> CTSR {\n\n let bits = {\n", "file_path": "src/uart0/fr.rs", "rank": 19, "score": 62757.291624493075 }, { "content": " pub fn reset(&self) {\n\n self.write(|w| w)\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n\npub struct RESERVED1R {\n\n bits: u32,\n\n}\n\nimpl RESERVED1R {\n\n #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u32 {\n\n self.bits\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n\npub struct TXFER {\n\n bits: bool,\n\n}\n\nimpl TXFER {\n", "file_path": "src/uart0/fr.rs", "rank": 20, "score": 62752.35132934932 }, { "content": " const MASK: bool = true;\n\n const OFFSET: u8 = 7;\n\n ((self.bits >> OFFSET) & MASK as u32) != 0\n\n };\n\n TXFER { bits }\n\n }\n\n #[doc = \"Bit 6 - 6:6\\\\] UART Receive FIFO Full: The meaning of this bit depends on the state of LCRH.FEN. - If the FIFO is disabled, this bit is set when the receive holding register is full. - If the FIFO is enabled, this bit is set when the receive FIFO is full.\"]\n\n #[inline]\n\n pub fn rxff(&self) -> RXFFR {\n\n let bits = {\n\n const MASK: bool = true;\n\n const OFFSET: u8 = 6;\n\n ((self.bits >> OFFSET) & MASK as u32) != 0\n\n };\n\n RXFFR { bits }\n\n }\n\n #[doc = \"Bit 5 - 5:5\\\\] UART Transmit FIFO Full: Transmit FIFO full. The meaning of this bit depends on the state of LCRH.FEN. - If the FIFO is disabled, this bit is set when the transmit holding register is full. - If the FIFO is enabled, this bit is set when the transmit FIFO is full.\"]\n\n #[inline]\n\n pub fn txff(&self) -> TXFFR {\n\n let bits = {\n", "file_path": "src/uart0/fr.rs", "rank": 21, "score": 62743.41768110229 }, { "content": "#[doc = r\" Value of the field\"]\n\npub struct RESERVED0R {\n\n bits: u8,\n\n}\n\nimpl RESERVED0R {\n\n #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u8 {\n\n self.bits\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n\npub struct CTSR {\n\n bits: bool,\n\n}\n\nimpl CTSR {\n\n #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bit(&self) -> bool {\n\n self.bits\n", "file_path": "src/uart0/fr.rs", "rank": 22, "score": 62743.19150841744 }, { "content": " const MASK: bool = true;\n\n const OFFSET: u8 = 5;\n\n ((self.bits >> OFFSET) & MASK as u32) != 0\n\n };\n\n TXFFR { bits }\n\n }\n\n #[doc = \"Bit 4 - 4:4\\\\] UART Receive FIFO Empty: Receive FIFO empty. The meaning of this bit depends on the state of LCRH.FEN. - If the FIFO is disabled, this bit is set when the receive holding register is empty. - If the FIFO is enabled, this bit is set when the receive FIFO is empty.\"]\n\n #[inline]\n\n pub fn rxfe(&self) -> RXFER {\n\n let bits = {\n\n const MASK: bool = true;\n\n const OFFSET: u8 = 4;\n\n ((self.bits >> OFFSET) & MASK as u32) != 0\n\n };\n\n RXFER { bits }\n\n }\n\n #[doc = \"Bit 3 - 3:3\\\\] UART Busy: If this bit is set to 1, the UART is busy transmitting data. This bit remains set until the complete byte, including all the stop bits, has been sent from the shift register. This bit is set as soon as the transmit FIFO becomes non-empty, regardless of whether the UART is enabled or not.\"]\n\n #[inline]\n\n pub fn busy(&self) -> BUSYR {\n\n let bits = {\n", "file_path": "src/uart0/fr.rs", "rank": 23, "score": 62742.6796510981 }, { "content": " #[inline]\n\n pub fn rxfe(&mut self) -> _RXFEW {\n\n _RXFEW { w: self }\n\n }\n\n #[doc = \"Bit 3 - 3:3\\\\] UART Busy: If this bit is set to 1, the UART is busy transmitting data. This bit remains set until the complete byte, including all the stop bits, has been sent from the shift register. This bit is set as soon as the transmit FIFO becomes non-empty, regardless of whether the UART is enabled or not.\"]\n\n #[inline]\n\n pub fn busy(&mut self) -> _BUSYW {\n\n _BUSYW { w: self }\n\n }\n\n #[doc = \"Bits 1:2 - 2:1\\\\] Software should not rely on the value of a reserved. Writing any other value than the reset value may result in undefined behavior.\"]\n\n #[inline]\n\n pub fn reserved0(&mut self) -> _RESERVED0W {\n\n _RESERVED0W { w: self }\n\n }\n\n #[doc = \"Bit 0 - 0:0\\\\] Clear To Send: This bit is the complement of the active-low UART CTS input pin. That is, the bit is 1 when CTS input pin is LOW.\"]\n\n #[inline]\n\n pub fn cts(&mut self) -> _CTSW {\n\n _CTSW { w: self }\n\n }\n\n}\n", "file_path": "src/uart0/fr.rs", "rank": 24, "score": 62739.16394511497 }, { "content": "}\n\nimpl TXFFR {\n\n #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bit(&self) -> bool {\n\n self.bits\n\n }\n\n #[doc = r\" Returns `true` if the bit is clear (0)\"]\n\n #[inline]\n\n pub fn bit_is_clear(&self) -> bool {\n\n !self.bit()\n\n }\n\n #[doc = r\" Returns `true` if the bit is set (1)\"]\n\n #[inline]\n\n pub fn bit_is_set(&self) -> bool {\n\n self.bit()\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n\npub struct RXFER {\n", "file_path": "src/uart0/fr.rs", "rank": 25, "score": 62732.96729091451 }, { "content": " bits: bool,\n\n}\n\nimpl RXFER {\n\n #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bit(&self) -> bool {\n\n self.bits\n\n }\n\n #[doc = r\" Returns `true` if the bit is clear (0)\"]\n\n #[inline]\n\n pub fn bit_is_clear(&self) -> bool {\n\n !self.bit()\n\n }\n\n #[doc = r\" Returns `true` if the bit is set (1)\"]\n\n #[inline]\n\n pub fn bit_is_set(&self) -> bool {\n\n self.bit()\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n", "file_path": "src/uart0/fr.rs", "rank": 26, "score": 62732.74449766577 }, { "content": "impl RXFFR {\n\n #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bit(&self) -> bool {\n\n self.bits\n\n }\n\n #[doc = r\" Returns `true` if the bit is clear (0)\"]\n\n #[inline]\n\n pub fn bit_is_clear(&self) -> bool {\n\n !self.bit()\n\n }\n\n #[doc = r\" Returns `true` if the bit is set (1)\"]\n\n #[inline]\n\n pub fn bit_is_set(&self) -> bool {\n\n self.bit()\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n\npub struct TXFFR {\n\n bits: bool,\n", "file_path": "src/uart0/fr.rs", "rank": 27, "score": 62732.72525261569 }, { "content": "pub struct BUSYR {\n\n bits: bool,\n\n}\n\nimpl BUSYR {\n\n #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bit(&self) -> bool {\n\n self.bits\n\n }\n\n #[doc = r\" Returns `true` if the bit is clear (0)\"]\n\n #[inline]\n\n pub fn bit_is_clear(&self) -> bool {\n\n !self.bit()\n\n }\n\n #[doc = r\" Returns `true` if the bit is set (1)\"]\n\n #[inline]\n\n pub fn bit_is_set(&self) -> bool {\n\n self.bit()\n\n }\n\n}\n", "file_path": "src/uart0/fr.rs", "rank": 28, "score": 62731.70596210931 }, { "content": " #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bit(&self) -> bool {\n\n self.bits\n\n }\n\n #[doc = r\" Returns `true` if the bit is clear (0)\"]\n\n #[inline]\n\n pub fn bit_is_clear(&self) -> bool {\n\n !self.bit()\n\n }\n\n #[doc = r\" Returns `true` if the bit is set (1)\"]\n\n #[inline]\n\n pub fn bit_is_set(&self) -> bool {\n\n self.bit()\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n\npub struct RXFFR {\n\n bits: bool,\n\n}\n", "file_path": "src/uart0/fr.rs", "rank": 29, "score": 62727.81225816912 }, { "content": " #[inline]\n\n pub fn reserved1(&mut self) -> _RESERVED1W {\n\n _RESERVED1W { w: self }\n\n }\n\n #[doc = \"Bit 7 - 7:7\\\\] UART Transmit FIFO Empty: The meaning of this bit depends on the state of LCRH.FEN . - If the FIFO is disabled, this bit is set when the transmit holding register is empty. - If the FIFO is enabled, this bit is set when the transmit FIFO is empty. This bit does not indicate if there is data in the transmit shift register.\"]\n\n #[inline]\n\n pub fn txfe(&mut self) -> _TXFEW {\n\n _TXFEW { w: self }\n\n }\n\n #[doc = \"Bit 6 - 6:6\\\\] UART Receive FIFO Full: The meaning of this bit depends on the state of LCRH.FEN. - If the FIFO is disabled, this bit is set when the receive holding register is full. - If the FIFO is enabled, this bit is set when the receive FIFO is full.\"]\n\n #[inline]\n\n pub fn rxff(&mut self) -> _RXFFW {\n\n _RXFFW { w: self }\n\n }\n\n #[doc = \"Bit 5 - 5:5\\\\] UART Transmit FIFO Full: Transmit FIFO full. The meaning of this bit depends on the state of LCRH.FEN. - If the FIFO is disabled, this bit is set when the transmit holding register is full. - If the FIFO is enabled, this bit is set when the transmit FIFO is full.\"]\n\n #[inline]\n\n pub fn txff(&mut self) -> _TXFFW {\n\n _TXFFW { w: self }\n\n }\n\n #[doc = \"Bit 4 - 4:4\\\\] UART Receive FIFO Empty: Receive FIFO empty. The meaning of this bit depends on the state of LCRH.FEN. - If the FIFO is disabled, this bit is set when the receive holding register is empty. - If the FIFO is enabled, this bit is set when the receive FIFO is empty.\"]\n", "file_path": "src/uart0/fr.rs", "rank": 30, "score": 62726.44839956903 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::RTC_OFFSET {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ccfg/rtc_offset.rs", "rank": 31, "score": 60581.50866556378 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::FREQ_OFFSET {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ccfg/freq_offset.rs", "rank": 32, "score": 60581.50866556378 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::FREQ_OFFSET {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/fcfg1/freq_offset.rs", "rank": 33, "score": 60581.50866556378 }, { "content": " }\n\n}\n\nimpl R {\n\n #[doc = r\" Value of the register as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u32 {\n\n self.bits\n\n }\n\n #[doc = \"Bits 16:31 - 31:16\\\\] Internal. Only to be used through TI provided API.\"]\n\n #[inline]\n\n pub fn hposc_comp_p0(&self) -> HPOSC_COMP_P0R {\n\n let bits = {\n\n const MASK: u16 = 65535;\n\n const OFFSET: u8 = 16;\n\n ((self.bits >> OFFSET) & MASK as u32) as u16\n\n };\n\n HPOSC_COMP_P0R { bits }\n\n }\n\n #[doc = \"Bits 8:15 - 15:8\\\\] Internal. Only to be used through TI provided API.\"]\n\n #[inline]\n", "file_path": "src/fcfg1/freq_offset.rs", "rank": 34, "score": 60580.10011305303 }, { "content": " #[doc = r\" Reset value of the register\"]\n\n #[inline]\n\n pub fn reset_value() -> W {\n\n W { bits: 0 }\n\n }\n\n #[doc = r\" Writes raw bits to the register\"]\n\n #[inline]\n\n pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {\n\n self.bits = bits;\n\n self\n\n }\n\n #[doc = \"Bits 16:31 - 31:16\\\\] Internal. Only to be used through TI provided API.\"]\n\n #[inline]\n\n pub fn hposc_comp_p0(&mut self) -> _HPOSC_COMP_P0W {\n\n _HPOSC_COMP_P0W { w: self }\n\n }\n\n #[doc = \"Bits 8:15 - 15:8\\\\] Internal. Only to be used through TI provided API.\"]\n\n #[inline]\n\n pub fn hposc_comp_p1(&mut self) -> _HPOSC_COMP_P1W {\n\n _HPOSC_COMP_P1W { w: self }\n\n }\n\n #[doc = \"Bits 0:7 - 7:0\\\\] Internal. Only to be used through TI provided API.\"]\n\n #[inline]\n\n pub fn hposc_comp_p2(&mut self) -> _HPOSC_COMP_P2W {\n\n _HPOSC_COMP_P2W { w: self }\n\n }\n\n}\n", "file_path": "src/fcfg1/freq_offset.rs", "rank": 35, "score": 60576.72391996416 }, { "content": "}\n\nimpl<'a> _HPOSC_COMP_P0W<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u16) -> &'a mut W {\n\n const MASK: u16 = 65535;\n\n const OFFSET: u8 = 16;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\n#[doc = r\" Proxy\"]\n\npub struct _HPOSC_COMP_P1W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> _HPOSC_COMP_P1W<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n", "file_path": "src/fcfg1/freq_offset.rs", "rank": 36, "score": 60572.18426952765 }, { "content": "}\n\nimpl<'a> _RTC_COMP_P0W<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u16) -> &'a mut W {\n\n const MASK: u16 = 65535;\n\n const OFFSET: u8 = 16;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\n#[doc = r\" Proxy\"]\n\npub struct _RTC_COMP_P1W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> _RTC_COMP_P1W<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n", "file_path": "src/ccfg/rtc_offset.rs", "rank": 37, "score": 60572.18426952765 }, { "content": "}\n\nimpl<'a> _HF_COMP_P0W<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u16) -> &'a mut W {\n\n const MASK: u16 = 65535;\n\n const OFFSET: u8 = 16;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\n#[doc = r\" Proxy\"]\n\npub struct _HF_COMP_P1W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> _HF_COMP_P1W<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n", "file_path": "src/ccfg/freq_offset.rs", "rank": 38, "score": 60572.18426952765 }, { "content": " }\n\n #[doc = r\" Reads the contents of the register\"]\n\n #[inline]\n\n pub fn read(&self) -> R {\n\n R {\n\n bits: self.register.get(),\n\n }\n\n }\n\n #[doc = r\" Writes to the register\"]\n\n #[inline]\n\n pub fn write<F>(&self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W::reset_value();\n\n f(&mut w);\n\n self.register.set(w.bits);\n\n }\n\n #[doc = r\" Writes the reset value to the register\"]\n\n #[inline]\n", "file_path": "src/ccfg/freq_offset.rs", "rank": 39, "score": 60571.159320833656 }, { "content": " }\n\n #[doc = r\" Reads the contents of the register\"]\n\n #[inline]\n\n pub fn read(&self) -> R {\n\n R {\n\n bits: self.register.get(),\n\n }\n\n }\n\n #[doc = r\" Writes to the register\"]\n\n #[inline]\n\n pub fn write<F>(&self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W::reset_value();\n\n f(&mut w);\n\n self.register.set(w.bits);\n\n }\n\n #[doc = r\" Writes the reset value to the register\"]\n\n #[inline]\n", "file_path": "src/ccfg/rtc_offset.rs", "rank": 40, "score": 60571.159320833656 }, { "content": " }\n\n #[doc = r\" Reads the contents of the register\"]\n\n #[inline]\n\n pub fn read(&self) -> R {\n\n R {\n\n bits: self.register.get(),\n\n }\n\n }\n\n #[doc = r\" Writes to the register\"]\n\n #[inline]\n\n pub fn write<F>(&self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W::reset_value();\n\n f(&mut w);\n\n self.register.set(w.bits);\n\n }\n\n #[doc = r\" Writes the reset value to the register\"]\n\n #[inline]\n", "file_path": "src/fcfg1/freq_offset.rs", "rank": 41, "score": 60571.159320833656 }, { "content": " const MASK: u8 = 255;\n\n const OFFSET: u8 = 8;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\n#[doc = r\" Proxy\"]\n\npub struct _RTC_COMP_P2W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> _RTC_COMP_P2W<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n const MASK: u8 = 255;\n\n const OFFSET: u8 = 0;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n", "file_path": "src/ccfg/rtc_offset.rs", "rank": 42, "score": 60569.09042612349 }, { "content": " const MASK: u8 = 255;\n\n const OFFSET: u8 = 8;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\n#[doc = r\" Proxy\"]\n\npub struct _HF_COMP_P2W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> _HF_COMP_P2W<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n const MASK: u8 = 255;\n\n const OFFSET: u8 = 0;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n", "file_path": "src/ccfg/freq_offset.rs", "rank": 43, "score": 60569.09042612349 }, { "content": " const MASK: u8 = 255;\n\n const OFFSET: u8 = 8;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\n#[doc = r\" Proxy\"]\n\npub struct _HPOSC_COMP_P2W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> _HPOSC_COMP_P2W<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n const MASK: u8 = 255;\n\n const OFFSET: u8 = 0;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n", "file_path": "src/fcfg1/freq_offset.rs", "rank": 44, "score": 60569.09042612349 }, { "content": " pub fn hposc_comp_p1(&self) -> HPOSC_COMP_P1R {\n\n let bits = {\n\n const MASK: u8 = 255;\n\n const OFFSET: u8 = 8;\n\n ((self.bits >> OFFSET) & MASK as u32) as u8\n\n };\n\n HPOSC_COMP_P1R { bits }\n\n }\n\n #[doc = \"Bits 0:7 - 7:0\\\\] Internal. Only to be used through TI provided API.\"]\n\n #[inline]\n\n pub fn hposc_comp_p2(&self) -> HPOSC_COMP_P2R {\n\n let bits = {\n\n const MASK: u8 = 255;\n\n const OFFSET: u8 = 0;\n\n ((self.bits >> OFFSET) & MASK as u32) as u8\n\n };\n\n HPOSC_COMP_P2R { bits }\n\n }\n\n}\n\nimpl W {\n", "file_path": "src/fcfg1/freq_offset.rs", "rank": 45, "score": 60564.048840839234 }, { "content": " }\n\n}\n\nimpl R {\n\n #[doc = r\" Value of the register as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u32 {\n\n self.bits\n\n }\n\n #[doc = \"Bits 16:31 - 31:16\\\\] Reserved for future use. Software should not rely on the value of a reserved. Writing any other value than the reset/default value may result in undefined behavior.\"]\n\n #[inline]\n\n pub fn rtc_comp_p0(&self) -> RTC_COMP_P0R {\n\n let bits = {\n\n const MASK: u16 = 65535;\n\n const OFFSET: u8 = 16;\n\n ((self.bits >> OFFSET) & MASK as u32) as u16\n\n };\n\n RTC_COMP_P0R { bits }\n\n }\n\n #[doc = \"Bits 8:15 - 15:8\\\\] Reserved for future use. Software should not rely on the value of a reserved. Writing any other value than the reset/default value may result in undefined behavior.\"]\n\n #[inline]\n", "file_path": "src/ccfg/rtc_offset.rs", "rank": 46, "score": 60561.16353857706 }, { "content": " }\n\n}\n\nimpl R {\n\n #[doc = r\" Value of the register as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u32 {\n\n self.bits\n\n }\n\n #[doc = \"Bits 16:31 - 31:16\\\\] Reserved for future use. Software should not rely on the value of a reserved. Writing any other value than the reset/default value may result in undefined behavior.\"]\n\n #[inline]\n\n pub fn hf_comp_p0(&self) -> HF_COMP_P0R {\n\n let bits = {\n\n const MASK: u16 = 65535;\n\n const OFFSET: u8 = 16;\n\n ((self.bits >> OFFSET) & MASK as u32) as u16\n\n };\n\n HF_COMP_P0R { bits }\n\n }\n\n #[doc = \"Bits 8:15 - 15:8\\\\] Reserved for future use. Software should not rely on the value of a reserved. Writing any other value than the reset/default value may result in undefined behavior.\"]\n\n #[inline]\n", "file_path": "src/ccfg/freq_offset.rs", "rank": 47, "score": 60561.16353857706 }, { "content": " pub fn hf_comp_p1(&self) -> HF_COMP_P1R {\n\n let bits = {\n\n const MASK: u8 = 255;\n\n const OFFSET: u8 = 8;\n\n ((self.bits >> OFFSET) & MASK as u32) as u8\n\n };\n\n HF_COMP_P1R { bits }\n\n }\n\n #[doc = \"Bits 0:7 - 7:0\\\\] Reserved for future use. Software should not rely on the value of a reserved. Writing any other value than the reset/default value may result in undefined behavior.\"]\n\n #[inline]\n\n pub fn hf_comp_p2(&self) -> HF_COMP_P2R {\n\n let bits = {\n\n const MASK: u8 = 255;\n\n const OFFSET: u8 = 0;\n\n ((self.bits >> OFFSET) & MASK as u32) as u8\n\n };\n\n HF_COMP_P2R { bits }\n\n }\n\n}\n\nimpl W {\n", "file_path": "src/ccfg/freq_offset.rs", "rank": 48, "score": 60556.63212949514 }, { "content": " pub fn rtc_comp_p1(&self) -> RTC_COMP_P1R {\n\n let bits = {\n\n const MASK: u8 = 255;\n\n const OFFSET: u8 = 8;\n\n ((self.bits >> OFFSET) & MASK as u32) as u8\n\n };\n\n RTC_COMP_P1R { bits }\n\n }\n\n #[doc = \"Bits 0:7 - 7:0\\\\] Reserved for future use. Software should not rely on the value of a reserved. Writing any other value than the reset/default value may result in undefined behavior.\"]\n\n #[inline]\n\n pub fn rtc_comp_p2(&self) -> RTC_COMP_P2R {\n\n let bits = {\n\n const MASK: u8 = 255;\n\n const OFFSET: u8 = 0;\n\n ((self.bits >> OFFSET) & MASK as u32) as u8\n\n };\n\n RTC_COMP_P2R { bits }\n\n }\n\n}\n\nimpl W {\n", "file_path": "src/ccfg/rtc_offset.rs", "rank": 49, "score": 60556.63212949514 }, { "content": " #[doc = r\" Reset value of the register\"]\n\n #[inline]\n\n pub fn reset_value() -> W {\n\n W { bits: 4294967295 }\n\n }\n\n #[doc = r\" Writes raw bits to the register\"]\n\n #[inline]\n\n pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {\n\n self.bits = bits;\n\n self\n\n }\n\n #[doc = \"Bits 16:31 - 31:16\\\\] Reserved for future use. Software should not rely on the value of a reserved. Writing any other value than the reset/default value may result in undefined behavior.\"]\n\n #[inline]\n\n pub fn rtc_comp_p0(&mut self) -> _RTC_COMP_P0W {\n\n _RTC_COMP_P0W { w: self }\n\n }\n\n #[doc = \"Bits 8:15 - 15:8\\\\] Reserved for future use. Software should not rely on the value of a reserved. Writing any other value than the reset/default value may result in undefined behavior.\"]\n\n #[inline]\n\n pub fn rtc_comp_p1(&mut self) -> _RTC_COMP_P1W {\n\n _RTC_COMP_P1W { w: self }\n\n }\n\n #[doc = \"Bits 0:7 - 7:0\\\\] Reserved for future use. Software should not rely on the value of a reserved. Writing any other value than the reset/default value may result in undefined behavior.\"]\n\n #[inline]\n\n pub fn rtc_comp_p2(&mut self) -> _RTC_COMP_P2W {\n\n _RTC_COMP_P2W { w: self }\n\n }\n\n}\n", "file_path": "src/ccfg/rtc_offset.rs", "rank": 50, "score": 60551.07471497755 }, { "content": " #[doc = r\" Reset value of the register\"]\n\n #[inline]\n\n pub fn reset_value() -> W {\n\n W { bits: 4294967295 }\n\n }\n\n #[doc = r\" Writes raw bits to the register\"]\n\n #[inline]\n\n pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {\n\n self.bits = bits;\n\n self\n\n }\n\n #[doc = \"Bits 16:31 - 31:16\\\\] Reserved for future use. Software should not rely on the value of a reserved. Writing any other value than the reset/default value may result in undefined behavior.\"]\n\n #[inline]\n\n pub fn hf_comp_p0(&mut self) -> _HF_COMP_P0W {\n\n _HF_COMP_P0W { w: self }\n\n }\n\n #[doc = \"Bits 8:15 - 15:8\\\\] Reserved for future use. Software should not rely on the value of a reserved. Writing any other value than the reset/default value may result in undefined behavior.\"]\n\n #[inline]\n\n pub fn hf_comp_p1(&mut self) -> _HF_COMP_P1W {\n\n _HF_COMP_P1W { w: self }\n\n }\n\n #[doc = \"Bits 0:7 - 7:0\\\\] Reserved for future use. Software should not rely on the value of a reserved. Writing any other value than the reset/default value may result in undefined behavior.\"]\n\n #[inline]\n\n pub fn hf_comp_p2(&mut self) -> _HF_COMP_P2W {\n\n _HF_COMP_P2W { w: self }\n\n }\n\n}\n", "file_path": "src/ccfg/freq_offset.rs", "rank": 51, "score": 60551.07471497755 }, { "content": " #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u8 {\n\n self.bits\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n\npub struct HF_COMP_P2R {\n\n bits: u8,\n\n}\n\nimpl HF_COMP_P2R {\n\n #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u8 {\n\n self.bits\n\n }\n\n}\n\n#[doc = r\" Proxy\"]\n\npub struct _HF_COMP_P0W<'a> {\n\n w: &'a mut W,\n", "file_path": "src/ccfg/freq_offset.rs", "rank": 52, "score": 60545.530386452 }, { "content": " #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u8 {\n\n self.bits\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n\npub struct HPOSC_COMP_P2R {\n\n bits: u8,\n\n}\n\nimpl HPOSC_COMP_P2R {\n\n #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u8 {\n\n self.bits\n\n }\n\n}\n\n#[doc = r\" Proxy\"]\n\npub struct _HPOSC_COMP_P0W<'a> {\n\n w: &'a mut W,\n", "file_path": "src/fcfg1/freq_offset.rs", "rank": 53, "score": 60545.530386452 }, { "content": " #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u8 {\n\n self.bits\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n\npub struct RTC_COMP_P2R {\n\n bits: u8,\n\n}\n\nimpl RTC_COMP_P2R {\n\n #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u8 {\n\n self.bits\n\n }\n\n}\n\n#[doc = r\" Proxy\"]\n\npub struct _RTC_COMP_P0W<'a> {\n\n w: &'a mut W,\n", "file_path": "src/ccfg/rtc_offset.rs", "rank": 54, "score": 60545.530386452 }, { "content": " pub fn reset(&self) {\n\n self.write(|w| w)\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n\npub struct HF_COMP_P0R {\n\n bits: u16,\n\n}\n\nimpl HF_COMP_P0R {\n\n #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u16 {\n\n self.bits\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n\npub struct HF_COMP_P1R {\n\n bits: u8,\n\n}\n\nimpl HF_COMP_P1R {\n", "file_path": "src/ccfg/freq_offset.rs", "rank": 55, "score": 60536.40277048024 }, { "content": " pub fn reset(&self) {\n\n self.write(|w| w)\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n\npub struct RTC_COMP_P0R {\n\n bits: u16,\n\n}\n\nimpl RTC_COMP_P0R {\n\n #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u16 {\n\n self.bits\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n\npub struct RTC_COMP_P1R {\n\n bits: u8,\n\n}\n\nimpl RTC_COMP_P1R {\n", "file_path": "src/ccfg/rtc_offset.rs", "rank": 56, "score": 60536.40277048024 }, { "content": " pub fn reset(&self) {\n\n self.write(|w| w)\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n\npub struct HPOSC_COMP_P0R {\n\n bits: u16,\n\n}\n\nimpl HPOSC_COMP_P0R {\n\n #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u16 {\n\n self.bits\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n\npub struct HPOSC_COMP_P1R {\n\n bits: u8,\n\n}\n\nimpl HPOSC_COMP_P1R {\n", "file_path": "src/fcfg1/freq_offset.rs", "rank": 57, "score": 60536.40277048024 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::CCFG_TI_OPTIONS {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/ccfg/ccfg_ti_options.rs", "rank": 58, "score": 58529.64478381001 }, { "content": " const MASK: u8 = 255;\n\n const OFFSET: u8 = 0;\n\n ((self.bits >> OFFSET) & MASK as u32) as u8\n\n };\n\n TI_FA_ENABLER { bits }\n\n }\n\n}\n\nimpl W {\n\n #[doc = r\" Reset value of the register\"]\n\n #[inline]\n\n pub fn reset_value() -> W {\n\n W { bits: 4294967237 }\n\n }\n\n #[doc = r\" Writes raw bits to the register\"]\n\n #[inline]\n\n pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {\n\n self.bits = bits;\n\n self\n\n }\n\n #[doc = \"Bits 0:7 - 7:0\\\\] TI Failure Analysis. 0xC5: Enable the functionality of unlocking the TI FA (TI Failure Analysis) option with the unlock code. All other values: Disable the functionality of unlocking the TI FA option with the unlock code.\"]\n\n #[inline]\n\n pub fn ti_fa_enable(&mut self) -> _TI_FA_ENABLEW {\n\n _TI_FA_ENABLEW { w: self }\n\n }\n\n}\n", "file_path": "src/ccfg/ccfg_ti_options.rs", "rank": 59, "score": 58522.26529101018 }, { "content": " #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n const MASK: u8 = 255;\n\n const OFFSET: u8 = 0;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = r\" Value of the register as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u32 {\n\n self.bits\n\n }\n\n #[doc = \"Bits 0:7 - 7:0\\\\] TI Failure Analysis. 0xC5: Enable the functionality of unlocking the TI FA (TI Failure Analysis) option with the unlock code. All other values: Disable the functionality of unlocking the TI FA option with the unlock code.\"]\n\n #[inline]\n\n pub fn ti_fa_enable(&self) -> TI_FA_ENABLER {\n\n let bits = {\n", "file_path": "src/ccfg/ccfg_ti_options.rs", "rank": 60, "score": 58520.822520871225 }, { "content": " }\n\n #[doc = r\" Reads the contents of the register\"]\n\n #[inline]\n\n pub fn read(&self) -> R {\n\n R {\n\n bits: self.register.get(),\n\n }\n\n }\n\n #[doc = r\" Writes to the register\"]\n\n #[inline]\n\n pub fn write<F>(&self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W::reset_value();\n\n f(&mut w);\n\n self.register.set(w.bits);\n\n }\n\n #[doc = r\" Writes the reset value to the register\"]\n\n #[inline]\n", "file_path": "src/ccfg/ccfg_ti_options.rs", "rank": 61, "score": 58519.54418641652 }, { "content": " pub fn reset(&self) {\n\n self.write(|w| w)\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n\npub struct TI_FA_ENABLER {\n\n bits: u8,\n\n}\n\nimpl TI_FA_ENABLER {\n\n #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u8 {\n\n self.bits\n\n }\n\n}\n\n#[doc = r\" Proxy\"]\n\npub struct _TI_FA_ENABLEW<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> _TI_FA_ENABLEW<'a> {\n", "file_path": "src/ccfg/ccfg_ti_options.rs", "rank": 62, "score": 58503.04727710166 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::SOC_ADC_OFFSET_INT {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/fcfg1/soc_adc_offset_int.rs", "rank": 63, "score": 56591.60368590759 }, { "content": " pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n const MASK: u8 = 255;\n\n const OFFSET: u8 = 0;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = r\" Value of the register as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u32 {\n\n self.bits\n\n }\n\n #[doc = \"Bits 24:31 - 31:24\\\\] Software should not rely on the value of a reserved. Writing any other value than the reset value may result in undefined behavior.\"]\n\n #[inline]\n\n pub fn reserved24(&self) -> RESERVED24R {\n\n let bits = {\n\n const MASK: u8 = 255;\n\n const OFFSET: u8 = 24;\n", "file_path": "src/fcfg1/soc_adc_offset_int.rs", "rank": 64, "score": 56586.23916509572 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> _RESERVED8W<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n const MASK: u8 = 255;\n\n const OFFSET: u8 = 8;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\n#[doc = r\" Proxy\"]\n\npub struct _SOC_ADC_ABS_OFFSET_TEMP1W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> _SOC_ADC_ABS_OFFSET_TEMP1W<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n", "file_path": "src/fcfg1/soc_adc_offset_int.rs", "rank": 65, "score": 56582.84795223963 }, { "content": "}\n\nimpl SOC_ADC_ABS_OFFSET_TEMP1R {\n\n #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u8 {\n\n self.bits\n\n }\n\n}\n\n#[doc = r\" Proxy\"]\n\npub struct _RESERVED24W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> _RESERVED24W<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n const MASK: u8 = 255;\n\n const OFFSET: u8 = 24;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n", "file_path": "src/fcfg1/soc_adc_offset_int.rs", "rank": 66, "score": 56582.29229954328 }, { "content": " }\n\n #[doc = r\" Reads the contents of the register\"]\n\n #[inline]\n\n pub fn read(&self) -> R {\n\n R {\n\n bits: self.register.get(),\n\n }\n\n }\n\n #[doc = r\" Writes to the register\"]\n\n #[inline]\n\n pub fn write<F>(&self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W::reset_value();\n\n f(&mut w);\n\n self.register.set(w.bits);\n\n }\n\n #[doc = r\" Writes the reset value to the register\"]\n\n #[inline]\n", "file_path": "src/fcfg1/soc_adc_offset_int.rs", "rank": 67, "score": 56582.1475821773 }, { "content": " self.w\n\n }\n\n}\n\n#[doc = r\" Proxy\"]\n\npub struct _SOC_ADC_REL_OFFSET_TEMP1W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> _SOC_ADC_REL_OFFSET_TEMP1W<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n const MASK: u8 = 255;\n\n const OFFSET: u8 = 16;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\n#[doc = r\" Proxy\"]\n\npub struct _RESERVED8W<'a> {\n", "file_path": "src/fcfg1/soc_adc_offset_int.rs", "rank": 68, "score": 56579.88614746306 }, { "content": " ((self.bits >> OFFSET) & MASK as u32) as u8\n\n };\n\n RESERVED8R { bits }\n\n }\n\n #[doc = \"Bits 0:7 - 7:0\\\\] SOC_ADC offset in absolute reference mode at temperature 1 (30C). Signed 8-bit number. Calculated in production test..\"]\n\n #[inline]\n\n pub fn soc_adc_abs_offset_temp1(&self) -> SOC_ADC_ABS_OFFSET_TEMP1R {\n\n let bits = {\n\n const MASK: u8 = 255;\n\n const OFFSET: u8 = 0;\n\n ((self.bits >> OFFSET) & MASK as u32) as u8\n\n };\n\n SOC_ADC_ABS_OFFSET_TEMP1R { bits }\n\n }\n\n}\n\nimpl W {\n\n #[doc = r\" Reset value of the register\"]\n\n #[inline]\n\n pub fn reset_value() -> W {\n\n W { bits: 0 }\n", "file_path": "src/fcfg1/soc_adc_offset_int.rs", "rank": 69, "score": 56561.04246287019 }, { "content": " }\n\n #[doc = r\" Writes raw bits to the register\"]\n\n #[inline]\n\n pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {\n\n self.bits = bits;\n\n self\n\n }\n\n #[doc = \"Bits 24:31 - 31:24\\\\] Software should not rely on the value of a reserved. Writing any other value than the reset value may result in undefined behavior.\"]\n\n #[inline]\n\n pub fn reserved24(&mut self) -> _RESERVED24W {\n\n _RESERVED24W { w: self }\n\n }\n\n #[doc = \"Bits 16:23 - 23:16\\\\] SOC_ADC offset in relative reference mode at temperature 1 (30C). Signed 8-bit number. Calculated in production test..\"]\n\n #[inline]\n\n pub fn soc_adc_rel_offset_temp1(&mut self) -> _SOC_ADC_REL_OFFSET_TEMP1W {\n\n _SOC_ADC_REL_OFFSET_TEMP1W { w: self }\n\n }\n\n #[doc = \"Bits 8:15 - 15:8\\\\] Software should not rely on the value of a reserved. Writing any other value than the reset value may result in undefined behavior.\"]\n\n #[inline]\n\n pub fn reserved8(&mut self) -> _RESERVED8W {\n\n _RESERVED8W { w: self }\n\n }\n\n #[doc = \"Bits 0:7 - 7:0\\\\] SOC_ADC offset in absolute reference mode at temperature 1 (30C). Signed 8-bit number. Calculated in production test..\"]\n\n #[inline]\n\n pub fn soc_adc_abs_offset_temp1(&mut self) -> _SOC_ADC_ABS_OFFSET_TEMP1W {\n\n _SOC_ADC_ABS_OFFSET_TEMP1W { w: self }\n\n }\n\n}\n", "file_path": "src/fcfg1/soc_adc_offset_int.rs", "rank": 70, "score": 56551.501344490884 }, { "content": " pub fn reset(&self) {\n\n self.write(|w| w)\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n\npub struct RESERVED24R {\n\n bits: u8,\n\n}\n\nimpl RESERVED24R {\n\n #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u8 {\n\n self.bits\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n\npub struct SOC_ADC_REL_OFFSET_TEMP1R {\n\n bits: u8,\n\n}\n\nimpl SOC_ADC_REL_OFFSET_TEMP1R {\n", "file_path": "src/fcfg1/soc_adc_offset_int.rs", "rank": 71, "score": 56549.40056967286 }, { "content": " ((self.bits >> OFFSET) & MASK as u32) as u8\n\n };\n\n RESERVED24R { bits }\n\n }\n\n #[doc = \"Bits 16:23 - 23:16\\\\] SOC_ADC offset in relative reference mode at temperature 1 (30C). Signed 8-bit number. Calculated in production test..\"]\n\n #[inline]\n\n pub fn soc_adc_rel_offset_temp1(&self) -> SOC_ADC_REL_OFFSET_TEMP1R {\n\n let bits = {\n\n const MASK: u8 = 255;\n\n const OFFSET: u8 = 16;\n\n ((self.bits >> OFFSET) & MASK as u32) as u8\n\n };\n\n SOC_ADC_REL_OFFSET_TEMP1R { bits }\n\n }\n\n #[doc = \"Bits 8:15 - 15:8\\\\] Software should not rely on the value of a reserved. Writing any other value than the reset value may result in undefined behavior.\"]\n\n #[inline]\n\n pub fn reserved8(&self) -> RESERVED8R {\n\n let bits = {\n\n const MASK: u8 = 255;\n\n const OFFSET: u8 = 8;\n", "file_path": "src/fcfg1/soc_adc_offset_int.rs", "rank": 72, "score": 56548.26868441108 }, { "content": " #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u8 {\n\n self.bits\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n\npub struct RESERVED8R {\n\n bits: u8,\n\n}\n\nimpl RESERVED8R {\n\n #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u8 {\n\n self.bits\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n\npub struct SOC_ADC_ABS_OFFSET_TEMP1R {\n\n bits: u8,\n", "file_path": "src/fcfg1/soc_adc_offset_int.rs", "rank": 73, "score": 56539.54443666811 }, { "content": "#[doc = r\" Value read from the register\"]\n\npub struct R {\n\n bits: u32,\n\n}\n\n#[doc = r\" Value to write to the register\"]\n\npub struct W {\n\n bits: u32,\n\n}\n\nimpl super::SOC_ADC_REF_TRIM_AND_OFFSET_EXT {\n\n #[doc = r\" Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let bits = self.register.get();\n\n let r = R { bits: bits };\n\n let mut w = W { bits: bits };\n\n f(&r, &mut w);\n\n self.register.set(w.bits);\n", "file_path": "src/fcfg1/soc_adc_ref_trim_and_offset_ext.rs", "rank": 74, "score": 53095.37193943984 }, { "content": "}\n\n#[doc = r\" Proxy\"]\n\npub struct _SOC_ADC_REF_VOLTAGE_TRIM_TEMP1W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> _SOC_ADC_REF_VOLTAGE_TRIM_TEMP1W<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n const MASK: u8 = 63;\n\n const OFFSET: u8 = 0;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = r\" Value of the register as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u32 {\n", "file_path": "src/fcfg1/soc_adc_ref_trim_and_offset_ext.rs", "rank": 75, "score": 53094.587734653374 }, { "content": " #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u8 {\n\n self.bits\n\n }\n\n}\n\n#[doc = r\" Proxy\"]\n\npub struct _RESERVED6W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> _RESERVED6W<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u32) -> &'a mut W {\n\n const MASK: u32 = 67108863;\n\n const OFFSET: u8 = 6;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n", "file_path": "src/fcfg1/soc_adc_ref_trim_and_offset_ext.rs", "rank": 76, "score": 53087.38902917038 }, { "content": " }\n\n #[doc = r\" Reads the contents of the register\"]\n\n #[inline]\n\n pub fn read(&self) -> R {\n\n R {\n\n bits: self.register.get(),\n\n }\n\n }\n\n #[doc = r\" Writes to the register\"]\n\n #[inline]\n\n pub fn write<F>(&self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W::reset_value();\n\n f(&mut w);\n\n self.register.set(w.bits);\n\n }\n\n #[doc = r\" Writes the reset value to the register\"]\n\n #[inline]\n", "file_path": "src/fcfg1/soc_adc_ref_trim_and_offset_ext.rs", "rank": 77, "score": 53086.791039509204 }, { "content": " SOC_ADC_REF_VOLTAGE_TRIM_TEMP1R { bits }\n\n }\n\n}\n\nimpl W {\n\n #[doc = r\" Reset value of the register\"]\n\n #[inline]\n\n pub fn reset_value() -> W {\n\n W { bits: 49280 }\n\n }\n\n #[doc = r\" Writes raw bits to the register\"]\n\n #[inline]\n\n pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {\n\n self.bits = bits;\n\n self\n\n }\n\n #[doc = \"Bits 6:31 - 31:6\\\\] Software should not rely on the value of a reserved. Writing any other value than the reset value may result in undefined behavior.\"]\n\n #[inline]\n\n pub fn reserved6(&mut self) -> _RESERVED6W {\n\n _RESERVED6W { w: self }\n\n }\n\n #[doc = \"Bits 0:5 - 5:0\\\\] Internal. Only to be used through TI provided API.\"]\n\n #[inline]\n\n pub fn soc_adc_ref_voltage_trim_temp1(&mut self) -> _SOC_ADC_REF_VOLTAGE_TRIM_TEMP1W {\n\n _SOC_ADC_REF_VOLTAGE_TRIM_TEMP1W { w: self }\n\n }\n\n}\n", "file_path": "src/fcfg1/soc_adc_ref_trim_and_offset_ext.rs", "rank": 78, "score": 53086.353973820915 }, { "content": " self.bits\n\n }\n\n #[doc = \"Bits 6:31 - 31:6\\\\] Software should not rely on the value of a reserved. Writing any other value than the reset value may result in undefined behavior.\"]\n\n #[inline]\n\n pub fn reserved6(&self) -> RESERVED6R {\n\n let bits = {\n\n const MASK: u32 = 67108863;\n\n const OFFSET: u8 = 6;\n\n ((self.bits >> OFFSET) & MASK as u32) as u32\n\n };\n\n RESERVED6R { bits }\n\n }\n\n #[doc = \"Bits 0:5 - 5:0\\\\] Internal. Only to be used through TI provided API.\"]\n\n #[inline]\n\n pub fn soc_adc_ref_voltage_trim_temp1(&self) -> SOC_ADC_REF_VOLTAGE_TRIM_TEMP1R {\n\n let bits = {\n\n const MASK: u8 = 63;\n\n const OFFSET: u8 = 0;\n\n ((self.bits >> OFFSET) & MASK as u32) as u8\n\n };\n", "file_path": "src/fcfg1/soc_adc_ref_trim_and_offset_ext.rs", "rank": 79, "score": 53078.7144106889 }, { "content": " pub fn reset(&self) {\n\n self.write(|w| w)\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n\npub struct RESERVED6R {\n\n bits: u32,\n\n}\n\nimpl RESERVED6R {\n\n #[doc = r\" Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u32 {\n\n self.bits\n\n }\n\n}\n\n#[doc = r\" Value of the field\"]\n\npub struct SOC_ADC_REF_VOLTAGE_TRIM_TEMP1R {\n\n bits: u8,\n\n}\n\nimpl SOC_ADC_REF_VOLTAGE_TRIM_TEMP1R {\n", "file_path": "src/fcfg1/soc_adc_ref_trim_and_offset_ext.rs", "rank": 80, "score": 53051.11255952837 }, { "content": " const MASK: u8 = 31;\n\n const OFFSET: u8 = 0;\n\n ((self.bits >> OFFSET) & MASK as u32) as u8\n\n };\n\n EFUSEDAYR { bits }\n\n }\n\n}\n\nimpl W {\n\n #[doc = r\" Reset value of the register\"]\n\n #[inline]\n\n pub fn reset_value() -> W {\n\n W { bits: 0 }\n\n }\n\n #[doc = r\" Writes raw bits to the register\"]\n\n #[inline]\n\n pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {\n\n self.bits = bits;\n\n self\n\n }\n\n #[doc = \"Bits 25:31 - 31:25\\\\] Internal. Only to be used through TI provided API.\"]\n", "file_path": "src/flash/efuserelease.rs", "rank": 81, "score": 123.67765597254373 }, { "content": " const MASK: u8 = 63;\n\n const OFFSET: u8 = 0;\n\n ((self.bits >> OFFSET) & MASK as u32) as u8\n\n };\n\n RESERVED0R { bits }\n\n }\n\n}\n\nimpl W {\n\n #[doc = r\" Reset value of the register\"]\n\n #[inline]\n\n pub fn reset_value() -> W {\n\n W { bits: 3072 }\n\n }\n\n #[doc = r\" Writes raw bits to the register\"]\n\n #[inline]\n\n pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {\n\n self.bits = bits;\n\n self\n\n }\n\n #[doc = \"Bits 12:31 - 31:12\\\\] Internal. Only to be used through TI provided API.\"]\n", "file_path": "src/flash/fsm_state.rs", "rank": 82, "score": 123.67765597254377 }, { "content": " const MASK: u8 = 255;\n\n const OFFSET: u8 = 0;\n\n ((self.bits >> OFFSET) & MASK as u32) as u8\n\n };\n\n VINHR { bits }\n\n }\n\n}\n\nimpl W {\n\n #[doc = r\" Reset value of the register\"]\n\n #[inline]\n\n pub fn reset_value() -> W {\n\n W { bits: 524289 }\n\n }\n\n #[doc = r\" Writes raw bits to the register\"]\n\n #[inline]\n\n pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {\n\n self.bits = bits;\n\n self\n\n }\n\n #[doc = \"Bits 28:31 - 31:28\\\\] Internal. Only to be used through TI provided API.\"]\n", "file_path": "src/fcfg1/flash_vhv_pv.rs", "rank": 83, "score": 123.67765597254375 }, { "content": " const MASK: u8 = 15;\n\n const OFFSET: u8 = 0;\n\n ((self.bits >> OFFSET) & MASK as u32) as u8\n\n };\n\n VHVCT_PVR { bits }\n\n }\n\n}\n\nimpl W {\n\n #[doc = r\" Reset value of the register\"]\n\n #[inline]\n\n pub fn reset_value() -> W {\n\n W { bits: 8650888 }\n\n }\n\n #[doc = r\" Writes raw bits to the register\"]\n\n #[inline]\n\n pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {\n\n self.bits = bits;\n\n self\n\n }\n\n #[doc = \"Bits 24:31 - 31:24\\\\] Internal. Only to be used through TI provided API.\"]\n", "file_path": "src/flash/fvhvct1.rs", "rank": 84, "score": 123.04955623764432 }, { "content": " const MASK: u8 = 255;\n\n const OFFSET: u8 = 0;\n\n ((self.bits >> OFFSET) & MASK as u32) as u8\n\n };\n\n SEQ_PUMPR { bits }\n\n }\n\n}\n\nimpl W {\n\n #[doc = r\" Reset value of the register\"]\n\n #[inline]\n\n pub fn reset_value() -> W {\n\n W { bits: 2231894016 }\n\n }\n\n #[doc = r\" Writes raw bits to the register\"]\n\n #[inline]\n\n pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {\n\n self.bits = bits;\n\n self\n\n }\n\n #[doc = \"Bits 28:31 - 31:28\\\\] Internal. Only to be used through TI provided API.\"]\n", "file_path": "src/flash/fseqpmp.rs", "rank": 85, "score": 123.04955623764432 }, { "content": " const MASK: u8 = 63;\n\n const OFFSET: u8 = 0;\n\n ((self.bits >> OFFSET) & MASK as u32) as u8\n\n };\n\n HPMRAMP1_THR { bits }\n\n }\n\n}\n\nimpl W {\n\n #[doc = r\" Reset value of the register\"]\n\n #[inline]\n\n pub fn reset_value() -> W {\n\n W { bits: 4286284430 }\n\n }\n\n #[doc = r\" Writes raw bits to the register\"]\n\n #[inline]\n\n pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {\n\n self.bits = bits;\n\n self\n\n }\n\n #[doc = \"Bits 24:31 - 31:24\\\\] Internal. Only to be used through TI provided API.\"]\n", "file_path": "src/fcfg1/ampcomp_th1.rs", "rank": 86, "score": 123.04955623764434 }, { "content": " #[doc = \"Bits 0:1 - 1:0\\\\] Internal. Only to be used through TI provided API.\"]\n\n #[inline]\n\n pub fn pumppwr(&self) -> PUMPPWRR {\n\n let bits = {\n\n const MASK: u8 = 3;\n\n const OFFSET: u8 = 0;\n\n ((self.bits >> OFFSET) & MASK as u32) as u8\n\n };\n\n PUMPPWRR { bits }\n\n }\n\n}\n\nimpl W {\n\n #[doc = r\" Reset value of the register\"]\n\n #[inline]\n\n pub fn reset_value() -> W {\n\n W { bits: 34087041 }\n\n }\n\n #[doc = r\" Writes raw bits to the register\"]\n\n #[inline]\n\n pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {\n", "file_path": "src/flash/fpac1.rs", "rank": 87, "score": 122.8588241943066 }, { "content": " #[doc = \"Bits 0:3 - 3:0\\\\] Internal. Only to be used through TI provided API.\"]\n\n #[inline]\n\n pub fn inputenable(&self) -> INPUTENABLER {\n\n let bits = {\n\n const MASK: u8 = 15;\n\n const OFFSET: u8 = 0;\n\n ((self.bits >> OFFSET) & MASK as u32) as u8\n\n };\n\n INPUTENABLER { bits }\n\n }\n\n}\n\nimpl W {\n\n #[doc = r\" Reset value of the register\"]\n\n #[inline]\n\n pub fn reset_value() -> W {\n\n W { bits: 0 }\n\n }\n\n #[doc = r\" Writes raw bits to the register\"]\n\n #[inline]\n\n pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {\n", "file_path": "src/flash/boundary.rs", "rank": 88, "score": 122.8588241943066 }, { "content": "}\n\nimpl<'a> _RESERVED0W<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n const MASK: u8 = 3;\n\n const OFFSET: u8 = 0;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = r\" Value of the register as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u32 {\n\n self.bits\n\n }\n\n #[doc = \"Bits 26:31 - 31:26\\\\] Internal. Only to be used through TI provided API.\"]\n\n #[inline]\n", "file_path": "src/fcfg1/ampcomp_th2.rs", "rank": 89, "score": 122.61722335868598 }, { "content": "}\n\nimpl<'a> _SPARE0W<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n const MASK: u8 = 3;\n\n const OFFSET: u8 = 0;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = r\" Value of the register as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u32 {\n\n self.bits\n\n }\n\n #[doc = \"Bits 26:31 - 31:26\\\\] Internal. Only to be used through TI provided API.\"]\n\n #[inline]\n", "file_path": "src/aux_ddi0_osc/ampcompth2.rs", "rank": 90, "score": 122.61722335868599 }, { "content": "}\n\nimpl<'a> _RESERVED0W<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n const MASK: u8 = 3;\n\n const OFFSET: u8 = 0;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = r\" Value of the register as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u32 {\n\n self.bits\n\n }\n\n #[doc = \"Bits 25:31 - 31:25\\\\] Internal. Only to be used through TI provided API.\"]\n\n #[inline]\n", "file_path": "src/flash/fbstrobes.rs", "rank": 91, "score": 122.61722335868599 }, { "content": " const MASK: u8 = 255;\n\n const OFFSET: u8 = 0;\n\n ((self.bits >> OFFSET) & MASK as u32) as u8\n\n };\n\n TEST_PROGRAM_REVR { bits }\n\n }\n\n}\n\nimpl W {\n\n #[doc = r\" Reset value of the register\"]\n\n #[inline]\n\n pub fn reset_value() -> W {\n\n W { bits: 50688 }\n\n }\n\n #[doc = r\" Writes raw bits to the register\"]\n\n #[inline]\n\n pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {\n\n self.bits = bits;\n\n self\n\n }\n\n #[doc = \"Bits 28:31 - 31:28\\\\] Internal. Only to be used through TI provided API.\"]\n", "file_path": "src/fcfg1/misc_otp_data.rs", "rank": 92, "score": 122.42843509488071 }, { "content": "}\n\nimpl<'a> _B0_TYPEW<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n const MASK: u8 = 15;\n\n const OFFSET: u8 = 0;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = r\" Value of the register as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u32 {\n\n self.bits\n\n }\n\n #[doc = \"Bits 28:31 - 31:28\\\\] Internal. Only to be used through TI provided API.\"]\n\n #[inline]\n", "file_path": "src/flash/fcfg_bnk_type.rs", "rank": 93, "score": 122.00347249796856 }, { "content": "}\n\nimpl<'a> _TRIMBOD_HW<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n const MASK: u8 = 31;\n\n const OFFSET: u8 = 0;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = r\" Value of the register as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u32 {\n\n self.bits\n\n }\n\n #[doc = \"Bits 29:31 - 31:29\\\\] Internal. Only to be used through TI provided API.\"]\n\n #[inline]\n", "file_path": "src/fcfg1/volt_trim.rs", "rank": 94, "score": 122.00347249796859 }, { "content": "}\n\nimpl<'a> _VHV_EW<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n const MASK: u8 = 15;\n\n const OFFSET: u8 = 0;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = r\" Value of the register as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u32 {\n\n self.bits\n\n }\n\n #[doc = \"Bits 28:31 - 31:28\\\\] Internal. Only to be used through TI provided API.\"]\n\n #[inline]\n", "file_path": "src/fcfg1/flash_vhv.rs", "rank": 95, "score": 122.00347249796859 }, { "content": "}\n\nimpl<'a> _EV_SIGNALSW<'a> {\n\n #[doc = r\" Writes raw bits to the field\"]\n\n #[inline]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n const MASK: u8 = 255;\n\n const OFFSET: u8 = 0;\n\n self.w.bits &= !((MASK as u32) << OFFSET);\n\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = r\" Value of the register as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u32 {\n\n self.bits\n\n }\n\n #[doc = \"Bits 18:31 - 31:18\\\\] Internal. Only to be used through TI provided API.\"]\n\n #[inline]\n", "file_path": "src/aux_sce/wustat.rs", "rank": 96, "score": 122.0034724979686 }, { "content": " const MASK: u8 = 7;\n\n const OFFSET: u8 = 0;\n\n ((self.bits >> OFFSET) & MASK as u32) as u8\n\n };\n\n VIN_AT_X_EXT_RDR { bits }\n\n }\n\n}\n\nimpl W {\n\n #[doc = r\" Reset value of the register\"]\n\n #[inline]\n\n pub fn reset_value() -> W {\n\n W { bits: 2560139167 }\n\n }\n\n #[doc = r\" Writes raw bits to the register\"]\n\n #[inline]\n\n pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {\n\n self.bits = bits;\n\n self\n\n }\n\n #[doc = \"Bit 31 - 31:31\\\\] Internal. Only to be used through TI provided API.\"]\n", "file_path": "src/fcfg1/flash_otp_data4.rs", "rank": 97, "score": 121.81416958126248 }, { "content": " const MASK: u32 = 4294967295;\n\n const OFFSET: u8 = 0;\n\n ((self.bits >> OFFSET) & MASK as u32) as u32\n\n };\n\n RAW_ECCR { bits }\n\n }\n\n}\n\nimpl W {\n\n #[doc = r\" Reset value of the register\"]\n\n #[inline]\n\n pub fn reset_value() -> W {\n\n W { bits: 0 }\n\n }\n\n #[doc = r\" Writes raw bits to the register\"]\n\n #[inline]\n\n pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {\n\n self.bits = bits;\n\n self\n\n }\n\n #[doc = \"Bits 0:31 - 31:0\\\\] Internal. Only to be used through TI provided API.\"]\n\n #[inline]\n\n pub fn raw_ecc(&mut self) -> _RAW_ECCW {\n\n _RAW_ECCW { w: self }\n\n }\n\n}\n", "file_path": "src/flash/fraw_ecc.rs", "rank": 98, "score": 121.64015439881037 }, { "content": " const MASK: bool = true;\n\n const OFFSET: u8 = 0;\n\n ((self.bits >> OFFSET) & MASK as u32) != 0\n\n };\n\n RM0R { bits }\n\n }\n\n}\n\nimpl W {\n\n #[doc = r\" Reset value of the register\"]\n\n #[inline]\n\n pub fn reset_value() -> W {\n\n W { bits: 0 }\n\n }\n\n #[doc = r\" Writes raw bits to the register\"]\n\n #[inline]\n\n pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {\n\n self.bits = bits;\n\n self\n\n }\n\n #[doc = \"Bits 16:31 - 31:16\\\\] Internal. Only to be used through TI provided API.\"]\n", "file_path": "src/flash/fsprd.rs", "rank": 99, "score": 121.41865221895098 } ]
Rust
rust/xaynet-server/src/storage/s3.rs
abargiela/xaynet
2c87c5fd3f02f68dc9aefaaeb3a0371afa005722
use crate::settings::{S3BucketsSettings, S3Settings}; use rusoto_core::{credential::StaticProvider, request::TlsError, HttpClient, RusotoError}; use rusoto_s3::{ CreateBucketError, CreateBucketOutput, CreateBucketRequest, DeleteObjectsError, ListObjectsV2Error, PutObjectError, PutObjectOutput, PutObjectRequest, S3Client, StreamingBody, S3, }; use std::sync::Arc; use thiserror::Error; use xaynet_core::mask::Model; type S3Result<T> = Result<T, S3Error>; #[derive(Debug, Error)] pub enum S3Error { #[error("upload error: {0}")] Upload(#[from] RusotoError<PutObjectError>), #[error("create bucket error: {0}")] CreateBucket(#[from] RusotoError<CreateBucketError>), #[error("list objects error: {0}")] ListObjects(#[from] RusotoError<ListObjectsV2Error>), #[error("delete objects error: {0}")] DeleteObjects(#[from] RusotoError<DeleteObjectsError>), #[error("serialization failed")] Serialization(#[from] bincode::Error), #[error("empty response error")] EmptyResponse, #[error(transparent)] HttpClient(#[from] TlsError), } #[derive(Clone)] pub struct Client { buckets: Arc<S3BucketsSettings>, s3_client: S3Client, } #[cfg(test)] impl std::fmt::Debug for Client { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("Client") .field("buckets", &self.buckets) .finish() } } impl Client { pub fn new(settings: S3Settings) -> S3Result<Self> { let credentials_provider = StaticProvider::new_minimal(settings.access_key, settings.secret_access_key); let dispatcher = HttpClient::new()?; Ok(Self { buckets: Arc::new(settings.buckets), s3_client: S3Client::new_with(dispatcher, credentials_provider, settings.region), }) } pub async fn upload_global_model(&self, key: &str, global_model: &Model) -> S3Result<()> { debug!("store global model: {}", key); let data = bincode::serialize(global_model)?; self.upload(&self.buckets.global_models, key, data) .await .map_err(From::from) .map(|_| ()) } pub async fn create_global_models_bucket(&self) -> S3Result<()> { debug!("create global-models bucket"); match self.create_bucket("global-models").await { Ok(_) | Err(RusotoError::Service(CreateBucketError::BucketAlreadyExists(_))) | Err(RusotoError::Service(CreateBucketError::BucketAlreadyOwnedByYou(_))) => Ok(()), Err(err) => Err(S3Error::from(err)), } } async fn upload( &self, bucket: &str, key: &str, data: Vec<u8>, ) -> Result<PutObjectOutput, RusotoError<PutObjectError>> { let req = PutObjectRequest { bucket: bucket.to_string(), key: key.to_string(), body: Some(StreamingBody::from(data)), ..Default::default() }; self.s3_client.put_object(req).await } async fn create_bucket( &self, bucket: &str, ) -> Result<CreateBucketOutput, RusotoError<CreateBucketError>> { let req = CreateBucketRequest { bucket: bucket.to_string(), ..Default::default() }; self.s3_client.create_bucket(req).await } } #[cfg(test)] pub(in crate) mod tests { use super::*; use crate::storage::tests::create_global_model; use rusoto_core::Region; use rusoto_s3::{ Delete, DeleteObjectsOutput, DeleteObjectsRequest, GetObjectOutput, GetObjectRequest, ListObjectsV2Output, ListObjectsV2Request, ObjectIdentifier, }; use serial_test::serial; use tokio::io::AsyncReadExt; use xaynet_core::{common::RoundSeed, crypto::ByteObject}; impl Client { pub async fn clear_bucket(&self, bucket: &str) -> S3Result<()> { let mut continuation_token: Option<String> = None; loop { let list_obj_resp = self.list_objects(bucket, continuation_token).await?; if let Some(identifiers) = Self::unpack_object_identifier(&list_obj_resp) { self.delete_objects(bucket, identifiers).await?; } else { break; } continuation_token = Self::unpack_next_continuation_token(&list_obj_resp); if continuation_token.is_none() { break; } } Ok(()) } pub async fn download_global_model(&self, key: &str) -> Model { debug!("get global model {:?}", key); let object = self.download_object(&self.buckets.global_models, key).await; let content = Self::unpack_object(object).await.expect("unpack error"); bincode::deserialize(&content).expect("deserialization error") } fn unpack_object_identifier( list_obj_resp: &ListObjectsV2Output, ) -> Option<Vec<ObjectIdentifier>> { if let Some(objects) = &list_obj_resp.contents { let keys = objects .iter() .filter_map(|obj| obj.key.clone()) .map(|key| ObjectIdentifier { key, ..Default::default() }) .collect(); Some(keys) } else { None } } async fn delete_objects( &self, bucket: &str, identifiers: Vec<ObjectIdentifier>, ) -> Result<DeleteObjectsOutput, RusotoError<DeleteObjectsError>> { let req = DeleteObjectsRequest { bucket: bucket.to_string(), delete: Delete { objects: identifiers, ..Default::default() }, ..Default::default() }; self.s3_client.delete_objects(req).await.map_err(From::from) } async fn list_objects( &self, bucket: &str, continuation_token: Option<String>, ) -> Result<ListObjectsV2Output, RusotoError<ListObjectsV2Error>> { let req = ListObjectsV2Request { bucket: bucket.to_string(), continuation_token, max_keys: Some(1000), ..Default::default() }; self.s3_client .list_objects_v2(req) .await .map_err(From::from) } fn unpack_next_continuation_token(list_obj_resp: &ListObjectsV2Output) -> Option<String> { if let Some(is_truncated) = list_obj_resp.is_truncated { if is_truncated { list_obj_resp.next_continuation_token.clone() } else { None } } else { None } } async fn unpack_object(object: GetObjectOutput) -> S3Result<Vec<u8>> { let mut content = Vec::new(); object .body .ok_or(S3Error::EmptyResponse)? .into_async_read() .read_to_end(&mut content) .await .map_err(|_| S3Error::EmptyResponse)?; Ok(content) } async fn download_object(&self, bucket: &str, key: &str) -> GetObjectOutput { let req = GetObjectRequest { bucket: bucket.to_string(), key: key.to_string(), ..Default::default() }; self.s3_client .get_object(req) .await .expect("download error") } } fn create_minio_setup() -> S3Settings { let region = Region::Custom { name: String::from("minio"), endpoint: String::from("http://localhost:9000"), }; S3Settings { region, access_key: String::from("minio"), secret_access_key: String::from("minio123"), buckets: S3BucketsSettings::default(), } } pub async fn create_client() -> Client { let settings = create_minio_setup(); let client = Client::new(settings).unwrap(); client.create_global_models_bucket().await.unwrap(); client.clear_bucket("global-models").await.unwrap(); client } #[tokio::test] #[serial] async fn integration_test_upload_global_model() { let client = create_client().await; let global_model = create_global_model(10); let round_seed = hex::encode(RoundSeed::generate().as_slice()); let res = client .upload_global_model(&format!("{}_{}", 1, round_seed), &global_model) .await; assert!(res.is_ok()) } }
use crate::settings::{S3BucketsSettings, S3Settings}; use rusoto_core::{credential::StaticProvider, request::TlsError, HttpClient, RusotoError}; use rusoto_s3::{ CreateBucketError, CreateBucketOutput, CreateBucketRequest, DeleteObjectsError, ListObjectsV2Error, PutObjectError, PutObjectOutput, PutObjectRequest, S3Client, StreamingBody, S3, }; use std::sync::Arc; use thiserror::Error; use xaynet_core::mask::Model; type S3Result<T> = Result<T, S3Error>; #[derive(Debug, Error)] pub enum S3Error { #[error("upload error: {0}")] Upload(#[from] RusotoError<PutObjectError>), #[error("create bucket error: {0}")] CreateBucket(#[from] RusotoError<CreateBucketError>), #[error("list objects error: {0}")] ListObjects(#[from] RusotoError<ListObjectsV2Error>), #[error("delete objects error: {0}")] DeleteObjects(#[from] RusotoError<DeleteObjectsError>), #[error("serialization failed")] Serialization(#[from] bincode::Error), #[error("empty response error")] EmptyResponse, #[error(transparent)] HttpClient(#[from] TlsError), } #[derive(Clone)] pub struct Client { buckets: Arc<S3BucketsSettings>, s3_client: S3Client, } #[cfg(test)] impl std::fmt::Debug for Client { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("Client") .field("buckets", &self.buckets) .finish() } } impl Client { pub fn new(settings: S3Settings) -> S3Result<Self> { let credentials_provider = StaticProvider::new_minimal(settings.access_key, settings.secret_a
ifiers: Vec<ObjectIdentifier>, ) -> Result<DeleteObjectsOutput, RusotoError<DeleteObjectsError>> { let req = DeleteObjectsRequest { bucket: bucket.to_string(), delete: Delete { objects: identifiers, ..Default::default() }, ..Default::default() }; self.s3_client.delete_objects(req).await.map_err(From::from) } async fn list_objects( &self, bucket: &str, continuation_token: Option<String>, ) -> Result<ListObjectsV2Output, RusotoError<ListObjectsV2Error>> { let req = ListObjectsV2Request { bucket: bucket.to_string(), continuation_token, max_keys: Some(1000), ..Default::default() }; self.s3_client .list_objects_v2(req) .await .map_err(From::from) } fn unpack_next_continuation_token(list_obj_resp: &ListObjectsV2Output) -> Option<String> { if let Some(is_truncated) = list_obj_resp.is_truncated { if is_truncated { list_obj_resp.next_continuation_token.clone() } else { None } } else { None } } async fn unpack_object(object: GetObjectOutput) -> S3Result<Vec<u8>> { let mut content = Vec::new(); object .body .ok_or(S3Error::EmptyResponse)? .into_async_read() .read_to_end(&mut content) .await .map_err(|_| S3Error::EmptyResponse)?; Ok(content) } async fn download_object(&self, bucket: &str, key: &str) -> GetObjectOutput { let req = GetObjectRequest { bucket: bucket.to_string(), key: key.to_string(), ..Default::default() }; self.s3_client .get_object(req) .await .expect("download error") } } fn create_minio_setup() -> S3Settings { let region = Region::Custom { name: String::from("minio"), endpoint: String::from("http://localhost:9000"), }; S3Settings { region, access_key: String::from("minio"), secret_access_key: String::from("minio123"), buckets: S3BucketsSettings::default(), } } pub async fn create_client() -> Client { let settings = create_minio_setup(); let client = Client::new(settings).unwrap(); client.create_global_models_bucket().await.unwrap(); client.clear_bucket("global-models").await.unwrap(); client } #[tokio::test] #[serial] async fn integration_test_upload_global_model() { let client = create_client().await; let global_model = create_global_model(10); let round_seed = hex::encode(RoundSeed::generate().as_slice()); let res = client .upload_global_model(&format!("{}_{}", 1, round_seed), &global_model) .await; assert!(res.is_ok()) } }
ccess_key); let dispatcher = HttpClient::new()?; Ok(Self { buckets: Arc::new(settings.buckets), s3_client: S3Client::new_with(dispatcher, credentials_provider, settings.region), }) } pub async fn upload_global_model(&self, key: &str, global_model: &Model) -> S3Result<()> { debug!("store global model: {}", key); let data = bincode::serialize(global_model)?; self.upload(&self.buckets.global_models, key, data) .await .map_err(From::from) .map(|_| ()) } pub async fn create_global_models_bucket(&self) -> S3Result<()> { debug!("create global-models bucket"); match self.create_bucket("global-models").await { Ok(_) | Err(RusotoError::Service(CreateBucketError::BucketAlreadyExists(_))) | Err(RusotoError::Service(CreateBucketError::BucketAlreadyOwnedByYou(_))) => Ok(()), Err(err) => Err(S3Error::from(err)), } } async fn upload( &self, bucket: &str, key: &str, data: Vec<u8>, ) -> Result<PutObjectOutput, RusotoError<PutObjectError>> { let req = PutObjectRequest { bucket: bucket.to_string(), key: key.to_string(), body: Some(StreamingBody::from(data)), ..Default::default() }; self.s3_client.put_object(req).await } async fn create_bucket( &self, bucket: &str, ) -> Result<CreateBucketOutput, RusotoError<CreateBucketError>> { let req = CreateBucketRequest { bucket: bucket.to_string(), ..Default::default() }; self.s3_client.create_bucket(req).await } } #[cfg(test)] pub(in crate) mod tests { use super::*; use crate::storage::tests::create_global_model; use rusoto_core::Region; use rusoto_s3::{ Delete, DeleteObjectsOutput, DeleteObjectsRequest, GetObjectOutput, GetObjectRequest, ListObjectsV2Output, ListObjectsV2Request, ObjectIdentifier, }; use serial_test::serial; use tokio::io::AsyncReadExt; use xaynet_core::{common::RoundSeed, crypto::ByteObject}; impl Client { pub async fn clear_bucket(&self, bucket: &str) -> S3Result<()> { let mut continuation_token: Option<String> = None; loop { let list_obj_resp = self.list_objects(bucket, continuation_token).await?; if let Some(identifiers) = Self::unpack_object_identifier(&list_obj_resp) { self.delete_objects(bucket, identifiers).await?; } else { break; } continuation_token = Self::unpack_next_continuation_token(&list_obj_resp); if continuation_token.is_none() { break; } } Ok(()) } pub async fn download_global_model(&self, key: &str) -> Model { debug!("get global model {:?}", key); let object = self.download_object(&self.buckets.global_models, key).await; let content = Self::unpack_object(object).await.expect("unpack error"); bincode::deserialize(&content).expect("deserialization error") } fn unpack_object_identifier( list_obj_resp: &ListObjectsV2Output, ) -> Option<Vec<ObjectIdentifier>> { if let Some(objects) = &list_obj_resp.contents { let keys = objects .iter() .filter_map(|obj| obj.key.clone()) .map(|key| ObjectIdentifier { key, ..Default::default() }) .collect(); Some(keys) } else { None } } async fn delete_objects( &self, bucket: &str, ident
random
[ { "content": "fn error_code_type_error(response: &Value) -> RedisError {\n\n redis_type_error(\n\n \"Response status not valid integer\",\n\n Some(format!(\"Response was {:?}\", response)),\n\n )\n\n}\n\n\n\n/// Implements ['FromRedisValue'] and ['ToRedisArgs'] for types that implement ['ByteObject'].\n\n/// The Redis traits as well as the crypto types are both defined in foreign crates.\n\n/// To bypass the restrictions of orphan rule, we use `Newtypes` for the crypto types.\n\n///\n\n/// Each crypto type has two `Newtypes`, one for reading and one for writing.\n\n/// The difference between `Read` and `Write` is that the write `Newtype` does not take the\n\n/// ownership of the value but only a reference. This allows us to use references in the\n\n/// [`Client`] methods. The `Read` Newtype also implements [`ToRedisArgs`] to reduce the\n\n/// conversion overhead that you would get if you wanted to reuse a `Read` value for another\n\n/// Redis query.\n\n///\n\n/// Example:\n\n///\n", "file_path": "rust/xaynet-server/src/storage/impls.rs", "rank": 0, "score": 195852.24491439637 }, { "content": "// Validates the bucket name\n\n// [Rules for AWS bucket naming](https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html)\n\nfn validate_s3_bucket_name(bucket_name: &str) -> Result<(), ValidationError> {\n\n // https://stackoverflow.com/questions/50480924/regex-for-s3-bucket-name#comment104807676_58248645\n\n // I had to use fancy_regex here because the std regex does not support `look-around`\n\n let re =\n\n Regex::new(r\"(?!^(\\d{1,3}\\.){3}\\d{1,3}$)(^[a-z0-9]([a-z0-9-]*(\\.[a-z0-9])?)*$(?<!\\-))\")\n\n .unwrap();\n\n match re.is_match(bucket_name) {\n\n Ok(true) => Ok(()),\n\n Ok(false) => Err(ValidationError::new(\"invalid bucket name\\n See here: https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html\")),\n\n // something went wrong with the regex engine\n\n Err(_) => Err(ValidationError::new(\"can not validate bucket name\")),\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/settings/s3.rs", "rank": 1, "score": 195713.66837578482 }, { "content": "pub fn message<F, P>(f: F) -> (Message, Vec<u8>)\n\nwhere\n\n F: Fn() -> (P, Vec<u8>),\n\n P: Into<Payload>,\n\n{\n\n let (payload, payload_bytes) = f();\n\n let payload: Payload = payload.into();\n\n let tag = match payload {\n\n Payload::Sum(_) => Tag::Sum,\n\n Payload::Update(_) => Tag::Update,\n\n Payload::Sum2(_) => Tag::Sum2,\n\n _ => panic!(\"chunks not supported\"),\n\n };\n\n let message = Message {\n\n signature: Some(signature().0),\n\n participant_pk: participant_pk().0,\n\n coordinator_pk: coordinator_pk().0,\n\n payload,\n\n is_multipart: false,\n\n tag,\n", "file_path": "rust/xaynet-core/src/testutils/messages.rs", "rank": 2, "score": 187093.36672657754 }, { "content": "pub fn parse_sum(c: &mut Criterion) {\n\n let sum_message = helpers::message(helpers::sum::payload).0;\n\n let mut bytes = vec![0; sum_message.buffer_length()];\n\n sum_message.to_bytes(&mut bytes, &participant_sk());\n\n\n\n // This benchmark is also quite unstable so make it a bit more\n\n // relaxed\n\n let mut bench = c.benchmark_group(\"parse_sum\");\n\n bench.confidence_level(0.9).noise_threshold(0.05);\n\n bench.bench_function(\"parse from slice\", |b| {\n\n b.iter(|| Message::from_byte_slice(&black_box(bytes.as_slice())))\n\n });\n\n}\n\n\n\ncriterion_group!(name = benches;\n\n // By default criterion collection 100 sample and the\n\n // measurement time is 5 seconds, but the results are\n\n // quite unstable with this configuration. This\n\n // config makes the benchmarks running longer but\n\n // provide more reliable results\n\n config = Criterion::default().sample_size(1000).measurement_time(Duration::new(10, 0));\n\n targets = emit_sum, parse_sum);\n\ncriterion_main!(benches);\n", "file_path": "rust/benches/benches/messages.rs", "rank": 3, "score": 186761.82801424342 }, { "content": "pub fn emit_sum(c: &mut Criterion) {\n\n let (sum_message, _) = helpers::message(helpers::sum::payload);\n\n let buf_len = sum_message.buffer_length();\n\n let mut pre_allocated_buf = vec![0; buf_len];\n\n\n\n // the benchmarks run under 20 ns. The results for such\n\n // benchmarks can vary a bit more so we:\n\n // - eliminate outliers a bit more aggressively (confidence level)\n\n // - increase the noise threshold\n\n //\n\n // Note: criterion always reports p = 0.0 so lowering the\n\n // significance level doesn't change anything\n\n let mut bench = c.benchmark_group(\"emit_sum\");\n\n bench.confidence_level(0.9).noise_threshold(0.05);\n\n\n\n bench.bench_function(\"compute buffer length\", |b| {\n\n b.iter(|| black_box(&sum_message).buffer_length())\n\n });\n\n\n\n bench.bench_function(\"emit sum message\", |b| {\n\n b.iter(|| {\n\n sum_message.to_bytes(\n\n black_box(&mut pre_allocated_buf),\n\n black_box(&participant_sk()),\n\n )\n\n })\n\n });\n\n}\n\n\n", "file_path": "rust/benches/benches/messages.rs", "rank": 4, "score": 186761.82801424342 }, { "content": "fn redis_type_error(desc: &'static str, details: Option<String>) -> RedisError {\n\n if let Some(details) = details {\n\n RedisError::from((ErrorKind::TypeError, desc, details))\n\n } else {\n\n RedisError::from((ErrorKind::TypeError, desc))\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/storage/impls.rs", "rank": 5, "score": 179141.63232560395 }, { "content": "pub fn mask_object(len: usize) -> MaskObject {\n\n // The model contains 2 sub mask objects:\n\n // - the masked model, which has:\n\n // - 4 bytes for the config\n\n // - 4 bytes for the number of weights\n\n // - 6 bytes (with our config) for each weight\n\n // - the masked scalar:\n\n // - 4 bytes for the config\n\n // - 6 bytes (with our config) for the scalar\n\n //\n\n // The only parameter we control to make the length vary is\n\n // the number of weights. The lengths is then:\n\n //\n\n // len = (4 + 4 + n_weights * 6) + (4 + 6) = 18 + 6 * n_weights\n\n //\n\n // So we must have: (len - 18) % 6 = 0\n\n if (len - 18) % 6 != 0 {\n\n panic!(\"invalid masked model length\")\n\n }\n\n let n_weights = (len - 18) / 6;\n", "file_path": "rust/xaynet-core/src/testutils/multipart.rs", "rank": 7, "score": 168779.3243341148 }, { "content": "/// Extracts a participant public key from a request body\n\nfn part_pk() -> impl Filter<Extract = (ParticipantPublicKey,), Error = warp::Rejection> + Clone {\n\n warp::body::bytes().and_then(|body: Bytes| async move {\n\n if let Some(pk) = ParticipantPublicKey::from_slice(body.bytes()) {\n\n Ok(pk)\n\n } else {\n\n Err(warp::reject::custom(InvalidPublicKey))\n\n }\n\n })\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/rest.rs", "rank": 8, "score": 159156.1457108179 }, { "content": "/// Generates a secure pseudo-random integer.\n\n///\n\n/// Draws from a uniform distribution over the integers between zero (included) and\n\n/// `max_int` (excluded). Employs the `ChaCha20` stream cipher as a PRNG.\n\npub fn generate_integer(prng: &mut ChaCha20Rng, max_int: &BigUint) -> BigUint {\n\n if max_int.is_zero() {\n\n return BigUint::zero();\n\n }\n\n let mut bytes = max_int.to_bytes_le();\n\n let mut rand_int = max_int.clone();\n\n while &rand_int >= max_int {\n\n prng.fill_bytes(&mut bytes);\n\n rand_int = BigUint::from_bytes_le(&bytes);\n\n }\n\n rand_int\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use num::traits::{pow::Pow, Num};\n\n use rand::SeedableRng;\n\n\n\n use super::*;\n\n\n", "file_path": "rust/xaynet-core/src/crypto/prng.rs", "rank": 9, "score": 155545.31748688137 }, { "content": "// A small wrapper to support the list type for environment variable values.\n\n// config-rs always converts a environment variable value to a string\n\n// https://github.com/mehcode/config-rs/blob/master/src/env.rs#L114 .\n\n// Strings however, are not supported by the deserializer of rusoto_core::Region (only sequences).\n\n// Therefore we use S3RegionVisitor to implement `visit_str` and thus support\n\n// the deserialization of rusoto_core::Region from strings.\n\nfn deserialize_s3_region<'de, D>(deserializer: D) -> Result<Region, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n struct S3RegionVisitor;\n\n\n\n impl<'de> Visitor<'de> for S3RegionVisitor {\n\n type Value = Region;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"sequence of \\\"name Optional<endpoint>\\\"\")\n\n }\n\n\n\n // FIXME: a copy of https://rusoto.github.io/rusoto/src/rusoto_core/region.rs.html#185\n\n // I haven't managed to create a sequence and call `self.visit_seq(seq)`.\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n let mut seq = value.split_whitespace();\n", "file_path": "rust/xaynet-server/src/settings/s3.rs", "rank": 10, "score": 154908.05267391104 }, { "content": "/// Extract the masked model from an update message\n\n///\n\n/// # Panic\n\n///\n\n/// Panic if this message is not an update message\n\npub fn masked_model(msg: &Message) -> MaskObject {\n\n if let Payload::Update(Update { masked_model, .. }) = &msg.payload {\n\n masked_model.clone()\n\n } else {\n\n panic!(\"not an update message\");\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/state_machine/tests/utils.rs", "rank": 11, "score": 154253.4067363959 }, { "content": "pub fn create_mask_zeroed(byte_size: usize) -> MaskObject {\n\n let config = MaskConfig {\n\n group_type: GroupType::Prime,\n\n data_type: DataType::F32,\n\n bound_type: BoundType::B0,\n\n model_type: ModelType::M3,\n\n };\n\n\n\n MaskObject::new(\n\n config,\n\n vec![BigUint::zero(); byte_size],\n\n config,\n\n BigUint::zero(),\n\n )\n\n .unwrap()\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/storage/tests/mod.rs", "rank": 12, "score": 152437.1059887698 }, { "content": "/// Construct a [`Fetcher`] service\n\npub fn fetcher(event_subscriber: &EventSubscriber) -> impl Fetcher + Sync + Send + Clone + 'static {\n\n let round_params = ServiceBuilder::new()\n\n .buffer(100)\n\n .concurrency_limit(100)\n\n .layer(FetcherLayer)\n\n .service(RoundParamsService::new(event_subscriber));\n\n\n\n let mask_length = ServiceBuilder::new()\n\n .buffer(100)\n\n .concurrency_limit(100)\n\n .layer(FetcherLayer)\n\n .service(MaskLengthService::new(event_subscriber));\n\n\n\n let model = ServiceBuilder::new()\n\n .buffer(100)\n\n .concurrency_limit(100)\n\n .layer(FetcherLayer)\n\n .service(ModelService::new(event_subscriber));\n\n\n\n let sum_dict = ServiceBuilder::new()\n", "file_path": "rust/xaynet-server/src/services/fetchers/mod.rs", "rank": 13, "score": 152052.54105647362 }, { "content": "fn deserialize_max_message_size<'de, D>(deserializer: D) -> Result<Option<usize>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let value: Option<usize> = Option::deserialize(deserializer)?;\n\n match value {\n\n Some(size) => {\n\n if size >= MIN_MESSAGE_SIZE {\n\n Ok(Some(size))\n\n } else {\n\n Err(SerdeError::custom(format!(\n\n \"max_message_size must be at least {} (got {})\",\n\n MIN_MESSAGE_SIZE, size\n\n )))\n\n }\n\n }\n\n None => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-client/src/mobile_client/participant/mod.rs", "rank": 14, "score": 146306.78549988975 }, { "content": "pub fn create_mask(byte_size: usize, number: u32) -> MaskObject {\n\n let config = MaskConfig {\n\n group_type: GroupType::Prime,\n\n data_type: DataType::F32,\n\n bound_type: BoundType::B0,\n\n model_type: ModelType::M3,\n\n };\n\n\n\n MaskObject::new(\n\n config,\n\n vec![BigUint::from(number); byte_size],\n\n config,\n\n BigUint::zero(),\n\n )\n\n .unwrap()\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/storage/tests/mod.rs", "rank": 15, "score": 145936.620912874 }, { "content": "/// Sign and encrypt the given message using the given round\n\n/// parameters and particpant keys.\n\npub fn encrypt_message(\n\n message: &Message,\n\n round_params: &RoundParameters,\n\n participant_signing_keys: &SigningKeyPair,\n\n) -> Vec<u8> {\n\n let serialized = serialize_message(message, participant_signing_keys);\n\n round_params.pk.encrypt(&serialized[..])\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/services/tests/utils.rs", "rank": 16, "score": 132026.07126467908 }, { "content": "pub fn create_seed_dict(\n\n sum_dict: SumDict,\n\n seed_updates: &[(UpdateParticipantPublicKey, LocalSeedDict)],\n\n) -> SeedDict {\n\n let mut seed_dict: SeedDict = sum_dict\n\n .keys()\n\n .map(|pk| (*pk, LocalSeedDict::new()))\n\n .collect();\n\n\n\n for (pk, local_seed_dict) in seed_updates {\n\n for (sum_pk, seed) in local_seed_dict {\n\n seed_dict.get_mut(sum_pk).unwrap().insert(*pk, seed.clone());\n\n }\n\n }\n\n\n\n seed_dict\n\n}\n\n\n\npub async fn create_and_write_sum_participant_entries(\n\n client: &Client,\n", "file_path": "rust/xaynet-server/src/storage/tests/mod.rs", "rank": 17, "score": 130599.67997828513 }, { "content": "pub fn enable_logging() {\n\n let _fmt_subscriber = FmtSubscriber::builder()\n\n .with_env_filter(EnvFilter::from_default_env())\n\n .with_ansi(true)\n\n .try_init();\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/state_machine/tests/utils.rs", "rank": 18, "score": 130599.67997828513 }, { "content": "pub fn create_local_seed_entries(\n\n sum_pks: &[SumParticipantPublicKey],\n\n) -> Vec<(UpdateParticipantPublicKey, LocalSeedDict)> {\n\n let mut entries = Vec::new();\n\n\n\n for _ in 0..sum_pks.len() {\n\n let SigningKeyPair {\n\n public: update_pk, ..\n\n } = SigningKeyPair::generate();\n\n\n\n let mut local_seed_dict = LocalSeedDict::new();\n\n for sum_pk in sum_pks {\n\n let seed = EncryptedMaskSeed::zeroed();\n\n local_seed_dict.insert(*sum_pk, seed);\n\n }\n\n entries.push((update_pk, local_seed_dict))\n\n }\n\n\n\n entries\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/storage/tests/mod.rs", "rank": 19, "score": 129224.43120146162 }, { "content": "pub fn mask_config() -> MaskConfig {\n\n // config.order() = 20_000_000_000_001 with this config, so the data\n\n // should be stored on 6 bytes.\n\n MaskConfig {\n\n group_type: GroupType::Integer,\n\n data_type: DataType::I32,\n\n bound_type: BoundType::B0,\n\n model_type: ModelType::M3,\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-core/src/testutils/multipart.rs", "rank": 20, "score": 126409.15729951455 }, { "content": "fn into_fetch_error<E: Into<Box<dyn ::std::error::Error + 'static + Sync + Send>>>(\n\n e: E,\n\n) -> FetchError {\n\n anyhow::anyhow!(\"Fetcher failed: {:?}\", e.into())\n\n}\n\n\n\n#[async_trait]\n\nimpl<RoundParams, SumDict, SeedDict, MaskLength, Model> Fetcher\n\n for Fetchers<RoundParams, SumDict, SeedDict, MaskLength, Model>\n\nwhere\n\n Self: Send + Sync + 'static,\n\n\n\n RoundParams: Service<RoundParamsRequest, Response = RoundParamsResponse> + Send + 'static,\n\n <RoundParams as Service<RoundParamsRequest>>::Future: Send + Sync + 'static,\n\n <RoundParams as Service<RoundParamsRequest>>::Error:\n\n Into<Box<dyn ::std::error::Error + 'static + Sync + Send>>,\n\n\n\n MaskLength: Service<MaskLengthRequest, Response = MaskLengthResponse> + Send + 'static,\n\n <MaskLength as Service<MaskLengthRequest>>::Future: Send + Sync + 'static,\n\n <MaskLength as Service<MaskLengthRequest>>::Error:\n", "file_path": "rust/xaynet-server/src/services/fetchers/mod.rs", "rank": 21, "score": 125539.98470025562 }, { "content": "pub fn model_settings() -> ModelSettings {\n\n ModelSettings { size: 1 }\n\n}\n\n\n\npub async fn init_shared() -> (Shared, EventSubscriber, RequestSender) {\n\n let redis = redis::Client::new(\"redis://127.0.0.1/\", 10).await.unwrap();\n\n redis.connection().await.flush_db().await.unwrap();\n\n\n\n let coordinator_state =\n\n CoordinatorState::new(pet_settings(), mask_settings(), model_settings());\n\n\n\n let (event_publisher, event_subscriber) = EventPublisher::init(\n\n coordinator_state.round_id,\n\n coordinator_state.keys.clone(),\n\n coordinator_state.round_params.clone(),\n\n PhaseName::Idle,\n\n );\n\n\n\n let (request_rx, request_tx) = RequestReceiver::new();\n\n (\n", "file_path": "rust/xaynet-server/src/state_machine/tests/utils.rs", "rank": 22, "score": 123702.81385652909 }, { "content": "pub fn pet_settings() -> PetSettings {\n\n PetSettings {\n\n sum: 0.4,\n\n update: 0.5,\n\n min_sum_count: 1,\n\n min_update_count: 3,\n\n min_sum_time: 1,\n\n max_sum_time: 2,\n\n min_update_time: 1,\n\n max_update_time: 2,\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/state_machine/tests/utils.rs", "rank": 23, "score": 123702.81385652909 }, { "content": "pub fn mask_settings() -> MaskSettings {\n\n MaskSettings {\n\n group_type: GroupType::Prime,\n\n data_type: DataType::F32,\n\n bound_type: BoundType::B0,\n\n model_type: ModelType::M3,\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/state_machine/tests/utils.rs", "rank": 24, "score": 123702.81385652909 }, { "content": "#[async_trait]\n\npub trait LocalModel {\n\n async fn get_local_model(&mut self) -> Option<Model>;\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct ClientState<Type> {\n\n participant: Participant<Type>,\n\n round_params: RoundParameters,\n\n}\n\n\n\nimpl<Type> ClientState<Type> {\n\n async fn check_round_freshness<T: ApiClient>(\n\n &self,\n\n api: &mut T,\n\n ) -> Result<(), ClientError<T::Error>> {\n\n debug!(\"fetching round parameters\");\n\n let round_params = api.get_round_params().await?;\n\n if round_params.seed != self.round_params.seed {\n\n info!(\"new round parameters\");\n\n Err(ClientError::RoundOutdated)\n", "file_path": "rust/xaynet-client/src/mobile_client/client.rs", "rank": 25, "score": 123550.74760907849 }, { "content": "pub fn task_signatures() -> (Signature, Signature) {\n\n (\n\n messages::sum::sum_task_signature().0,\n\n messages::update::update_task_signature().0,\n\n )\n\n}\n\n\n", "file_path": "rust/xaynet-core/src/testutils/multipart.rs", "rank": 26, "score": 122914.14178762666 }, { "content": "#[derive(Debug, StructOpt)]\n\n#[structopt(name = \"Test Drive\")]\n\nstruct Opt {\n\n #[structopt(\n\n default_value = \"http://127.0.0.1:8081\",\n\n short,\n\n help = \"The URL of the coordinator\"\n\n )]\n\n url: String,\n\n\n\n #[structopt(default_value = \"4\", short, help = \"The length of the model\")]\n\n len: u32,\n\n\n\n #[cfg_attr(\n\n feature = \"tls\",\n\n cfg(feature = \"tls\"),\n\n structopt(\n\n short,\n\n long,\n\n parse(from_os_str),\n\n help = \"The list of trusted DER/PEM encoded TLS server certificates\"\n\n )\n\n )]\n\n certificates: Vec<PathBuf>,\n\n}\n\n\n", "file_path": "rust/examples/mobile-client.rs", "rank": 27, "score": 121303.30419394624 }, { "content": "#[async_trait]\n\npub trait ApiClient {\n\n type Error: ::std::fmt::Debug + ::std::error::Error + 'static;\n\n\n\n /// Retrieve the current round parameters\n\n async fn get_round_params(&mut self) -> Result<RoundParameters, Self::Error>;\n\n\n\n /// Retrieve the current sum dictionary, if available\n\n async fn get_sums(&mut self) -> Result<Option<SumDict>, Self::Error>;\n\n\n\n /// Retrieve the current seed dictionary for the given sum\n\n /// participant, if available.\n\n async fn get_seeds(\n\n &mut self,\n\n pk: SumParticipantPublicKey,\n\n ) -> Result<Option<UpdateSeedDict>, Self::Error>;\n\n\n\n /// Retrieve the current model/mask length, if available\n\n async fn get_mask_length(&mut self) -> Result<Option<u64>, Self::Error>;\n\n\n\n /// Retrieve the current global model, if available.\n\n async fn get_model(&mut self) -> Result<Option<Model>, Self::Error>;\n\n\n\n /// Send an encrypted and signed PET message to the coordinator.\n\n async fn send_message(&mut self, msg: Vec<u8>) -> Result<(), Self::Error>;\n\n}\n", "file_path": "rust/xaynet-client/src/api/mod.rs", "rank": 28, "score": 120299.71286470866 }, { "content": "type UpdateSignature = Signature;\n\n#[derive(Serialize, Deserialize, Clone)]\n\npub struct Awaiting;\n\n\n\nimpl Participant<Awaiting> {\n\n pub fn new(state: ParticipantState) -> Self {\n\n Self {\n\n inner: Awaiting,\n\n state,\n\n }\n\n }\n\n\n\n /// Check eligibility for a task given probabilities for `Sum` and `Update`\n\n /// selection in this round.\n\n ///\n\n /// Returns the participant [`Role`] selected for this round.\n\n pub fn determine_role(self, round_seed: &[u8], round_sum: f64, round_update: f64) -> Role {\n\n let (sum_signature, update_signature) = self.compute_signatures(round_seed);\n\n if sum_signature.is_eligible(round_sum) {\n\n Participant::<Sum>::new(self.state, sum_signature).into()\n", "file_path": "rust/xaynet-client/src/mobile_client/participant/awaiting.rs", "rank": 29, "score": 120152.05455213034 }, { "content": "type SumSignature = Signature;\n", "file_path": "rust/xaynet-client/src/mobile_client/participant/awaiting.rs", "rank": 30, "score": 120152.05455213034 }, { "content": "pub fn signature() -> (Signature, Vec<u8>) {\n\n let bytes = vec![0xaa; 64];\n\n let signature = Signature::from_slice(bytes.as_slice()).unwrap();\n\n (signature, bytes)\n\n}\n\n\n", "file_path": "rust/xaynet-core/src/testutils/messages.rs", "rank": 31, "score": 119954.74997875666 }, { "content": "#[proc_macro]\n\npub fn metrics(input: TokenStream) -> TokenStream {\n\n let Send { sender, metrics } = parse_macro_input!(input as Send);\n\n\n\n TokenStream::from(quote! {\n\n #[cfg(feature = \"metrics\")]\n\n {\n\n #(#sender.send(#metrics);)*\n\n }\n\n })\n\n}\n", "file_path": "rust/xaynet-macros/src/lib.rs", "rank": 32, "score": 118579.50120193316 }, { "content": "/// A primitive data type as a target for model conversion.\n\nenum PrimitiveType {\n\n F32,\n\n F64,\n\n I32,\n\n I64,\n\n}\n\n\n\n#[derive(Error, Debug)]\n\n#[error(\"Could not convert weight {weight} to primitive type {target}\")]\n\n/// Errors related to model conversion into primitives.\n\npub struct ModelCastError {\n\n weight: Ratio<BigInt>,\n\n target: PrimitiveType,\n\n}\n\n\n\n#[derive(Error, Debug)]\n\n#[error(\"Could not convert primitive type {0:?} to model weight\")]\n\n/// Errors related to model conversion from primitives.\n\npub struct PrimitiveCastError<P: Debug>(P);\n\n\n", "file_path": "rust/xaynet-core/src/mask/model.rs", "rank": 33, "score": 118274.52600822295 }, { "content": "fn pause() {\n\n let mut stdout = stdout();\n\n stdout.write_all(b\"Press Enter to continue...\").unwrap();\n\n stdout.flush().unwrap();\n\n stdin().read_exact(&mut [0]).unwrap();\n\n}\n\n\n", "file_path": "rust/examples/mobile-client.rs", "rank": 34, "score": 117869.25161169858 }, { "content": "/// Create an [`EventPublisher`]/[`EventSubscriber`] pair with default\n\n/// values similar to those produced in practice when instantiating a\n\n/// new coordinator.\n\npub fn new_event_channels() -> (EventPublisher, EventSubscriber) {\n\n let keys = EncryptKeyPair::generate();\n\n let params = RoundParameters {\n\n pk: keys.public,\n\n sum: 0.0,\n\n update: 0.0,\n\n seed: RoundSeed::generate(),\n\n };\n\n let phase = PhaseName::Idle;\n\n let round_id = 0;\n\n EventPublisher::init(round_id, keys, params, phase)\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/services/tests/utils.rs", "rank": 35, "score": 117669.8301937216 }, { "content": "// perform the participant task (this function should be triggered regularly on the phone while the\n\n// app is active or in a background task)\n\nfn perform_task(\n\n url: &str,\n\n bytes: &[u8],\n\n model: Model,\n\n #[cfg(feature = \"tls\")] certificates: &[PathBuf],\n\n) -> Vec<u8> {\n\n let mut client = MobileClient::restore(\n\n url,\n\n bytes,\n\n #[cfg(feature = \"tls\")]\n\n certificates,\n\n )\n\n .unwrap();\n\n println!(\"task: {:?}\", &client.get_current_state());\n\n\n\n client.set_local_model(model);\n\n client = match client.try_to_proceed() {\n\n Ok(client) => client,\n\n Err((client, _)) => client,\n\n };\n", "file_path": "rust/examples/mobile-client.rs", "rank": 36, "score": 116189.94852252779 }, { "content": "struct LocalModelCache(Option<Model>);\n\n\n\nimpl LocalModelCache {\n\n fn set_local_model(&mut self, model: Model) {\n\n self.0 = Some(model);\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl LocalModel for LocalModelCache {\n\n async fn get_local_model(&mut self) -> Option<Model> {\n\n self.0.clone()\n\n }\n\n}\n", "file_path": "rust/xaynet-client/src/mobile_client/mod.rs", "rank": 37, "score": 115980.64643804349 }, { "content": "pub fn coordinator_pk() -> (PublicEncryptKey, Vec<u8>) {\n\n let bytes = vec![0xcc; 32];\n\n let pk = PublicEncryptKey::from_slice(&bytes).unwrap();\n\n (pk, bytes)\n\n}\n\n\n", "file_path": "rust/xaynet-core/src/testutils/messages.rs", "rank": 38, "score": 115959.37314110245 }, { "content": "pub fn participant_pk() -> (PublicSigningKey, Vec<u8>) {\n\n let bytes = vec![0xbb; 32];\n\n let pk = PublicSigningKey::from_slice(&bytes).unwrap();\n\n (pk, bytes)\n\n}\n\n\n", "file_path": "rust/xaynet-core/src/testutils/messages.rs", "rank": 39, "score": 115959.37314110245 }, { "content": "/// Converts a data fetcher into a `warp` filter.\n\nfn with_fetcher<F: Fetcher + Sync + Send + 'static + Clone>(\n\n fetcher: F,\n\n) -> impl Filter<Extract = (F,), Error = Infallible> + Clone {\n\n warp::any().map(move || fetcher.clone())\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/rest.rs", "rank": 40, "score": 115595.83152027 }, { "content": "/// Return a seed dict that has the given length `len` once\n\n/// serialized. `len - 4` must be multiple of 112.\n\npub fn local_seed_dict(len: usize) -> LocalSeedDict {\n\n // a public key is 32 bytes and an encrypted mask seed 80.\n\n let entry_len = 32 + 80;\n\n if ((len - 4) % entry_len) != 0 {\n\n panic!(\"invalid length for seed dict\");\n\n }\n\n\n\n let nb_entries = (len - 4) / entry_len;\n\n let mut dict = LocalSeedDict::new();\n\n for i in 0..nb_entries {\n\n let b = (i % 0xff) as u8;\n\n let pk = PublicSigningKey::from_slice(vec![b; 32].as_slice()).unwrap();\n\n let mask_seed = EncryptedMaskSeed::try_from(vec![b; 80]).unwrap();\n\n dict.insert(pk, mask_seed);\n\n }\n\n\n\n // Check that our calculations are correct\n\n assert_eq!(dict.buffer_length(), len);\n\n dict\n\n}\n\n\n", "file_path": "rust/xaynet-core/src/testutils/multipart.rs", "rank": 41, "score": 114710.43838485159 }, { "content": "#[cfg(feature = \"model-persistence\")]\n\npub fn create_global_model(nb_elements: usize) -> Model {\n\n Model::from_primitives(vec![0; nb_elements].into_iter()).unwrap()\n\n}\n", "file_path": "rust/xaynet-server/src/storage/tests/mod.rs", "rank": 42, "score": 114710.43838485159 }, { "content": "// // override the old state with the new one\n\n// db.save(\"client_state\", serialized_client);\n\n// }\n\nfn main() -> Result<(), ()> {\n\n let opt = Opt::from_args();\n\n\n\n let _fmt_subscriber = FmtSubscriber::builder()\n\n .with_env_filter(EnvFilter::from_default_env())\n\n .with_ansi(true)\n\n .init();\n\n\n\n // create a new client\n\n let client = MobileClient::init(\n\n &opt.url,\n\n get_participant_settings(),\n\n #[cfg(feature = \"tls\")]\n\n &opt.certificates,\n\n )\n\n .unwrap();\n\n // serialize the current client state (and save it on the phone)\n\n let mut bytes = client.serialize();\n\n\n\n // simulate the regular execution of perform_task on the phone\n", "file_path": "rust/examples/mobile-client.rs", "rank": 43, "score": 113706.55195084552 }, { "content": "/// Extract the local seed dictioanry from an update message\n\n///\n\n/// # Panic\n\n///\n\n/// Panic if this message is not an update message\n\npub fn local_seed_dict(msg: &Message) -> LocalSeedDict {\n\n if let Payload::Update(Update {\n\n local_seed_dict, ..\n\n }) = &msg.payload\n\n {\n\n local_seed_dict.clone()\n\n } else {\n\n panic!(\"not an update message\");\n\n }\n\n}\n", "file_path": "rust/xaynet-server/src/state_machine/tests/utils.rs", "rank": 44, "score": 112325.60636287408 }, { "content": "/// Extract the ephemeral public key from a sum message.\n\n///\n\n/// # Panic\n\n///\n\n/// Panic if this message is not a sum message\n\npub fn ephm_pk(msg: &Message) -> SumParticipantEphemeralPublicKey {\n\n if let Payload::Sum(Sum { ephm_pk, .. }) = &msg.payload {\n\n *ephm_pk\n\n } else {\n\n panic!(\"not a sum message\");\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/state_machine/tests/utils.rs", "rank": 45, "score": 111186.34954177523 }, { "content": "pub fn create_sum_participant_entry() -> (SumParticipantPublicKey, SumParticipantEphemeralPublicKey)\n\n{\n\n let SigningKeyPair { public: pk, .. } = SigningKeyPair::generate();\n\n let EncryptKeyPair {\n\n public: ephm_pk, ..\n\n } = EncryptKeyPair::generate();\n\n (pk, ephm_pk)\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/storage/tests/mod.rs", "rank": 46, "score": 110922.45764220186 }, { "content": "fn dummy_config() -> MaskConfig {\n\n MaskConfig {\n\n group_type: GroupType::Prime,\n\n data_type: DataType::F32,\n\n bound_type: BoundType::B0,\n\n model_type: ModelType::M3,\n\n }\n\n}\n", "file_path": "rust/xaynet-client/src/participant.rs", "rank": 47, "score": 109008.71160363939 }, { "content": "fn get_participant_settings() -> ParticipantSettings {\n\n sodiumoxide::init().unwrap();\n\n\n\n let secret_key = MobileClient::create_participant_secret_key();\n\n ParticipantSettings {\n\n secret_key,\n\n aggregation_config: AggregationConfig {\n\n mask: MaskConfig {\n\n group_type: GroupType::Prime,\n\n data_type: DataType::F32,\n\n bound_type: BoundType::B0,\n\n model_type: ModelType::M3,\n\n },\n\n scalar: 1_f64,\n\n },\n\n max_message_size: Default::default(),\n\n }\n\n}\n\n\n\n// // How a Dart API could look like:\n", "file_path": "rust/examples/mobile-client.rs", "rank": 48, "score": 109008.71160363939 }, { "content": "/// Create an update message with a seed dictionary of length\n\n/// `dict_len` and a mask object of length `mask_len`. For a message\n\n/// of size `S`, the following must hold true:\n\n///\n\n/// ```no_rust\n\n/// (mask_len - 22) % 6 = 0\n\n/// (dict_len - 4) % 112 = 0\n\n/// S = dict_len + mask_len + 64*2 + 136\n\n/// ```\n\npub fn message(dict_len: usize, mask_obj_len: usize) -> Message {\n\n let (message, _) = messages::message(|| {\n\n let payload = update(dict_len, mask_obj_len);\n\n let dummy_buf = vec![];\n\n (payload, dummy_buf)\n\n });\n\n message\n\n}\n", "file_path": "rust/xaynet-core/src/testutils/multipart.rs", "rank": 49, "score": 108764.97095783016 }, { "content": "/// Create an update payload with a seed dictionary of length\n\n/// `dict_len` and a mask object of length `mask_len`. For a payload\n\n/// of size `S`, the following must hold true:\n\n///\n\n/// ```no_rust\n\n/// (mask_len - 22) % 6 = 0\n\n/// (dict_len - 4) % 112 = 0\n\n/// S = dict_len + mask_len + 64*2\n\n/// ```\n\npub fn update(dict_len: usize, mask_obj_len: usize) -> Update {\n\n // An update message is made of:\n\n // - 2 signatures of 64 bytes each\n\n // - a mask object of variable length\n\n // - a seed dictionary of variable length\n\n //\n\n // The `Message` overhead is 136 bytes (see\n\n // crate::messages::HEADER_LEN). So a message with\n\n // `dict_len` = 100 and `mask_obj_len` = 100 will be:\n\n //\n\n // 100 + 100 + 64*2 + 136 = 464 bytes\n\n let (sum_signature, update_signature) = task_signatures();\n\n\n\n let payload = Update {\n\n sum_signature,\n\n update_signature,\n\n masked_model: mask_object(mask_obj_len),\n\n local_seed_dict: local_seed_dict(dict_len),\n\n };\n\n\n\n assert_eq!(payload.buffer_length(), mask_obj_len + dict_len + 64 * 2);\n\n payload\n\n}\n\n\n", "file_path": "rust/xaynet-core/src/testutils/multipart.rs", "rank": 50, "score": 108764.97095783016 }, { "content": "/// Simulate a participant generating keys and crafting a valid sum\n\n/// message for the given round parameters. The keys generated by the\n\n/// participants are returned along with the message.\n\npub fn new_sum_message(round_params: &RoundParameters) -> (Message, SigningKeyPair) {\n\n let signing_keys = SigningKeyPair::generate();\n\n let sum = Sum {\n\n sum_signature: signing_keys\n\n .secret\n\n .sign_detached(&[round_params.seed.as_slice(), b\"sum\"].concat()),\n\n ephm_pk: PublicEncryptKey::generate(),\n\n };\n\n let message = Message::new_sum(signing_keys.public, round_params.pk, sum);\n\n (message, signing_keys)\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/services/tests/utils.rs", "rank": 51, "score": 107542.32245135671 }, { "content": "/// An interface for slicing into cryptographic byte objects.\n\npub trait ByteObject: Sized {\n\n /// Length in bytes of this object\n\n const LENGTH: usize;\n\n\n\n /// Creates a new object with all the bytes initialized to `0`.\n\n fn zeroed() -> Self;\n\n\n\n /// Gets the object byte representation.\n\n fn as_slice(&self) -> &[u8];\n\n\n\n /// Creates an object from the given buffer.\n\n ///\n\n /// # Errors\n\n /// Returns `None` if the length of the byte-slice isn't equal to the length of the object.\n\n fn from_slice(bytes: &[u8]) -> Option<Self>;\n\n\n\n /// Creates an object from the given buffer.\n\n ///\n\n /// # Panics\n\n /// Panics if the length of the byte-slice isn't equal to the length of the object.\n", "file_path": "rust/xaynet-core/src/crypto/mod.rs", "rank": 52, "score": 107416.36988024408 }, { "content": "fn deserialize_env_filter<'de, D>(deserializer: D) -> Result<EnvFilter, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n struct EnvFilterVisitor;\n\n\n\n impl<'de> Visitor<'de> for EnvFilterVisitor {\n\n type Value = EnvFilter;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n write!(formatter, \"a valid tracing filter directive: https://docs.rs/tracing-subscriber/0.2.6/tracing_subscriber/filter/struct.EnvFilter.html#directives\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n EnvFilter::try_new(value)\n\n .map_err(|_| de::Error::invalid_value(serde::de::Unexpected::Str(value), &self))\n\n }\n\n }\n\n\n\n deserializer.deserialize_str(EnvFilterVisitor)\n\n}\n", "file_path": "rust/xaynet-server/src/settings/mod.rs", "rank": 53, "score": 104186.91783115477 }, { "content": "fn deserialize_redis_url<'de, D>(deserializer: D) -> Result<ConnectionInfo, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n struct ConnectionInfoVisitor;\n\n\n\n impl<'de> Visitor<'de> for ConnectionInfoVisitor {\n\n type Value = ConnectionInfo;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n write!(\n\n formatter,\n\n \"redis://[<username>][:<passwd>@]<hostname>[:port][/<db>]\"\n\n )\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n", "file_path": "rust/xaynet-server/src/settings/mod.rs", "rank": 54, "score": 104186.91783115477 }, { "content": "pub fn serialize_message(message: &Message, participant_signing_keys: &SigningKeyPair) -> Vec<u8> {\n\n let mut buf = vec![0; message.buffer_length()];\n\n message.to_bytes(&mut buf, &participant_signing_keys.secret);\n\n buf\n\n}\n", "file_path": "rust/xaynet-server/src/services/tests/utils.rs", "rank": 55, "score": 102340.9001328475 }, { "content": "pub fn generate_updater(seed: &RoundSeed, sum_ratio: f64, update_ratio: f64) -> Participant {\n\n loop {\n\n let mut participant = Participant::new().unwrap();\n\n participant.compute_signatures(seed.as_slice());\n\n if participant.check_task(sum_ratio, update_ratio) == Task::Update {\n\n return participant;\n\n };\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/state_machine/tests/utils.rs", "rank": 56, "score": 99596.87207730212 }, { "content": "pub fn generate_summer(seed: &RoundSeed, sum_ratio: f64, update_ratio: f64) -> Participant {\n\n loop {\n\n let mut participant = Participant::new().unwrap();\n\n participant.compute_signatures(seed.as_slice());\n\n if participant.check_task(sum_ratio, update_ratio) == Task::Sum {\n\n return participant;\n\n };\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/state_machine/tests/utils.rs", "rank": 57, "score": 99596.87207730212 }, { "content": "/// Checks pathological cases of deadlocks.\n\nfn validate_fractions(s: &PetSettings) -> Result<(), ValidationError> {\n\n if 0. < s.sum\n\n && s.sum < 1.\n\n && 0. < s.update\n\n && s.update < 1.\n\n && 0. < s.sum + s.update - s.sum * s.update\n\n && s.sum + s.update - s.sum * s.update < 1.\n\n {\n\n Ok(())\n\n } else {\n\n Err(ValidationError::new(\"starvation\"))\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize, Clone)]\n\n/// REST API settings.\n\npub struct ApiSettings {\n\n /// The address to which the REST API should be bound.\n\n ///\n\n /// # Examples\n", "file_path": "rust/xaynet-server/src/settings/mod.rs", "rank": 58, "score": 99072.77545987848 }, { "content": "/// Checks PET settings.\n\nfn validate_pet(s: &PetSettings) -> Result<(), ValidationError> {\n\n validate_phase_times(s)?;\n\n validate_fractions(s)\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/settings/mod.rs", "rank": 59, "score": 99072.77545987848 }, { "content": "/// A helper that performs division with ceil.\n\n///\n\n/// # Panic\n\n///\n\n/// This function panic if `d` is 0.\n\nfn ceiling_div(n: usize, d: usize) -> usize {\n\n (n + d - 1) / d\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n #[should_panic(expected = \"no chunk with ID 0\")]\n\n fn test_0() {\n\n let data = vec![];\n\n let chunker = Chunker::new(&data, 0);\n\n assert_eq!(chunker.nb_chunks(), 0);\n\n chunker.get_chunk(0);\n\n }\n\n\n\n #[test]\n\n #[should_panic(expected = \"no chunk with ID 5\")]\n\n fn test_1() {\n", "file_path": "rust/xaynet-client/src/utils/multipart/chunker.rs", "rank": 60, "score": 98362.75497773169 }, { "content": "/// Checks validity of phase time ranges.\n\nfn validate_phase_times(s: &PetSettings) -> Result<(), ValidationError> {\n\n if s.min_sum_time <= s.max_sum_time && s.min_update_time <= s.max_update_time {\n\n Ok(())\n\n } else {\n\n Err(ValidationError::new(\"invalid phase time range(s)\"))\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/settings/mod.rs", "rank": 61, "score": 97781.56381693708 }, { "content": "#[derive(Debug, StructOpt)]\n\n#[structopt(name = \"Test Drive\")]\n\nstruct Opt {\n\n #[structopt(\n\n default_value = \"http://127.0.0.1:8081\",\n\n short,\n\n help = \"The URL of the coordinator\"\n\n )]\n\n url: String,\n\n\n\n #[structopt(default_value = \"4\", short, help = \"The length of the model\")]\n\n len: u32,\n\n\n\n #[structopt(\n\n default_value = \"1\",\n\n short,\n\n help = \"The time period at which to poll for service data, in seconds\"\n\n )]\n\n period: u64,\n\n\n\n #[structopt(default_value = \"10\", short, help = \"The number of clients\")]\n\n nb_client: u32,\n", "file_path": "rust/examples/test-drive-net.rs", "rank": 62, "score": 74350.48418350163 }, { "content": "struct Send {\n\n sender: Expr,\n\n metrics: Vec<Expr>,\n\n}\n\n\n\nimpl Parse for Send {\n\n fn parse(input: ParseStream) -> Result<Self> {\n\n // metrics!(sender, metric_1);\n\n let sender = input.parse()?; // sender\n\n let mut metrics = Vec::new();\n\n\n\n // at least one metric is required, otherwise parse will fail.\n\n input.parse::<Token![,]>()?; // ,\n\n let metric = input.parse()?; // metric_1\n\n metrics.push(metric);\n\n\n\n // metrics!(sender, metric_1, metric_N);\n\n loop {\n\n if input.is_empty() {\n\n break;\n", "file_path": "rust/xaynet-macros/src/lib.rs", "rank": 63, "score": 74345.46125781846 }, { "content": "#[derive(Debug)]\n\nstruct InvalidPublicKey;\n\n\n\nimpl warp::reject::Reject for InvalidPublicKey {}\n\n\n\n/// Handles `warp` rejections of bad requests.\n\nasync fn handle_reject(err: warp::Rejection) -> Result<impl warp::Reply, Infallible> {\n\n let code = if err.is_not_found() {\n\n StatusCode::NOT_FOUND\n\n } else if let Some(InvalidPublicKey) = err.find() {\n\n StatusCode::BAD_REQUEST\n\n } else {\n\n error!(\"unhandled rejection: {:?}\", err);\n\n StatusCode::INTERNAL_SERVER_ERROR\n\n };\n\n // reply with empty body; the status code is the interesting part\n\n Ok(warp::reply::with_status(Vec::new(), code))\n\n}\n", "file_path": "rust/xaynet-server/src/rest.rs", "rank": 64, "score": 72733.51404243275 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Parser;\n\n\n\nimpl<T> Service<RawMessage<T>> for Parser\n\nwhere\n\n T: AsRef<[u8]> + Send + 'static,\n\n{\n\n type Response = Message;\n\n type Error = ServiceError;\n\n type Future = future::Ready<Result<Self::Response, Self::Error>>;\n\n\n\n fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n Poll::Ready(Ok(()))\n\n }\n\n\n\n fn call(&mut self, req: RawMessage<T>) -> Self::Future {\n\n let bytes = req.buffer.inner();\n\n future::ready(Message::from_byte_slice(&bytes).map_err(ServiceError::Parsing))\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/services/messages/message_parser.rs", "rank": 65, "score": 71969.01909633649 }, { "content": "#[derive(Clone)]\n\nstruct RawDecryptor {\n\n /// A listener to retrieve the latest coordinator keys. These are\n\n /// necessary for decrypting messages and verifying their\n\n /// signature.\n\n keys_events: EventListener<EncryptKeyPair>,\n\n\n\n /// Thread-pool the CPU-intensive tasks are offloaded to.\n\n thread_pool: Arc<ThreadPool>,\n\n}\n\n\n\nimpl<T> Service<T> for RawDecryptor\n\nwhere\n\n T: AsRef<[u8]> + Sync + Send + 'static,\n\n{\n\n type Response = Vec<u8>;\n\n type Error = ServiceError;\n\n #[allow(clippy::type_complexity)]\n\n type Future =\n\n Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + 'static + Send + Sync>>;\n\n\n", "file_path": "rust/xaynet-server/src/services/messages/decryptor.rs", "rank": 66, "score": 71969.01909633649 }, { "content": "struct SignatureVerifierLayer {\n\n thread_pool: Arc<ThreadPool>,\n\n}\n\n\n\nimpl<S> Layer<S> for SignatureVerifierLayer {\n\n type Service = ConcurrencyLimit<SignatureVerifier<S>>;\n\n\n\n fn layer(&self, service: S) -> Self::Service {\n\n let limit = self.thread_pool.current_num_threads();\n\n // FIXME: we actually want to limit the concurrency of just\n\n // the SignatureVerifier middleware. Right now we're limiting\n\n // the whole stack of services.\n\n ConcurrencyLimit::new(\n\n SignatureVerifier {\n\n thread_pool: self.thread_pool.clone(),\n\n next_svc: service,\n\n },\n\n limit,\n\n )\n\n }\n\n}\n\n\n\n/// A service that verifies the coordinator public key embedded in PET\n\n/// messsages\n", "file_path": "rust/xaynet-server/src/services/messages/message_parser.rs", "rank": 67, "score": 70516.107026349 }, { "content": "struct PhaseFilterLayer {\n\n phase: EventListener<PhaseName>,\n\n}\n\n\n\nimpl<S> Layer<S> for PhaseFilterLayer {\n\n type Service = PhaseFilter<S>;\n\n\n\n fn layer(&self, service: S) -> PhaseFilter<S> {\n\n PhaseFilter {\n\n phase: self.phase.clone(),\n\n next_svc: service,\n\n }\n\n }\n\n}\n\n\n\n/// A service for verifying the signature of PET messages\n\n///\n\n/// Since this is a CPU-intensive task for large messages, this\n\n/// service offloads the processing to a `rayon` thread-pool to avoid\n\n/// overloading the tokio thread-pool with blocking tasks.\n", "file_path": "rust/xaynet-server/src/services/messages/message_parser.rs", "rank": 68, "score": 70516.107026349 }, { "content": "struct BufferWrapperLayer;\n\n\n\nimpl<S> Layer<S> for BufferWrapperLayer {\n\n type Service = BufferWrapper<S>;\n\n\n\n fn layer(&self, service: S) -> BufferWrapper<S> {\n\n BufferWrapper(service)\n\n }\n\n}\n\n\n\n/// A service that discards messages that are not expected in the current phase\n", "file_path": "rust/xaynet-server/src/services/messages/message_parser.rs", "rank": 69, "score": 70516.107026349 }, { "content": "/// Converts a PET message handler into a `warp` filter.\n\nfn with_message_handler(\n\n handler: PetMessageHandler,\n\n) -> impl Filter<Extract = (PetMessageHandler,), Error = Infallible> + Clone {\n\n warp::any().map(move || handler.clone())\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/rest.rs", "rank": 70, "score": 70105.93395488708 }, { "content": "struct Inner<I>\n\nwhere\n\n I: Iterator,\n\n{\n\n /// The iterator we're chunking\n\n iter: I,\n\n /// Size of each chunk. Note that the last chunk may be smaller\n\n chunk_size: usize,\n\n /// Number of chunks that have been yielded\n\n nb_chunks: usize,\n\n /// Next item from `iter`. By buffering it, we can know when `iter`\n\n /// is exhausted.\n\n next: Option<(usize, I::Item)>,\n\n}\n\n\n\nimpl<I> fmt::Debug for Inner<I>\n\nwhere\n\n I: Iterator + fmt::Debug,\n\n I::Item: fmt::Debug,\n\n{\n", "file_path": "rust/xaynet-core/src/message/utils/chunkable_iterator.rs", "rank": 71, "score": 69166.23573487479 }, { "content": "struct CoordinatorPublicKeyValidatorLayer {\n\n keys: EventListener<EncryptKeyPair>,\n\n}\n\n\n\nimpl<S> Layer<S> for CoordinatorPublicKeyValidatorLayer {\n\n type Service = CoordinatorPublicKeyValidator<S>;\n\n\n\n fn layer(&self, service: S) -> CoordinatorPublicKeyValidator<S> {\n\n CoordinatorPublicKeyValidator {\n\n keys: self.keys.clone(),\n\n next_svc: service,\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/services/messages/message_parser.rs", "rank": 72, "score": 69156.49013524235 }, { "content": "/// An interface for serializable message types.\n\n///\n\n/// See also [`FromBytes`] for deserialization.\n\npub trait ToBytes {\n\n /// The length of the buffer for encoding the type.\n\n fn buffer_length(&self) -> usize;\n\n\n\n /// Serialize the type in the given buffer.\n\n ///\n\n /// # Panics\n\n /// This method may panic if the given buffer is too small. Thus, [`buffer_length()`] must be\n\n /// called prior to calling this, and a large enough buffer must be provided.\n\n ///\n\n /// [`buffer_length()`]: #method.buffer_length\n\n fn to_bytes<T: AsMut<[u8]> + AsRef<[u8]>>(&self, buffer: &mut T);\n\n}\n\n\n", "file_path": "rust/xaynet-core/src/message/traits.rs", "rank": 73, "score": 68188.51573290885 }, { "content": "#[test]\n\nfn test_validate_pet() {\n\n assert!(validate_pet(&PetSettings::default()).is_ok());\n\n\n\n // phase times\n\n assert!(validate_pet(&PetSettings {\n\n min_sum_time: 2,\n\n max_sum_time: 1,\n\n ..PetSettings::default()\n\n })\n\n .is_err());\n\n assert!(validate_pet(&PetSettings {\n\n min_update_time: 2,\n\n max_update_time: 1,\n\n ..PetSettings::default()\n\n })\n\n .is_err());\n\n\n\n // fractions\n\n assert!(validate_pet(&PetSettings {\n\n sum: 0.,\n", "file_path": "rust/xaynet-server/src/settings/tests/mod.rs", "rank": 74, "score": 67824.5262018051 }, { "content": "#[test]\n\nfn test_settings_new() {\n\n assert!(Settings::new(PathBuf::from(\"../../configs/config.toml\")).is_ok());\n\n assert!(Settings::new(PathBuf::from(\"\")).is_err());\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/settings/tests/mod.rs", "rank": 75, "score": 67824.5262018051 }, { "content": "/// A type that hold a un-parsed message\n\nstruct RawMessage<T> {\n\n /// The buffer that contains the message to parse\n\n buffer: Arc<MessageBuffer<T>>,\n\n}\n\n\n\nimpl<T> Clone for RawMessage<T> {\n\n fn clone(&self) -> Self {\n\n Self {\n\n buffer: self.buffer.clone(),\n\n }\n\n }\n\n}\n\n\n\nimpl<T> From<MessageBuffer<T>> for RawMessage<T> {\n\n fn from(buffer: MessageBuffer<T>) -> Self {\n\n RawMessage {\n\n buffer: Arc::new(buffer),\n\n }\n\n }\n\n}\n\n\n\n/// A service that wraps a buffer `T` representing a message into a\n\n/// [`RawMessage<T>`]\n", "file_path": "rust/xaynet-server/src/services/messages/message_parser.rs", "rank": 76, "score": 67718.3848582842 }, { "content": "#[derive(Debug, Clone)]\n\nstruct SignatureVerifier<S> {\n\n /// Thread-pool the CPU-intensive tasks are offloaded to.\n\n thread_pool: Arc<ThreadPool>,\n\n /// The service to be called after the [`SignatureVerifier`]\n\n next_svc: S,\n\n}\n\n\n\nimpl<T, S> Service<RawMessage<T>> for SignatureVerifier<S>\n\nwhere\n\n T: AsRef<[u8]> + Sync + Send + 'static,\n\n S: Service<RawMessage<T>, Response = Message, Error = ServiceError>\n\n + Clone\n\n + Sync\n\n + Send\n\n + 'static,\n\n S::Future: Sync + Send + 'static,\n\n{\n\n type Response = Message;\n\n type Error = ServiceError;\n\n type Future = BoxedServiceFuture<Self::Response, Self::Error>;\n", "file_path": "rust/xaynet-server/src/services/messages/message_parser.rs", "rank": 77, "score": 67713.32366488728 }, { "content": "#[derive(Debug, Clone)]\n\nstruct PhaseFilter<S> {\n\n /// A listener to retrieve the current phase\n\n phase: EventListener<PhaseName>,\n\n /// Next service to be called\n\n next_svc: S,\n\n}\n\n\n\nimpl<T, S> Service<RawMessage<T>> for PhaseFilter<S>\n\nwhere\n\n T: AsRef<[u8]> + Send + 'static,\n\n S: Service<RawMessage<T>, Response = Message, Error = ServiceError>,\n\n S::Future: Sync + Send + 'static,\n\n{\n\n type Response = Message;\n\n type Error = ServiceError;\n\n type Future = BoxedServiceFuture<Self::Response, Self::Error>;\n\n\n\n fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n self.next_svc.poll_ready(cx)\n\n }\n", "file_path": "rust/xaynet-server/src/services/messages/message_parser.rs", "rank": 78, "score": 67713.32366488728 }, { "content": "type InnerService = BufferWrapper<\n\n PhaseFilter<ConcurrencyLimit<SignatureVerifier<CoordinatorPublicKeyValidator<Parser>>>>,\n\n>;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct MessageParser(InnerService);\n\n\n\nimpl<T> Service<T> for MessageParser\n\nwhere\n\n T: AsRef<[u8]> + Sync + Send + 'static,\n\n{\n\n type Response = Message;\n\n type Error = ServiceError;\n\n type Future = BoxedServiceFuture<Self::Response, Self::Error>;\n\n\n\n fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n <InnerService as Service<T>>::poll_ready(&mut self.0, cx)\n\n }\n\n\n\n fn call(&mut self, req: T) -> Self::Future {\n", "file_path": "rust/xaynet-server/src/services/messages/message_parser.rs", "rank": 79, "score": 67676.4564342131 }, { "content": "#[async_trait]\n\npub trait Fetcher {\n\n /// Fetch the parameters for the current round\n\n async fn round_params(&mut self) -> Result<RoundParamsResponse, FetchError>;\n\n\n\n /// Fetch the mask length for the current round. The sum\n\n /// participants need this value during the sum2 phase to derive\n\n /// masks from the update participant's masking seeds.\n\n async fn mask_length(&mut self) -> Result<MaskLengthResponse, FetchError>;\n\n\n\n /// Fetch the latest global model.\n\n async fn model(&mut self) -> Result<ModelResponse, FetchError>;\n\n\n\n /// Fetch the global seed dictionary. Each sum2 participant needs a\n\n /// different portion of that dictionary.\n\n async fn seed_dict(&mut self) -> Result<SeedDictResponse, FetchError>;\n\n\n\n /// Fetch the sum dictionary. The update participants need this\n\n /// dictionary to encrypt their masking seed for each sum\n\n /// participant.\n\n async fn sum_dict(&mut self) -> Result<SumDictResponse, FetchError>;\n\n}\n\n\n\n/// An error returned by the [`Fetcher`]'s method.\n\npub type FetchError = anyhow::Error;\n\n\n", "file_path": "rust/xaynet-server/src/services/fetchers/mod.rs", "rank": 80, "score": 67421.84759000542 }, { "content": "#[async_trait]\n\npub trait Handler {\n\n /// Handles a request.\n\n async fn handle_request(&mut self, req: StateMachineRequest) -> Result<(), RequestError>;\n\n}\n\n\n\n/// I/O interfaces.\n\n#[cfg_attr(test, derive(Debug))]\n\npub struct IO {\n\n /// The request receiver half.\n\n pub(in crate::state_machine) request_rx: RequestReceiver,\n\n /// The event publisher.\n\n pub(in crate::state_machine) events: EventPublisher,\n\n /// Redis client.\n\n pub(in crate::state_machine) redis: redis::Client,\n\n #[cfg(feature = \"model-persistence\")]\n\n /// S3 client.\n\n pub(in crate::state_machine) s3: s3::Client,\n\n #[cfg(feature = \"metrics\")]\n\n /// The metrics sender half.\n\n pub(in crate::state_machine) metrics_tx: MetricsSender,\n", "file_path": "rust/xaynet-server/src/state_machine/phases/mod.rs", "rank": 81, "score": 66685.81325597737 }, { "content": "#[async_trait]\n\npub trait Phase {\n\n /// Name of the current phase\n\n const NAME: PhaseName;\n\n\n\n /// Run this phase to completion\n\n async fn run(&mut self) -> Result<(), PhaseStateError>;\n\n\n\n /// Moves from this state to the next state.\n\n fn next(self) -> Option<StateMachine>;\n\n}\n\n\n\n/// A trait that must be implemented by a state to handle a request.\n", "file_path": "rust/xaynet-server/src/state_machine/phases/mod.rs", "rank": 82, "score": 66685.81325597737 }, { "content": "#[derive(Debug, Clone)]\n\nstruct CoordinatorPublicKeyValidator<S> {\n\n /// A listener to retrieve the latest coordinator keys\n\n keys: EventListener<EncryptKeyPair>,\n\n /// Next service to be called\n\n next_svc: S,\n\n}\n\n\n\nimpl<T, S> Service<RawMessage<T>> for CoordinatorPublicKeyValidator<S>\n\nwhere\n\n T: AsRef<[u8]> + Send + 'static,\n\n S: Service<RawMessage<T>, Response = Message, Error = ServiceError>,\n\n S::Future: Sync + Send + 'static,\n\n{\n\n type Response = Message;\n\n type Error = ServiceError;\n\n type Future = BoxedServiceFuture<Self::Response, Self::Error>;\n\n\n\n fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n self.next_svc.poll_ready(cx)\n\n }\n", "file_path": "rust/xaynet-server/src/services/messages/message_parser.rs", "rank": 83, "score": 66353.70677378065 }, { "content": "// `Message::to_bytes` takes a secret key as argument. It is not\n\n// actually used, since the message we generate already contains a\n\n// (dummy) signature.\n\nfn participant_sk() -> SecretSigningKey {\n\n SecretSigningKey::from_slice(vec![2; 64].as_slice()).unwrap()\n\n}\n\n\n", "file_path": "rust/benches/benches/messages.rs", "rank": 84, "score": 66074.45658454427 }, { "content": "/// An interface for deserializable message types.\n\n///\n\n/// See also [`ToBytes`] for serialization.\n\npub trait FromBytes: Sized {\n\n /// Deserialize the type from the given buffer.\n\n ///\n\n /// # Errors\n\n /// May fail if certain parts of the deserialized buffer don't pass message validity checks.\n\n fn from_byte_slice<T: AsRef<[u8]>>(buffer: &T) -> Result<Self, DecodeError>;\n\n\n\n fn from_byte_stream<I: Iterator<Item = u8> + ExactSizeIterator>(\n\n iter: &mut I,\n\n ) -> Result<Self, DecodeError>;\n\n}\n\n\n\nimpl<T> FromBytes for T\n\nwhere\n\n T: ByteObject,\n\n{\n\n fn from_byte_slice<U: AsRef<[u8]>>(buffer: &U) -> Result<Self, DecodeError> {\n\n Self::from_slice(buffer.as_ref())\n\n .ok_or_else(|| anyhow!(\"failed to deserialize byte object\"))\n\n }\n", "file_path": "rust/xaynet-core/src/message/traits.rs", "rank": 85, "score": 65274.79661496563 }, { "content": "#[derive(Debug, Clone)]\n\nstruct BufferWrapper<S>(S);\n\n\n\nimpl<S, T> Service<T> for BufferWrapper<S>\n\nwhere\n\n T: AsRef<[u8]> + Send + 'static,\n\n S: Service<RawMessage<T>, Response = Message, Error = ServiceError>,\n\n S::Future: Sync + Send + 'static,\n\n{\n\n type Response = Message;\n\n type Error = ServiceError;\n\n type Future = BoxedServiceFuture<Self::Response, Self::Error>;\n\n\n\n fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n self.0.poll_ready(cx)\n\n }\n\n\n\n fn call(&mut self, req: T) -> Self::Future {\n\n debug!(\"creating a RawMessage request\");\n\n match MessageBuffer::new(req) {\n\n Ok(buffer) => {\n", "file_path": "rust/xaynet-server/src/services/messages/message_parser.rs", "rank": 86, "score": 64726.91971487561 }, { "content": "fn dummy_seed_dict() -> SeedDict {\n\n let mut dict = HashMap::new();\n\n dict.insert(PublicSigningKey::fill_with(0xaa), dummy_update_dict());\n\n dict.insert(PublicSigningKey::fill_with(0xbb), dummy_update_dict());\n\n dict\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/services/tests/fetchers.rs", "rank": 87, "score": 63938.63602102683 }, { "content": "fn dummy_sum_dict() -> SumDict {\n\n let mut dict = HashMap::new();\n\n dict.insert(\n\n PublicSigningKey::fill_with(0xaa),\n\n PublicEncryptKey::fill_with(0xcc),\n\n );\n\n dict.insert(\n\n PublicSigningKey::fill_with(0xbb),\n\n PublicEncryptKey::fill_with(0xdd),\n\n );\n\n dict\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_sum_dict_svc() {\n\n let (mut publisher, subscriber) = new_event_channels();\n\n\n\n let mut task = Spawn::new(SumDictService::new(&subscriber));\n\n assert_ready!(task.poll_ready()).unwrap();\n\n\n", "file_path": "rust/xaynet-server/src/services/tests/fetchers.rs", "rank": 88, "score": 63938.63602102683 }, { "content": "fn dummy_update_dict() -> UpdateSeedDict {\n\n let mut dict = HashMap::new();\n\n dict.insert(\n\n PublicSigningKey::fill_with(0x11),\n\n EncryptedMaskSeed::fill_with(0x11),\n\n );\n\n dict.insert(\n\n PublicSigningKey::fill_with(0x22),\n\n EncryptedMaskSeed::fill_with(0x22),\n\n );\n\n dict\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_seed_dict_svc() {\n\n let (mut publisher, subscriber) = new_event_channels();\n\n\n\n let mut task = Spawn::new(SeedDictService::new(&subscriber));\n\n assert_ready!(task.poll_ready()).unwrap();\n\n\n", "file_path": "rust/xaynet-server/src/services/tests/fetchers.rs", "rank": 89, "score": 63273.838372267135 }, { "content": "pub trait ChunkableIterator: Iterator + Sized {\n\n /// Return an _iterable_ that can chunk the iterator.\n\n ///\n\n /// Yield subiterators (chunks) that each yield a fixed number of\n\n /// elements, determined by `size`. The last chunk will be shorter\n\n /// if there aren't enough elements.\n\n ///\n\n /// Note that the chunks *must* be fully consumed in the order\n\n /// they are yielded. Otherwise, they will panic.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```ignore\n\n /// # use std::iter::Iterator;\n\n /// # use xaynet_core::message::utils::ChunkableIterator;\n\n ///\n\n /// let chunks = vec![0, 1, 2, 3, 4].into_iter().chunks(2);\n\n /// let mut chunks_iter = chunks.into_iter();\n\n ///\n\n /// let mut chunk_1 = chunks_iter.next().unwrap();\n", "file_path": "rust/xaynet-core/src/message/utils/chunkable_iterator.rs", "rank": 90, "score": 60672.68497844186 }, { "content": "/// An interface to convert a collection of primitive values into an iterator of numerical values.\n\n///\n\n/// This trait is used to convert primitive types ([`f32`], [`f64`], [`i32`], [`i64`]) into a\n\n/// [`Model`], which has its own internal representation of the weights. The opposite trait is\n\n/// [`IntoPrimitives`].\n\npub trait FromPrimitives<P: Debug>: Sized {\n\n /// Creates an iterator from primitive values that yields converted numerical values.\n\n ///\n\n /// # Errors\n\n /// Yields an error for the first encountered primitive value that can't be converted into a\n\n /// numerical value due to not being finite.\n\n fn from_primitives<I: Iterator<Item = P>>(iter: I) -> Result<Self, PrimitiveCastError<P>>;\n\n\n\n /// Creates an iterator from primitive values that yields converted numerical values.\n\n ///\n\n /// If a primitive value cannot be directly converted into a numerical value due to not being\n\n /// finite, it is clamped.\n\n fn from_primitives_bounded<I: Iterator<Item = P>>(iter: I) -> Self;\n\n}\n\n\n\nimpl IntoPrimitives<i32> for Model {\n\n fn into_primitives(self) -> Box<dyn Iterator<Item = Result<i32, ModelCastError>>> {\n\n Box::new(self.0.into_iter().map(|i| {\n\n i.to_integer().to_i32().ok_or(ModelCastError {\n\n weight: i,\n", "file_path": "rust/xaynet-core/src/mask/model.rs", "rank": 91, "score": 60516.28711491204 }, { "content": "/// An interface to convert a collection of numerical values into an iterator of primitive values.\n\n///\n\n/// This trait is used to convert a [`Model`], which has its own internal representation of the\n\n/// weights, into primitive types ([`f32`], [`f64`], [`i32`], [`i64`]). The opposite trait is\n\n/// [`FromPrimitives`].\n\npub trait IntoPrimitives<P: 'static>: Sized {\n\n /// Creates an iterator from numerical values that yields converted primitive values.\n\n ///\n\n /// # Errors\n\n /// Yields an error for each numerical value that can't be converted into a primitive value.\n\n fn into_primitives(self) -> Box<dyn Iterator<Item = Result<P, ModelCastError>>>;\n\n\n\n /// Creates an iterator from numerical values that yields converted primitive values.\n\n ///\n\n /// # Errors\n\n /// Yields an error for each numerical value that can't be converted into a primitive value.\n\n fn to_primitives(&self) -> Box<dyn Iterator<Item = Result<P, ModelCastError>>>;\n\n\n\n /// Consume this model and into an iterator that yields `P` values.\n\n ///\n\n /// # Panics\n\n /// Panics if a numerical value can't be converted into a primitive value.\n\n fn into_primitives_unchecked(self) -> Box<dyn Iterator<Item = P>> {\n\n Box::new(\n\n self.into_primitives()\n\n .map(|res| res.expect(\"conversion to primitive type failed\")),\n\n )\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-core/src/mask/model.rs", "rank": 92, "score": 60516.28711491204 }, { "content": "use crate::{\n\n api::ApiClient,\n\n mobile_client::participant::{\n\n Awaiting,\n\n Participant,\n\n ParticipantSettings,\n\n Role,\n\n Sum,\n\n Sum2,\n\n Update,\n\n },\n\n utils::multipart::MessageEncoder,\n\n ClientError,\n\n};\n\nuse derive_more::From;\n\nuse xaynet_core::{\n\n common::RoundParameters,\n\n crypto::ByteObject,\n\n mask::Model,\n\n message::Payload,\n\n InitError,\n\n};\n\n\n\nuse crate::PetError;\n\n\n\n#[async_trait]\n", "file_path": "rust/xaynet-client/src/mobile_client/client.rs", "rank": 93, "score": 58206.856257529886 }, { "content": "\n\nimpl ClientState<Update> {\n\n fn new(participant: Participant<Update>, round_params: RoundParameters) -> Self {\n\n Self {\n\n participant,\n\n round_params,\n\n }\n\n }\n\n\n\n async fn next<L: LocalModel, T: ApiClient>(\n\n mut self,\n\n api: &mut T,\n\n local_model: &mut L,\n\n ) -> ClientStateMachine {\n\n info!(\"selected to update\");\n\n\n\n match self.run(api, local_model).await {\n\n Ok(_) | Err(ClientError::RoundOutdated) => self.reset().into(),\n\n Err(err) => {\n\n error!(\"{:?}\", err);\n", "file_path": "rust/xaynet-client/src/mobile_client/client.rs", "rank": 94, "score": 58205.494393758694 }, { "content": " info!(\"sum participant completed a round\");\n\n Ok(())\n\n }\n\n}\n\n\n\n#[derive(From, Serialize, Deserialize)]\n\npub enum ClientStateMachine {\n\n Awaiting(ClientState<Awaiting>),\n\n Sum(ClientState<Sum>),\n\n Update(ClientState<Update>),\n\n Sum2(ClientState<Sum2>),\n\n}\n\n\n\nimpl ClientStateMachine {\n\n pub fn new(participant_settings: ParticipantSettings) -> Result<Self, InitError> {\n\n // crucial: init must be called before anything else in this module\n\n sodiumoxide::init().or(Err(InitError))?;\n\n\n\n Ok(ClientState::<Awaiting>::new(\n\n Participant::<Awaiting>::new(participant_settings.into()),\n", "file_path": "rust/xaynet-client/src/mobile_client/client.rs", "rank": 95, "score": 58204.920106791884 }, { "content": " }\n\n }\n\n }\n\n}\n\n\n\nimpl ClientState<Sum> {\n\n fn new(participant: Participant<Sum>, round_params: RoundParameters) -> Self {\n\n Self {\n\n participant,\n\n round_params,\n\n }\n\n }\n\n\n\n async fn next<T: ApiClient>(mut self, api: &mut T) -> ClientStateMachine {\n\n info!(\"selected to sum\");\n\n\n\n match self.run(api).await {\n\n Ok(_) => self.into_sum2().into(),\n\n Err(ClientError::RoundOutdated) => self.reset().into(),\n\n Err(err) => {\n", "file_path": "rust/xaynet-client/src/mobile_client/client.rs", "rank": 96, "score": 58204.60758732428 }, { "content": " async fn next<T: ApiClient>(mut self, api: &mut T) -> ClientStateMachine {\n\n info!(\"selected to sum2\");\n\n\n\n match self.run(api).await {\n\n Ok(_) | Err(ClientError::RoundOutdated) => self.reset().into(),\n\n Err(err) => {\n\n error!(\"{:?}\", err);\n\n self.into()\n\n }\n\n }\n\n }\n\n\n\n async fn run<T: ApiClient>(&mut self, api: &mut T) -> Result<(), ClientError<T::Error>> {\n\n self.check_round_freshness(api).await?;\n\n\n\n debug!(\"polling for model/mask length\");\n\n let length = api\n\n .get_mask_length()\n\n .await?\n\n .ok_or(ClientError::TooEarly(\"length\"))?;\n", "file_path": "rust/xaynet-client/src/mobile_client/client.rs", "rank": 97, "score": 58203.43978888537 }, { "content": "\n\n // Unwrapping is fine because this only errors out if the\n\n // payload is a Chunk, which we never create in the client.\n\n let encoder = MessageEncoder::<'_, Type>::new(\n\n &self.participant,\n\n payload,\n\n self.round_params.pk,\n\n max_payload_size,\n\n )\n\n .unwrap();\n\n for part in encoder {\n\n let data = self.round_params.pk.encrypt(part.as_slice());\n\n api.send_message(data).await?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl ClientState<Awaiting> {\n\n fn new(participant: Participant<Awaiting>, round_params: RoundParameters) -> Self {\n", "file_path": "rust/xaynet-client/src/mobile_client/client.rs", "rank": 98, "score": 58202.53526442973 }, { "content": " self.into()\n\n }\n\n }\n\n }\n\n\n\n async fn run<L: LocalModel, T: ApiClient>(\n\n &mut self,\n\n api: &mut T,\n\n local_model: &mut L,\n\n ) -> Result<(), ClientError<T::Error>> {\n\n self.check_round_freshness(api).await?;\n\n\n\n debug!(\"polling for local model\");\n\n let local_model = local_model\n\n .get_local_model()\n\n .await\n\n .ok_or(ClientError::TooEarly(\"local model\"))?;\n\n\n\n debug!(\"polling for sum dict\");\n\n let sums = api\n", "file_path": "rust/xaynet-client/src/mobile_client/client.rs", "rank": 99, "score": 58202.453621103115 } ]
Rust
src/agent/onefuzz-agent/src/local/tui.rs
henryzz0/onefuzz
cb0701b2f2daf5b7b6d71bec9acd8dc0e329e1c3
use crate::local::common::UiEvent; use anyhow::{Context, Result}; use crossterm::{ event::{self, Event, KeyCode}, execute, terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen}, }; use futures::{StreamExt, TryStreamExt}; use log::Level; use onefuzz::utils::try_wait_all_join_handles; use std::{ collections::HashMap, io::{self, Stdout, Write}, path::PathBuf, thread::{self, JoinHandle}, time::Duration, }; use tokio::{ sync::mpsc::{self, UnboundedReceiver}, time, }; use tui::{ backend::CrosstermBackend, layout::{Constraint, Corner, Direction, Layout}, style::{Color, Modifier, Style}, text::{Span, Spans}, widgets::{Block, Borders}, widgets::{List, ListItem, ListState}, Terminal, }; use arraydeque::{ArrayDeque, Wrapping}; #[derive(Debug, thiserror::Error)] enum UiLoopError { #[error("program exiting")] Exit, #[error("error")] Anyhow(anyhow::Error), } impl From<anyhow::Error> for UiLoopError { fn from(e: anyhow::Error) -> Self { Self::Anyhow(e) } } impl From<std::io::Error> for UiLoopError { fn from(e: std::io::Error) -> Self { Self::Anyhow(e.into()) } } const LOGS_BUFFER_SIZE: usize = 100; const TICK_RATE: Duration = Duration::from_millis(250); #[derive(Debug)] enum TerminalEvent { Input(Event), Tick, FileCount { dir: PathBuf, count: usize }, Quit, } struct UiLoopState { pub logs: ArrayDeque<[(Level, String); LOGS_BUFFER_SIZE], Wrapping>, pub file_count: HashMap<PathBuf, usize>, pub file_count_state: ListState, pub file_monitors: Vec<JoinHandle<Result<()>>>, pub log_event_receiver: mpsc::UnboundedReceiver<(Level, String)>, pub terminal: Terminal<CrosstermBackend<Stdout>>, } impl UiLoopState { fn new( terminal: Terminal<CrosstermBackend<Stdout>>, log_event_receiver: mpsc::UnboundedReceiver<(Level, String)>, ) -> Self { Self { log_event_receiver, logs: Default::default(), file_count: Default::default(), file_count_state: Default::default(), file_monitors: Default::default(), terminal, } } } pub struct TerminalUi { pub task_events: mpsc::UnboundedSender<UiEvent>, task_event_receiver: mpsc::UnboundedReceiver<UiEvent>, ui_event_tx: mpsc::UnboundedSender<TerminalEvent>, ui_event_rx: mpsc::UnboundedReceiver<TerminalEvent>, } impl TerminalUi { pub fn init() -> Result<Self> { let (task_event_sender, task_event_receiver) = mpsc::unbounded_channel(); let (ui_event_tx, ui_event_rx) = mpsc::unbounded_channel(); Ok(Self { task_events: task_event_sender, task_event_receiver, ui_event_tx, ui_event_rx, }) } pub async fn run(self, timeout: Option<Duration>) -> Result<()> { enable_raw_mode()?; let mut stdout = io::stdout(); execute!(stdout, EnterAlternateScreen)?; let backend = CrosstermBackend::new(stdout); let mut terminal = Terminal::new(backend)?; terminal.clear()?; let (log_event_sender, log_event_receiver) = mpsc::unbounded_channel(); let initial_state = UiLoopState::new(terminal, log_event_receiver); env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")) .format(move |_buf, record| { let _r = log_event_sender.send((record.level(), format!("{}", record.args()))); Ok(()) }) .init(); let tick_event_tx_clone = self.ui_event_tx.clone(); let tick_event_handle = tokio::spawn(async { Self::ticking(tick_event_tx_clone).await.context("ticking") }); let keyboard_ui_event_tx = self.ui_event_tx.clone(); let _keyboard_event_handle = Self::read_keyboard_events(keyboard_ui_event_tx); let task_event_receiver = self.task_event_receiver; let ui_event_tx = self.ui_event_tx.clone(); let external_event_handle = tokio::spawn(Self::read_commands(ui_event_tx, task_event_receiver)); let ui_loop = tokio::spawn(Self::ui_loop(initial_state, self.ui_event_rx)); let mut task_handles = vec![tick_event_handle, ui_loop, external_event_handle]; if let Some(timeout) = timeout { let ui_event_tx = self.ui_event_tx.clone(); let timeout_task = tokio::spawn(async move { time::delay_for(timeout).await; let _ = ui_event_tx.send(TerminalEvent::Quit); Ok(()) }); task_handles.push(timeout_task); } try_wait_all_join_handles(task_handles) .await .context("ui_loop")?; Ok(()) } async fn ticking(ui_event_tx: mpsc::UnboundedSender<TerminalEvent>) -> Result<()> { let mut interval = tokio::time::interval(TICK_RATE); loop { interval.tick().await; if let Err(_err) = ui_event_tx.send(TerminalEvent::Tick) { break; } } Ok(()) } fn read_keyboard_events( ui_event_tx: mpsc::UnboundedSender<TerminalEvent>, ) -> JoinHandle<Result<()>> { thread::spawn(move || loop { if event::poll(Duration::from_secs(1))? { let event = event::read()?; if let Err(_err) = ui_event_tx.send(TerminalEvent::Input(event)) { return Ok(()); } } }) } async fn read_commands( ui_event_tx: mpsc::UnboundedSender<TerminalEvent>, mut external_event_rx: mpsc::UnboundedReceiver<UiEvent>, ) -> Result<()> { while let Some(UiEvent::FileCount { dir, count }) = external_event_rx.recv().await { if ui_event_tx .send(TerminalEvent::FileCount { dir, count }) .is_err() { break; } } Ok(()) } fn take_available_logs<T>( receiver: &mut UnboundedReceiver<T>, size: usize, buffer: &mut ArrayDeque<[T; LOGS_BUFFER_SIZE], Wrapping>, ) { let mut count = 0; while let Ok(v) = receiver.try_recv() { count += 1; buffer.push_front(v); if count >= size { break; } } } async fn refresh_ui(ui_state: UiLoopState) -> Result<UiLoopState, UiLoopError> { let mut logs = ui_state.logs; let mut file_count_state = ui_state.file_count_state; let file_count = ui_state.file_count; let mut log_event_receiver = ui_state.log_event_receiver; let mut terminal = ui_state.terminal; Self::take_available_logs(&mut log_event_receiver, 10, &mut logs); terminal.draw(|f| { let chunks = Layout::default() .direction(Direction::Vertical) .constraints([Constraint::Percentage(25), Constraint::Percentage(75)].as_ref()) .split(f.size()); let mut sorted_file_count = file_count.iter().collect::<Vec<_>>(); sorted_file_count.sort_by(|(p1, _), (p2, _)| p1.cmp(p2)); let files = sorted_file_count .iter() .map(|(path, count)| { ListItem::new(Spans::from(vec![ Span::raw( path.file_name() .map(|f| f.to_string_lossy()) .unwrap_or_default(), ), Span::raw(": "), Span::raw(format!("{}", count)), ])) }) .collect::<Vec<_>>(); let log_list = List::new(files) .block(Block::default().borders(Borders::ALL).title("files")) .highlight_style(Style::default().add_modifier(Modifier::BOLD)) .start_corner(Corner::TopLeft); f.render_stateful_widget(log_list, chunks[0], &mut file_count_state); let log_items = logs .iter() .map(|(level, log)| { let style = match level { Level::Debug => Style::default().fg(Color::Magenta), Level::Error => Style::default().fg(Color::Red), Level::Warn => Style::default().fg(Color::Yellow), Level::Info => Style::default().fg(Color::Blue), Level::Trace => Style::default(), }; ListItem::new(Spans::from(vec![ Span::styled(format!("{:<9}", level), style), Span::raw(" "), Span::raw(log), ])) }) .collect::<Vec<_>>(); let log_list = List::new(log_items) .block(Block::default().borders(Borders::ALL).title("Logs")) .start_corner(Corner::BottomLeft); f.render_widget(log_list, chunks[1]); })?; Ok(UiLoopState { logs, file_count_state, file_count, terminal, log_event_receiver, ..ui_state }) } async fn on_key_down(ui_state: UiLoopState) -> Result<UiLoopState, UiLoopError> { let mut file_count_state = ui_state.file_count_state; let count = ui_state.file_count.len(); let i = file_count_state .selected() .map(|i| { if count == 0 { 0 } else { (i + count + 1) % count } }) .unwrap_or_default(); file_count_state.select(Some(i)); Ok(UiLoopState { file_count_state, ..ui_state }) } async fn on_key_up(ui_state: UiLoopState) -> Result<UiLoopState, UiLoopError> { let mut file_count_state = ui_state.file_count_state; let count = ui_state.file_count.len(); let i = file_count_state .selected() .map(|i| { if count == 0 { 0 } else { (i + count - 1) % count } }) .unwrap_or_default(); file_count_state.select(Some(i)); Ok(UiLoopState { file_count_state, ..ui_state }) } async fn on_quit(ui_state: UiLoopState) -> Result<UiLoopState, UiLoopError> { let mut terminal = ui_state.terminal; disable_raw_mode().map_err(|e| anyhow!("{:?}", e))?; execute!(terminal.backend_mut(), LeaveAlternateScreen).map_err(|e| anyhow!("{:?}", e))?; terminal.show_cursor()?; Err(UiLoopError::Exit) } async fn on_file_count( ui_state: UiLoopState, dir: PathBuf, count: usize, ) -> Result<UiLoopState, UiLoopError> { let mut file_count = ui_state.file_count; file_count.insert(dir, count); Ok(UiLoopState { file_count, ..ui_state }) } async fn ui_loop( initial_state: UiLoopState, ui_event_rx: mpsc::UnboundedReceiver<TerminalEvent>, ) -> Result<()> { let loop_result = ui_event_rx .map(Ok) .try_fold(initial_state, |ui_state, event| async { match event { TerminalEvent::Tick => Self::refresh_ui(ui_state).await, TerminalEvent::Input(Event::Key(k)) => match k.code { KeyCode::Char('q') => Self::on_quit(ui_state).await, KeyCode::Down => Self::on_key_down(ui_state).await, KeyCode::Up => Self::on_key_up(ui_state).await, _ => Ok(ui_state), }, TerminalEvent::FileCount { dir, count } => { Self::on_file_count(ui_state, dir, count).await } TerminalEvent::Quit => Self::on_quit(ui_state).await, _ => Ok(ui_state), } }) .await; match loop_result { Err(UiLoopError::Exit) | Ok(_) => Ok(()), Err(UiLoopError::Anyhow(e)) => Err(e), } } }
use crate::local::common::UiEvent; use anyhow::{Context, Result}; use crossterm::{ event::{self, Event, KeyCode}, execute, terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen}, }; use futures::{StreamExt, TryStreamExt}; use log::Level; use onefuzz::utils::try_wait_all_join_handles; use std::{ collections::HashMap, io::{self, Stdout, Write}, path::PathBuf, thread::{self, JoinHandle}, time::Duration, }; use tokio::{ sync::mpsc::{self, UnboundedReceiver}, time, }; use tui::{ backend::CrosstermBackend, layout::{Constraint, Corner, Direction, Layout}, style::{Color, Modifier, Style}, text::{Span, Spans}, widgets::{Block, Borders}, widgets::{List, ListItem, ListState}, Terminal, }; use arraydeque::{ArrayDeque, Wrapping}; #[derive(Debug, thiserror::Error)] enum UiLoopError { #[error("program exiting")] Exit, #[error("error")] Anyhow(anyhow::Error), } impl From<anyhow::Error> for UiLoopError { fn from(e: anyhow::Error) -> Self { Self::Anyhow(e) } } impl From<std::io::Error> for UiLoopError { fn from(e: std::io::Error) -> Self { Self::Anyhow(e.into()) } } const LOGS_BUFFER_SIZE: usize = 100; const TICK_RATE: Duration = Duration::from_millis(250); #[derive(Debug)] enum TerminalEvent { Input(Event), Tick, FileCount { dir: PathBuf, count: usize }, Quit, } struct UiLoopState { pub logs: ArrayDeque<[(Level, String); LOGS_BUFFER_SIZE], Wrapping>, pub file_count: HashMap<PathBuf, usize>, pub file_count_state: ListState, pub file_monitors: Vec<JoinHandle<Result<()>>>, pub log_event_receiver: mpsc::UnboundedReceiver<(Level, String)>, pub terminal: Terminal<CrosstermBackend<Stdout>>, } impl UiLoopState { fn new( terminal: Terminal<CrosstermBackend<Stdout>>, log_event_receiver: mpsc::UnboundedReceiver<(Level, String)>, ) -> Self { Self { log_event_receiver, logs: Default::default(), file_count: Default::default(), file_count_state: Default::default(), file_monitors: Default::default(), terminal, } } } pub struct TerminalUi { pub task_events: mpsc::UnboundedSender<UiEvent>, task_event_receiver: mpsc::UnboundedReceiver<UiEvent>, ui_event_tx: mpsc::UnboundedSender<TerminalEvent>, ui_event_rx: mpsc::UnboundedReceiver<TerminalEvent>, } impl TerminalUi { pub fn init() -> Result<Self> { let (task_event_sender, task_event_receiver) = mpsc::unbounded_channel(); let (ui_event_tx, ui_event_rx) = mpsc::unbounded_channel(); Ok(Self { task_events: task_event_sender, task_event_receiver, ui_event_tx, ui_event_rx, }) }
async fn ticking(ui_event_tx: mpsc::UnboundedSender<TerminalEvent>) -> Result<()> { let mut interval = tokio::time::interval(TICK_RATE); loop { interval.tick().await; if let Err(_err) = ui_event_tx.send(TerminalEvent::Tick) { break; } } Ok(()) } fn read_keyboard_events( ui_event_tx: mpsc::UnboundedSender<TerminalEvent>, ) -> JoinHandle<Result<()>> { thread::spawn(move || loop { if event::poll(Duration::from_secs(1))? { let event = event::read()?; if let Err(_err) = ui_event_tx.send(TerminalEvent::Input(event)) { return Ok(()); } } }) } async fn read_commands( ui_event_tx: mpsc::UnboundedSender<TerminalEvent>, mut external_event_rx: mpsc::UnboundedReceiver<UiEvent>, ) -> Result<()> { while let Some(UiEvent::FileCount { dir, count }) = external_event_rx.recv().await { if ui_event_tx .send(TerminalEvent::FileCount { dir, count }) .is_err() { break; } } Ok(()) } fn take_available_logs<T>( receiver: &mut UnboundedReceiver<T>, size: usize, buffer: &mut ArrayDeque<[T; LOGS_BUFFER_SIZE], Wrapping>, ) { let mut count = 0; while let Ok(v) = receiver.try_recv() { count += 1; buffer.push_front(v); if count >= size { break; } } } async fn refresh_ui(ui_state: UiLoopState) -> Result<UiLoopState, UiLoopError> { let mut logs = ui_state.logs; let mut file_count_state = ui_state.file_count_state; let file_count = ui_state.file_count; let mut log_event_receiver = ui_state.log_event_receiver; let mut terminal = ui_state.terminal; Self::take_available_logs(&mut log_event_receiver, 10, &mut logs); terminal.draw(|f| { let chunks = Layout::default() .direction(Direction::Vertical) .constraints([Constraint::Percentage(25), Constraint::Percentage(75)].as_ref()) .split(f.size()); let mut sorted_file_count = file_count.iter().collect::<Vec<_>>(); sorted_file_count.sort_by(|(p1, _), (p2, _)| p1.cmp(p2)); let files = sorted_file_count .iter() .map(|(path, count)| { ListItem::new(Spans::from(vec![ Span::raw( path.file_name() .map(|f| f.to_string_lossy()) .unwrap_or_default(), ), Span::raw(": "), Span::raw(format!("{}", count)), ])) }) .collect::<Vec<_>>(); let log_list = List::new(files) .block(Block::default().borders(Borders::ALL).title("files")) .highlight_style(Style::default().add_modifier(Modifier::BOLD)) .start_corner(Corner::TopLeft); f.render_stateful_widget(log_list, chunks[0], &mut file_count_state); let log_items = logs .iter() .map(|(level, log)| { let style = match level { Level::Debug => Style::default().fg(Color::Magenta), Level::Error => Style::default().fg(Color::Red), Level::Warn => Style::default().fg(Color::Yellow), Level::Info => Style::default().fg(Color::Blue), Level::Trace => Style::default(), }; ListItem::new(Spans::from(vec![ Span::styled(format!("{:<9}", level), style), Span::raw(" "), Span::raw(log), ])) }) .collect::<Vec<_>>(); let log_list = List::new(log_items) .block(Block::default().borders(Borders::ALL).title("Logs")) .start_corner(Corner::BottomLeft); f.render_widget(log_list, chunks[1]); })?; Ok(UiLoopState { logs, file_count_state, file_count, terminal, log_event_receiver, ..ui_state }) } async fn on_key_down(ui_state: UiLoopState) -> Result<UiLoopState, UiLoopError> { let mut file_count_state = ui_state.file_count_state; let count = ui_state.file_count.len(); let i = file_count_state .selected() .map(|i| { if count == 0 { 0 } else { (i + count + 1) % count } }) .unwrap_or_default(); file_count_state.select(Some(i)); Ok(UiLoopState { file_count_state, ..ui_state }) } async fn on_key_up(ui_state: UiLoopState) -> Result<UiLoopState, UiLoopError> { let mut file_count_state = ui_state.file_count_state; let count = ui_state.file_count.len(); let i = file_count_state .selected() .map(|i| { if count == 0 { 0 } else { (i + count - 1) % count } }) .unwrap_or_default(); file_count_state.select(Some(i)); Ok(UiLoopState { file_count_state, ..ui_state }) } async fn on_quit(ui_state: UiLoopState) -> Result<UiLoopState, UiLoopError> { let mut terminal = ui_state.terminal; disable_raw_mode().map_err(|e| anyhow!("{:?}", e))?; execute!(terminal.backend_mut(), LeaveAlternateScreen).map_err(|e| anyhow!("{:?}", e))?; terminal.show_cursor()?; Err(UiLoopError::Exit) } async fn on_file_count( ui_state: UiLoopState, dir: PathBuf, count: usize, ) -> Result<UiLoopState, UiLoopError> { let mut file_count = ui_state.file_count; file_count.insert(dir, count); Ok(UiLoopState { file_count, ..ui_state }) } async fn ui_loop( initial_state: UiLoopState, ui_event_rx: mpsc::UnboundedReceiver<TerminalEvent>, ) -> Result<()> { let loop_result = ui_event_rx .map(Ok) .try_fold(initial_state, |ui_state, event| async { match event { TerminalEvent::Tick => Self::refresh_ui(ui_state).await, TerminalEvent::Input(Event::Key(k)) => match k.code { KeyCode::Char('q') => Self::on_quit(ui_state).await, KeyCode::Down => Self::on_key_down(ui_state).await, KeyCode::Up => Self::on_key_up(ui_state).await, _ => Ok(ui_state), }, TerminalEvent::FileCount { dir, count } => { Self::on_file_count(ui_state, dir, count).await } TerminalEvent::Quit => Self::on_quit(ui_state).await, _ => Ok(ui_state), } }) .await; match loop_result { Err(UiLoopError::Exit) | Ok(_) => Ok(()), Err(UiLoopError::Anyhow(e)) => Err(e), } } }
pub async fn run(self, timeout: Option<Duration>) -> Result<()> { enable_raw_mode()?; let mut stdout = io::stdout(); execute!(stdout, EnterAlternateScreen)?; let backend = CrosstermBackend::new(stdout); let mut terminal = Terminal::new(backend)?; terminal.clear()?; let (log_event_sender, log_event_receiver) = mpsc::unbounded_channel(); let initial_state = UiLoopState::new(terminal, log_event_receiver); env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")) .format(move |_buf, record| { let _r = log_event_sender.send((record.level(), format!("{}", record.args()))); Ok(()) }) .init(); let tick_event_tx_clone = self.ui_event_tx.clone(); let tick_event_handle = tokio::spawn(async { Self::ticking(tick_event_tx_clone).await.context("ticking") }); let keyboard_ui_event_tx = self.ui_event_tx.clone(); let _keyboard_event_handle = Self::read_keyboard_events(keyboard_ui_event_tx); let task_event_receiver = self.task_event_receiver; let ui_event_tx = self.ui_event_tx.clone(); let external_event_handle = tokio::spawn(Self::read_commands(ui_event_tx, task_event_receiver)); let ui_loop = tokio::spawn(Self::ui_loop(initial_state, self.ui_event_rx)); let mut task_handles = vec![tick_event_handle, ui_loop, external_event_handle]; if let Some(timeout) = timeout { let ui_event_tx = self.ui_event_tx.clone(); let timeout_task = tokio::spawn(async move { time::delay_for(timeout).await; let _ = ui_event_tx.send(TerminalEvent::Quit); Ok(()) }); task_handles.push(timeout_task); } try_wait_all_join_handles(task_handles) .await .context("ui_loop")?; Ok(()) }
function_block-full_function
[ { "content": "pub fn digest_file_blocking(file: impl AsRef<Path>) -> Result<String> {\n\n let file = file.as_ref();\n\n let data = std::fs::read(file)\n\n .with_context(|| format!(\"unable to read file to generate digest: {}\", file.display()))?;\n\n Ok(hex::encode(Sha256::digest(&data)))\n\n}\n", "file_path": "src/agent/onefuzz/src/sha256.rs", "rank": 0, "score": 357222.56944058696 }, { "content": "pub fn read_failure() -> Result<String> {\n\n let path = failure_path()?;\n\n fs::read_to_string(&path)\n\n .with_context(|| format!(\"unable to read failure log: {}\", path.display()))\n\n}\n", "file_path": "src/agent/onefuzz-supervisor/src/failure.rs", "rank": 1, "score": 330984.36690529843 }, { "content": "pub fn parse_key_value(value: String) -> Result<(String, String)> {\n\n let offset = value\n\n .find('=')\n\n .ok_or_else(|| format_err!(\"invalid key=value, no = found {:?}\", value))?;\n\n\n\n Ok((value[..offset].to_string(), value[offset + 1..].to_string()))\n\n}\n\n\n", "file_path": "src/agent/onefuzz-agent/src/tasks/utils.rs", "rank": 2, "score": 328384.9844799575 }, { "content": "pub fn digest(data: impl AsRef<[u8]>) -> String {\n\n hex::encode(Sha256::digest(data.as_ref()))\n\n}\n\n\n", "file_path": "src/agent/onefuzz/src/sha256.rs", "rank": 3, "score": 301509.4377095117 }, { "content": "pub fn format_events(events: &[EventData]) -> String {\n\n events\n\n .iter()\n\n .map(|x| x.as_values())\n\n .map(|(x, y)| format!(\"{}:{}\", x, y))\n\n .collect::<Vec<String>>()\n\n .join(\" \")\n\n}\n\n\n", "file_path": "src/agent/onefuzz-telemetry/src/lib.rs", "rank": 4, "score": 300834.46046811674 }, { "content": "pub fn digest_iter(data: impl IntoIterator<Item = impl AsRef<[u8]>>) -> String {\n\n let mut ctx = Sha256::new();\n\n\n\n for frame in data {\n\n ctx.update(frame);\n\n }\n\n\n\n hex::encode(ctx.finalize())\n\n}\n\n\n\npub async fn digest_file(file: impl AsRef<Path>) -> Result<String> {\n\n let file = file.as_ref();\n\n let data = fs::read(file)\n\n .await\n\n .with_context(|| format!(\"unable to read file to generate digest: {}\", file.display()))?;\n\n\n\n Ok(hex::encode(Sha256::digest(&data)))\n\n}\n\n\n", "file_path": "src/agent/onefuzz/src/sha256.rs", "rank": 5, "score": 296841.2042515311 }, { "content": "#[allow(clippy::ptr_arg)]\n\npub fn update_path(path: OsString, to_add: &PathBuf) -> Result<OsString> {\n\n let mut paths: Vec<_> = std::env::split_paths(&path).collect();\n\n if !paths.contains(to_add) {\n\n paths.push(to_add.clone())\n\n }\n\n Ok(std::env::join_paths(paths)?)\n\n}\n\n\n", "file_path": "src/agent/onefuzz/src/env.rs", "rank": 6, "score": 292884.84527618944 }, { "content": "/// Read the file and hash the file contents using sha2.\n\n/// Returns the digest of the hash as a string in lowercase hex.\n\nfn hash_file_contents(file: impl AsRef<Path>) -> Result<String> {\n\n let data = fs::read(file.as_ref())?;\n\n let digest = Sha256::digest(&data);\n\n Ok(hex::encode(&digest[..]))\n\n}\n\n\n", "file_path": "src/agent/input-tester/src/tester.rs", "rank": 7, "score": 292669.86876145366 }, { "content": "/// Create the specified directory if it does not already exist.\n\npub fn ensure_directory_exists(path: impl AsRef<Path>) -> Result<()> {\n\n let path = path.as_ref();\n\n if path.is_dir() {\n\n return Ok(());\n\n }\n\n\n\n // Either directory does not exist, or maybe it's a file, either way,\n\n // we'll try to create the directory and using that result for the error if any.\n\n fs::create_dir_all(&path).with_context(|| format!(\"Creating directory {}\", path.display()))?;\n\n Ok(())\n\n}\n", "file_path": "src/agent/input-tester/src/tester.rs", "rank": 8, "score": 292224.5021647598 }, { "content": "#[cfg(target_os = \"linux\")]\n\npub fn add_asan_log_env<S: BuildHasher>(env: &mut HashMap<String, String, S>, asan_dir: &Path) {\n\n let asan_path = asan_dir.join(\"asan-log\");\n\n let asan_path_as_str = asan_path.to_string_lossy();\n\n if let Some(v) = env.get_mut(\"ASAN_OPTIONS\") {\n\n let log_path = format!(\":log_path={}\", asan_path_as_str);\n\n v.push_str(&log_path);\n\n } else {\n\n let log_path = format!(\"log_path={}\", asan_path_as_str);\n\n env.insert(\"ASAN_OPTIONS\".to_string(), log_path);\n\n }\n\n}\n\n\n\npub async fn check_asan_string(mut data: String) -> Result<Option<CrashLog>> {\n\n match CrashLog::parse(data.clone()) {\n\n Ok(log) => Ok(Some(log)),\n\n Err(err) => {\n\n if data.len() > ASAN_LOG_TRUNCATE_SIZE {\n\n data.truncate(ASAN_LOG_TRUNCATE_SIZE);\n\n data.push_str(\"...<truncated>\");\n\n }\n", "file_path": "src/agent/onefuzz/src/asan.rs", "rank": 9, "score": 290043.5165938098 }, { "content": "pub fn new_test_result(\n\n debugger_result: DebuggerResult,\n\n input_file: &Path,\n\n log_path: &Path,\n\n) -> TestResult {\n\n TestResult {\n\n bugs: debugger_result.exceptions,\n\n input_file: input_file.to_string_lossy().to_string(),\n\n log_path: format!(\"{}\", log_path.display()),\n\n debugger_output: debugger_result.debugger_output,\n\n test_stdout: debugger_result.stdout,\n\n test_stderr: debugger_result.stderr,\n\n exit_status: debugger_result.exit_status,\n\n }\n\n}\n\n\n\n/// The details of an exception observed by the execution engine.\n\n#[derive(Clone)]\n\npub struct Exception {\n\n /// The win32 exception code.\n", "file_path": "src/agent/input-tester/src/test_result/mod.rs", "rank": 10, "score": 284344.2726102715 }, { "content": "pub fn onefuzz_root() -> Result<PathBuf> {\n\n let path = match std::env::var_os(ONEFUZZ_ROOT_ENV) {\n\n Some(path) => PathBuf::from(path),\n\n None => std::env::current_dir()?,\n\n };\n\n Ok(path)\n\n}\n\n\n", "file_path": "src/agent/onefuzz/src/fs.rs", "rank": 11, "score": 282332.6178759289 }, { "content": "pub fn onefuzz_etc() -> Result<PathBuf> {\n\n Ok(onefuzz_root()?.join(\"etc\"))\n\n}\n\n\n\npub async fn has_files(path: impl AsRef<Path>) -> Result<bool> {\n\n let path = path.as_ref();\n\n let mut paths = fs::read_dir(&path)\n\n .await\n\n .with_context(|| format!(\"unable to check if directory has files: {}\", path.display()))?;\n\n let result = paths.next_entry().await?.is_some();\n\n Ok(result)\n\n}\n\n\n\npub async fn list_files(path: impl AsRef<Path>) -> Result<Vec<PathBuf>> {\n\n let path = path.as_ref();\n\n let paths = fs::read_dir(&path)\n\n .await\n\n .with_context(|| format!(\"unable to list files: {}\", path.display()))?;\n\n\n\n let mut files = paths\n", "file_path": "src/agent/onefuzz/src/fs.rs", "rank": 12, "score": 282332.6178759289 }, { "content": "pub fn configure_machine_wide_app_debug_settings(application_path: impl AsRef<Path>) -> Result<()> {\n\n anyhow::ensure!(\n\n process::is_elevated(),\n\n \"Changing registry requires elevation\"\n\n );\n\n\n\n let exe_name = application_path.as_ref().file_name().ok_or_else(|| {\n\n anyhow::anyhow!(\n\n \"Missing executable name in path {}\",\n\n application_path.as_ref().display()\n\n )\n\n })?;\n\n\n\n // This should avoid some popups, e.g. if a dll can't be found.\n\n // I'm not sure SEM_NOGPFAULTERRORBOX is useful anymore (because of Watson),\n\n // but it is another source of popups that could block automation.\n\n unsafe { SetErrorMode(SEM_FAILCRITICALERRORS | SEM_NOGPFAULTERRORBOX) };\n\n\n\n // This is a machine-wide setting, not process specific.\n\n wer::disable_wer_ui()?;\n\n\n\n wer::add_exclusion(&exe_name)?;\n\n aedebug::add_exclusion(&exe_name)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/agent/win-util/src/lib.rs", "rank": 13, "score": 281481.5259176498 }, { "content": "pub fn done_path() -> Result<PathBuf> {\n\n Ok(onefuzz_root()?.join(\"supervisor-is-done\"))\n\n}\n", "file_path": "src/agent/onefuzz-supervisor/src/done.rs", "rank": 14, "score": 279939.6414891335 }, { "content": "pub fn failure_path() -> Result<PathBuf> {\n\n Ok(onefuzz_root()?.join(FAILURE_FILE))\n\n}\n\n\n", "file_path": "src/agent/onefuzz-supervisor/src/failure.rs", "rank": 15, "score": 279939.6414891335 }, { "content": "#[allow(clippy::ptr_arg)]\n\npub fn get_path_with_directory(variable: &str, to_add: &PathBuf) -> Result<OsString> {\n\n match std::env::var_os(variable) {\n\n Some(path) => update_path(path, to_add),\n\n None => Ok(to_add.clone().into()),\n\n }\n\n}\n", "file_path": "src/agent/onefuzz/src/env.rs", "rank": 16, "score": 279919.4786288986 }, { "content": "pub fn new_exception(\n\n process_handle: HANDLE,\n\n exception: &EXCEPTION_DEBUG_INFO,\n\n stack: DebugStack,\n\n) -> Exception {\n\n let stack_hash = stack.stable_hash();\n\n Exception {\n\n exception_code: exception.ExceptionRecord.ExceptionCode,\n\n description: new_exception_description(process_handle, &exception.ExceptionRecord),\n\n stack_hash,\n\n first_chance: exception.dwFirstChance != 0,\n\n stack_frames: stack.frames,\n\n }\n\n}\n\n\n", "file_path": "src/agent/input-tester/src/test_result/mod.rs", "rank": 17, "score": 275309.7271278267 }, { "content": "pub fn get_hash_map(args: &clap::ArgMatches<'_>, name: &str) -> Result<HashMap<String, String>> {\n\n let mut env = HashMap::new();\n\n for opt in args.values_of_lossy(name).unwrap_or_default() {\n\n let (k, v) = parse_key_value(opt)?;\n\n env.insert(k, v);\n\n }\n\n Ok(env)\n\n}\n\n\n", "file_path": "src/agent/onefuzz-agent/src/local/common.rs", "rank": 18, "score": 272335.5490512315 }, { "content": "pub fn new_exception_description(\n\n process_handle: HANDLE,\n\n exception_record: &EXCEPTION_RECORD,\n\n) -> ExceptionDescription {\n\n if let Some(generic_exception) = generic_exception(exception_record) {\n\n ExceptionDescription::GenericException(generic_exception)\n\n } else {\n\n match exception_record.ExceptionCode {\n\n EXCEPTION_FAIL_FAST => {\n\n ExceptionDescription::FastFail(fast_fail::from_exception_record(exception_record))\n\n }\n\n STATUS_VERIFIER_STOP => ExceptionDescription::VerifierStop(verifier_stop::new(\n\n process_handle,\n\n exception_record,\n\n )),\n\n EH_SANITIZER => ExceptionDescription::Asan(asan_error_from_exception_record(\n\n process_handle,\n\n exception_record,\n\n )),\n\n vcpp_debugger::EXCEPTION_VISUALCPP_DEBUGGER => {\n", "file_path": "src/agent/input-tester/src/test_result/mod.rs", "rank": 19, "score": 272113.7990276951 }, { "content": "pub fn property(client_type: ClientType, key: impl AsRef<str>) -> Option<String> {\n\n client(client_type).map(|c| {\n\n c.context()\n\n .properties()\n\n .get(key.as_ref())\n\n .map(|s| s.to_owned())\n\n })?\n\n}\n\n\n", "file_path": "src/agent/onefuzz-telemetry/src/lib.rs", "rank": 20, "score": 266327.7872561252 }, { "content": "pub fn process_image(path: impl AsRef<Path>, functions_only: bool) -> Result<FixedBitSet> {\n\n let file = File::open(path.as_ref())?;\n\n let mmap = unsafe { Mmap::map(&file)? };\n\n let pe = PE::parse(&mmap)?;\n\n\n\n process_module(path, &mmap, &pe, functions_only)\n\n}\n", "file_path": "src/agent/coverage/src/pe.rs", "rank": 21, "score": 266063.0167516296 }, { "content": "pub fn refresh() -> Result<()> {\n\n let mut s = SYSTEM.write().map_err(|e| format_err!(\"{}\", e))?;\n\n s.refresh();\n\n Ok(())\n\n}\n\n\n", "file_path": "src/agent/onefuzz/src/system.rs", "rank": 22, "score": 264465.8187923787 }, { "content": "pub fn to_wstring(str: impl AsRef<Path>) -> Vec<u16> {\n\n OsStr::new(str.as_ref())\n\n .encode_wide()\n\n .chain(once(0))\n\n .collect()\n\n}\n\n\n", "file_path": "src/agent/win-util/src/string.rs", "rank": 23, "score": 262616.450903006 }, { "content": "pub fn track_event(event: &Event, properties: &[EventData]) {\n\n use appinsights::telemetry::Telemetry;\n\n\n\n if let Some(client) = client(ClientType::Instance) {\n\n let mut evt = appinsights::telemetry::EventTelemetry::new(event.as_str());\n\n let props = evt.properties_mut();\n\n for property in properties {\n\n let (name, val) = property.as_values();\n\n props.insert(name.to_string(), val);\n\n }\n\n client.track(evt);\n\n }\n\n\n\n if let Some(client) = client(ClientType::Microsoft) {\n\n let mut evt = appinsights::telemetry::EventTelemetry::new(event.as_str());\n\n let props = evt.properties_mut();\n\n\n\n for property in properties {\n\n if property.can_share_with_microsoft() {\n\n let (name, val) = property.as_values();\n\n props.insert(name.to_string(), val);\n\n }\n\n }\n\n client.track(evt);\n\n }\n\n}\n\n\n", "file_path": "src/agent/onefuzz-telemetry/src/lib.rs", "rank": 24, "score": 261473.84060968633 }, { "content": "fn parse_summary(text: &str) -> Result<(String, String, String)> {\n\n // eventually, this should be updated to support multiple callstack formats\n\n asan::parse_summary(&text)\n\n}\n\n\n", "file_path": "src/agent/stacktrace-parser/src/lib.rs", "rank": 25, "score": 259903.90872233387 }, { "content": "fn digest_iter(data: impl IntoIterator<Item = impl AsRef<[u8]>>, depth: Option<usize>) -> String {\n\n let mut ctx = Sha256::new();\n\n\n\n if let Some(depth) = depth {\n\n for frame in data.into_iter().take(depth) {\n\n ctx.update(frame);\n\n }\n\n } else {\n\n for frame in data {\n\n ctx.update(frame);\n\n }\n\n }\n\n\n\n hex::encode(ctx.finalize())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::CrashLog;\n\n use anyhow::{Context, Result};\n", "file_path": "src/agent/stacktrace-parser/src/lib.rs", "rank": 26, "score": 259190.58154094845 }, { "content": "pub fn get_cmd_exe(cmd_type: CmdType, args: &clap::ArgMatches<'_>) -> Result<String> {\n\n let name = match cmd_type {\n\n CmdType::Target => TARGET_EXE,\n\n // CmdType::Supervisor => SUPERVISOR_EXE,\n\n CmdType::Generator => GENERATOR_EXE,\n\n };\n\n\n\n let exe = value_t!(args, name, String)?;\n\n Ok(exe)\n\n}\n\n\n", "file_path": "src/agent/onefuzz-agent/src/local/common.rs", "rank": 27, "score": 258506.0991312483 }, { "content": "pub fn log_message(level: appinsights::telemetry::SeverityLevel, msg: String) {\n\n if let Some(client) = client(ClientType::Instance) {\n\n client.track_trace(msg, level);\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! log_events {\n\n ($name: expr; $events: expr) => {{\n\n onefuzz_telemetry::track_event(&$name, &$events);\n\n log::info!(\n\n \"{} {}\",\n\n $name.as_str(),\n\n onefuzz_telemetry::format_events(&$events)\n\n );\n\n }};\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! event {\n", "file_path": "src/agent/onefuzz-telemetry/src/lib.rs", "rank": 28, "score": 256842.18070691114 }, { "content": "pub fn disable_wer_ui() -> Result<()> {\n\n let hklm = RegKey::predef(HKEY_LOCAL_MACHINE);\n\n let (wer, _) = hklm.create_subkey(WINDOWS_ERROR_REPORTING_KEY)?;\n\n\n\n let restore = match wer.get_value::<DWORD, _>(DONTSHOWUI_PROP) {\n\n Err(_) => RestoreWerUI::DeleteKey,\n\n Ok(v) => RestoreWerUI::Value(v),\n\n };\n\n\n\n wer.set_value(DONTSHOWUI_PROP, &1u32)?;\n\n atexit::register(move || restore_wer_ui(restore));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/agent/win-util/src/wer.rs", "rank": 29, "score": 256780.80573261133 }, { "content": "pub fn get_path_from_handle(handle: HANDLE) -> Result<PathBuf> {\n\n let mut actual_len: usize;\n\n let mut buf: Vec<u16> = Vec::with_capacity(MAX_PATH);\n\n\n\n loop {\n\n actual_len = unsafe {\n\n GetFinalPathNameByHandleW(\n\n handle,\n\n buf.as_mut_ptr(),\n\n buf.capacity() as DWORD,\n\n 0, // default options - normalized with drive letter\n\n ) as usize\n\n };\n\n\n\n if actual_len == 0 {\n\n return Err(last_os_error());\n\n }\n\n\n\n if actual_len > buf.capacity() {\n\n buf.reserve(actual_len);\n", "file_path": "src/agent/win-util/src/file.rs", "rank": 30, "score": 254184.01000661572 }, { "content": "pub fn new(process_handle: HANDLE, exception_record: &EXCEPTION_RECORD) -> VerifierStop {\n\n if exception_record.NumberParameters >= 3 {\n\n match process::read_memory::<VERIFIER_STOP_PARAMS>(\n\n process_handle,\n\n exception_record.ExceptionInformation[2] as LPCVOID,\n\n ) {\n\n Ok(stop_params) => {\n\n let code = stop_params.Header.StopCode as u32;\n\n match code {\n\n stop_codes::HANDLES_INVALID_HANDLE\n\n ..=stop_codes::HANDLES_INCORRECT_OBJECT_TYPE => {\n\n let handles_stop = handles_stop_from_u32(code);\n\n VerifierStop::Handles(handles_stop)\n\n }\n\n stop_codes::HEAPS_UNKNOWN_ERROR..=stop_codes::HEAPS_CORRUPTED_HEAP_LIST => {\n\n let heap_stop = heap_stop_from_u32(code);\n\n VerifierStop::Heap(heap_stop)\n\n }\n\n stop_codes::LEAK_ALLOCATION..=stop_codes::LEAK_COM_ALLOCATION => {\n\n let leak_stop = leak_stop_from_u32(code);\n", "file_path": "src/agent/input-tester/src/test_result/verifier_stop.rs", "rank": 31, "score": 251358.6232451368 }, { "content": "pub fn command_invocation<S, T, I>(command: S, args: I) -> String\n\nwhere\n\n S: AsRef<OsStr>,\n\n T: AsRef<OsStr>,\n\n I: IntoIterator<Item = T>,\n\n{\n\n let mut result = command.as_ref().to_string_lossy().to_string();\n\n\n\n for arg in args {\n\n result.push(' ');\n\n let needs_quotes = arg.as_ref().to_string_lossy().find(' ').is_some();\n\n if needs_quotes {\n\n result.push('\"');\n\n }\n\n let arg: &Path = arg.as_ref().as_ref();\n\n result.push_str(&format!(\"{}\", arg.display()));\n\n if needs_quotes {\n\n result.push('\"');\n\n }\n\n }\n", "file_path": "src/agent/input-tester/src/logging.rs", "rank": 32, "score": 251224.97120460216 }, { "content": "fn get_files() -> Result<Vec<String>> {\n\n let mut files = vec![];\n\n\n\n let stdin = io::stdin();\n\n for line in stdin.lock().lines() {\n\n let line = line?;\n\n files.push(line);\n\n }\n\n Ok(files)\n\n}\n\n\n", "file_path": "src/utils/telemetry-stats/src/main.rs", "rank": 33, "score": 248301.49392959615 }, { "content": "fn edit_exclusion_list<F: Fn(RegKey) -> ::core::result::Result<(), std::io::Error>>(\n\n f: F,\n\n) -> Result<()> {\n\n let hklm = RegKey::predef(HKEY_LOCAL_MACHINE);\n\n\n\n // We want to set both the 32 and 64 bit registries.\n\n for flags in &[0, KEY_WOW64_32KEY] {\n\n let exclusion_list = hklm\n\n .open_subkey_with_flags(AEDEBUG_EXCLUSION_LIST, KEY_SET_VALUE | flags)\n\n .context(\"Opening AeDebug\\\\AutoExclusionList\")?;\n\n f(exclusion_list)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/agent/win-util/src/aedebug.rs", "rank": 34, "score": 248174.4883046872 }, { "content": "/// We use dbghlp Sym apis to walk a stack. dbghlp apis are documented as not being thread safe,\n\n/// so we provide a lock around our use of these apis.\n\n///\n\n/// Note that Rust itself also uses dbghlp to get a stack trace, e.g. when you panic and set\n\n/// RUST_BACKTRACE.\n\n///\n\n/// This function is based on the `backtrace` crate which is also used in Rust std. Here\n\n/// we use the same named local mutex to hopefully avoid any unsynchronized uses of dbghlp\n\n/// in std.\n\npub fn lock() -> Result<DebugHelpGuard> {\n\n use core::sync::atomic::{AtomicUsize, Ordering};\n\n\n\n static LOCK: AtomicUsize = AtomicUsize::new(0);\n\n let mut lock = LOCK.load(Ordering::SeqCst);\n\n if lock == 0 {\n\n lock = unsafe {\n\n CreateMutexA(\n\n std::ptr::null_mut(),\n\n 0,\n\n \"Local\\\\RustBacktraceMutex\\0\".as_ptr() as _,\n\n ) as usize\n\n };\n\n\n\n if lock == 0 {\n\n return Err(last_os_error());\n\n }\n\n\n\n // Handle the race between threads creating our mutex by closing ours if another\n\n // thread created the mutex first.\n", "file_path": "src/agent/debugger/src/dbghelp.rs", "rank": 35, "score": 247804.48667250888 }, { "content": "pub fn system_info() -> Result<SystemInfo> {\n\n let s = SYSTEM.read().map_err(|e| format_err!(\"{}\", e))?;\n\n Ok(s.system_info())\n\n}\n\n\n", "file_path": "src/agent/onefuzz/src/system.rs", "rank": 36, "score": 247793.4466561423 }, { "content": "pub fn is_agent_done() -> Result<bool> {\n\n Ok(metadata(done_path()?).is_ok())\n\n}\n\n\n", "file_path": "src/agent/onefuzz-supervisor/src/done.rs", "rank": 37, "score": 247793.4466561423 }, { "content": "pub fn get_system_memory_info() -> Result<SystemMemoryInfo> {\n\n let mut info = MaybeUninit::zeroed();\n\n if unsafe {\n\n K32GetPerformanceInfo(info.as_mut_ptr(), size_of::<PERFORMANCE_INFORMATION> as u32)\n\n } == FALSE\n\n {\n\n return Err(last_os_error());\n\n }\n\n\n\n let info = unsafe { info.assume_init() };\n\n\n\n Ok(info.into())\n\n}\n", "file_path": "src/agent/win-util/src/memory.rs", "rank": 39, "score": 238672.04718267632 }, { "content": "pub fn debug(opt: DebugOpt) -> Result<()> {\n\n match opt {\n\n DebugOpt::NodeEvent(opt) => debug_node_event(opt)?,\n\n DebugOpt::RunWorker(opt) => debug_run_worker(opt)?,\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n#[derive(StructOpt, Debug)]\n\n#[structopt(rename_all = \"snake_case\")]\n\npub enum NodeEventOpt {\n\n StateUpdate {\n\n #[structopt(parse(try_from_str = serde_json::from_str))]\n\n state: NodeState,\n\n },\n\n WorkerEvent(WorkerEventOpt),\n\n}\n\n\n", "file_path": "src/agent/onefuzz-supervisor/src/debug.rs", "rank": 40, "score": 237754.66706981542 }, { "content": "pub fn save_failure(err: &Error) -> Result<()> {\n\n let path = failure_path()?;\n\n let message = format!(\"{}\", err);\n\n fs::write(&path, message)\n\n .with_context(|| format!(\"unable to write failure log: {}\", path.display()))\n\n}\n\n\n", "file_path": "src/agent/onefuzz-supervisor/src/failure.rs", "rank": 41, "score": 237754.66706981545 }, { "content": "/// Runs the registered functions but does *not* terminate the process\n\n///\n\n/// This function is not called automatically (e.g. via `drop`).\n\npub fn execute() {\n\n ATEXIT.execute()\n\n}\n\n\n\nimpl AtExit {\n\n fn new() -> Arc<Self> {\n\n let result = Arc::new(AtExit {\n\n functions: RwLock::new(vec![]),\n\n });\n\n {\n\n // This should cover the normal cases of pressing Ctrl+c or Ctrl+Break, but\n\n // we might fail to invoke the cleanup functions (e.g. to disable appverifier)\n\n // if the process is exiting from a logoff, machine reboot, or console closing event.\n\n //\n\n // The problem is the handler that `ctrlc` registers is not this handler, but instead\n\n // a handler that signals another thread to call our handler and then returns to the OS.\n\n // The OS might terminate our application before our handler actually runs.\n\n //\n\n // This is not a problem for Ctrl+c though because the OS won't terminate the program\n\n // (which is why we must exit ourselves.)\n", "file_path": "src/agent/atexit/src/lib.rs", "rank": 42, "score": 237260.200950076 }, { "content": "pub fn add_exclusion(exe_name: &OsStr) -> Result<()> {\n\n let wexe_name = string::to_wstring(&exe_name);\n\n check_hr!(unsafe {\n\n WerAddExcludedApplication(wexe_name.as_ptr(), /*AllUsers*/ TRUE)\n\n });\n\n atexit::register(move || remove_exclusion(&wexe_name));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/agent/win-util/src/wer.rs", "rank": 43, "score": 233197.55753669987 }, { "content": "pub fn add_exclusion(exe_name: &OsStr) -> Result<()> {\n\n edit_exclusion_list(|key| key.set_value(&exe_name, &1u32))?;\n\n let exe_name = exe_name.to_owned();\n\n atexit::register(move || remove_exclusion(&exe_name));\n\n Ok(())\n\n}\n\n\n", "file_path": "src/agent/win-util/src/aedebug.rs", "rank": 44, "score": 233197.55753669987 }, { "content": "fn log_input_test_result(result: &InputTestResult) {\n\n let debugger_result = &result.debugger_result;\n\n let input_path = &result.input_path;\n\n if debugger_result.exceptions.is_empty() {\n\n trace!(\"No bugs found in {}\", input_path.display())\n\n } else {\n\n for exception in &debugger_result.exceptions {\n\n info!(\n\n \"Exception found testing {} ExceptionCode=0x{:08x} Description={} FirstChance={} StackHash={}\",\n\n input_path.display(),\n\n exception.exception_code,\n\n exception.description,\n\n exception.first_chance,\n\n exception.stack_hash,\n\n );\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/agent/input-tester/src/tester.rs", "rank": 45, "score": 231263.25317588937 }, { "content": "fn print_json(data: impl serde::Serialize) -> Result<()> {\n\n let json = serde_json::to_string_pretty(&data)?;\n\n println!(\"{}\", json);\n\n\n\n Ok(())\n\n}\n\n\n\n#[derive(StructOpt, Debug)]\n\n#[structopt(rename_all = \"snake_case\")]\n\npub struct RunWorkerOpt {\n\n #[structopt(long)]\n\n config: PathBuf,\n\n\n\n #[structopt(long)]\n\n setup_url: Url,\n\n\n\n #[structopt(long)]\n\n script: bool,\n\n}\n\n\n", "file_path": "src/agent/onefuzz-supervisor/src/debug.rs", "rank": 46, "score": 229413.1352877626 }, { "content": "pub fn write_memory_slice(\n\n process_handle: HANDLE,\n\n remote_address: LPVOID,\n\n buffer: &[u8],\n\n) -> Result<()> {\n\n let mut bytes_written: SIZE_T = 0;\n\n check_winapi(|| unsafe {\n\n WriteProcessMemory(\n\n process_handle,\n\n remote_address,\n\n buffer.as_ptr() as LPCVOID,\n\n buffer.len(),\n\n &mut bytes_written,\n\n )\n\n })\n\n .context(\"writing process memory\")?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/agent/win-util/src/process.rs", "rank": 47, "score": 229288.13558680908 }, { "content": "pub fn read_wide_string(\n\n process_handle: HANDLE,\n\n remote_address: LPCVOID,\n\n len: usize,\n\n) -> Result<OsString> {\n\n let mut buf: Vec<u16> = Vec::with_capacity(len);\n\n unsafe {\n\n buf.set_len(len);\n\n }\n\n read_memory_array::<u16>(process_handle, remote_address, &mut buf[..])?;\n\n Ok(OsString::from_wide(&buf))\n\n}\n\n\n", "file_path": "src/agent/win-util/src/process.rs", "rank": 48, "score": 229215.4705997329 }, { "content": "pub fn read_narrow_string(\n\n process_handle: HANDLE,\n\n remote_address: LPCVOID,\n\n len: usize,\n\n) -> Result<String> {\n\n let mut buf: Vec<u8> = Vec::with_capacity(len);\n\n unsafe {\n\n buf.set_len(len);\n\n }\n\n read_memory_array::<u8>(process_handle, remote_address, &mut buf[..])?;\n\n Ok(String::from_utf8_lossy(&buf).into())\n\n}\n\n\n", "file_path": "src/agent/win-util/src/process.rs", "rank": 49, "score": 229215.4705997329 }, { "content": "/// Call a windows api that returns BOOL, and if it fails, returns the os error\n\npub fn check_winapi<T: FnOnce() -> BOOL>(f: T) -> Result<()> {\n\n if f() == FALSE {\n\n Err(last_os_error())\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/agent/win-util/src/lib.rs", "rank": 50, "score": 228345.02781266958 }, { "content": "pub fn get_synced_dir(\n\n name: &str,\n\n job_id: Uuid,\n\n task_id: Uuid,\n\n args: &ArgMatches<'_>,\n\n) -> Result<SyncedDir> {\n\n let remote_path = value_t!(args, name, PathBuf)?.absolutize()?.into_owned();\n\n let remote_url = Url::from_file_path(remote_path).map_err(|_| anyhow!(\"invalid file path\"))?;\n\n let remote_blob_url = BlobContainerUrl::new(remote_url)?;\n\n let path = std::env::current_dir()?.join(format!(\"{}/{}/{}\", job_id, task_id, name));\n\n Ok(SyncedDir {\n\n url: remote_blob_url,\n\n path,\n\n })\n\n}\n\n\n", "file_path": "src/agent/onefuzz-agent/src/local/common.rs", "rank": 51, "score": 226792.556651161 }, { "content": "pub fn get_synced_dirs(\n\n name: &str,\n\n job_id: Uuid,\n\n task_id: Uuid,\n\n args: &ArgMatches<'_>,\n\n) -> Result<Vec<SyncedDir>> {\n\n let current_dir = std::env::current_dir()?;\n\n args.values_of_os(name)\n\n .ok_or_else(|| anyhow!(\"argument '{}' not specified\", name))?\n\n .enumerate()\n\n .map(|(index, remote_path)| {\n\n let path = PathBuf::from(remote_path);\n\n let remote_path = path.absolutize()?;\n\n let remote_url = Url::from_file_path(remote_path).expect(\"invalid file path\");\n\n let remote_blob_url = BlobContainerUrl::new(remote_url).expect(\"invalid url\");\n\n let path = current_dir.join(format!(\"{}/{}/{}_{}\", job_id, task_id, name, index));\n\n Ok(SyncedDir {\n\n url: remote_blob_url,\n\n path,\n\n })\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/agent/onefuzz-agent/src/local/common.rs", "rank": 52, "score": 226792.556651161 }, { "content": "pub fn record(cmd: Command) -> Result<CommandBlockCov> {\n\n let mut cache = ModuleCache::default();\n\n let filter = CmdFilter::default();\n\n let mut recorder = Recorder::new(&mut cache, filter);\n\n recorder.record(cmd)?;\n\n Ok(recorder.into_coverage())\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Recorder<'c> {\n\n breakpoints: Breakpoints,\n\n cache: &'c mut ModuleCache,\n\n coverage: CommandBlockCov,\n\n demangler: Demangler,\n\n filter: CmdFilter,\n\n images: Option<Images>,\n\n}\n\n\n\nimpl<'c> Recorder<'c> {\n\n pub fn new(cache: &'c mut ModuleCache, filter: CmdFilter) -> Self {\n", "file_path": "src/agent/coverage/src/block/linux.rs", "rank": 53, "score": 226613.78857838648 }, { "content": "pub fn to_log_level(level: &appinsights::telemetry::SeverityLevel) -> log::Level {\n\n match level {\n\n Verbose => log::Level::Debug,\n\n Information => log::Level::Info,\n\n Warning => log::Level::Warn,\n\n Error => log::Level::Error,\n\n Critical => log::Level::Error,\n\n }\n\n}\n\n\n", "file_path": "src/agent/onefuzz-telemetry/src/lib.rs", "rank": 54, "score": 224812.50671526918 }, { "content": "pub fn parse_url_data(data: &[u8]) -> Result<Url> {\n\n let text = std::str::from_utf8(data)?;\n\n let url = Url::parse(text)?;\n\n\n\n Ok(url)\n\n}\n\n\n", "file_path": "src/agent/onefuzz-agent/src/tasks/utils.rs", "rank": 55, "score": 224442.47502483148 }, { "content": "pub fn spawn_file_count_monitor(\n\n dir: PathBuf,\n\n sender: UnboundedSender<UiEvent>,\n\n) -> JoinHandle<Result<()>> {\n\n tokio::spawn(async move {\n\n wait_for_dir(&dir).await?;\n\n\n\n loop {\n\n let mut rd = tokio::fs::read_dir(&dir).await?;\n\n let mut count: usize = 0;\n\n\n\n while let Some(Ok(entry)) = rd.next().await {\n\n if entry.path().is_file() {\n\n count += 1;\n\n }\n\n }\n\n\n\n if sender\n\n .send(UiEvent::FileCount {\n\n dir: dir.clone(),\n", "file_path": "src/agent/onefuzz-agent/src/local/common.rs", "rank": 56, "score": 224404.37310244364 }, { "content": "/// Return the full asan report from the exception record.\n\npub fn get_asan_report(\n\n process_handle: HANDLE,\n\n exception_record: &EXCEPTION_RECORD,\n\n) -> Option<String> {\n\n if exception_record.NumberParameters >= 1 {\n\n let message = get_full_message(\n\n process_handle,\n\n exception_record.ExceptionInformation[0] as LPCVOID,\n\n )\n\n .ok();\n\n\n\n if let Some(message) = message {\n\n return Some(message);\n\n }\n\n }\n\n\n\n None\n\n}\n", "file_path": "src/agent/input-tester/src/test_result/asan.rs", "rank": 57, "score": 224144.64468591684 }, { "content": "pub fn jitter(value: Duration) -> Duration {\n\n let random: u64 = thread_rng().gen_range(0, 10);\n\n Duration::from_secs(random) + value\n\n}\n\n\n\npub async fn delay_with_jitter(value: Duration) {\n\n delay_for(jitter(value)).await\n\n}\n\n\n\npub async fn random_delay(value: Duration) {\n\n let random: u64 = thread_rng().gen_range(0, value.as_secs());\n\n let delay = Duration::new(random, 0);\n\n delay_for(delay).await\n\n}\n", "file_path": "src/agent/onefuzz/src/jitter.rs", "rank": 58, "score": 223041.48054542416 }, { "content": "fn get_full_message(process_handle: HANDLE, remote_asan_error: LPCVOID) -> Result<String> {\n\n let record = get_exception_sanitizer_error(process_handle, remote_asan_error)?;\n\n let asan_error = unsafe { record.u.asan() };\n\n let size = asan_error.uiRuntimeFullMessageLength as usize;\n\n let remote_message_address = asan_error.pwRuntimeFullMessage as LPCVOID;\n\n if size == 0 || remote_message_address.is_null() {\n\n bail!(\"Empty full message\");\n\n }\n\n\n\n let message = process::read_wide_string(process_handle, remote_message_address, size)?;\n\n Ok(message.to_string_lossy().to_string())\n\n}\n\n\n", "file_path": "src/agent/input-tester/src/test_result/asan.rs", "rank": 59, "score": 222429.69478134328 }, { "content": "fn get_runtime_description(process_handle: HANDLE, remote_asan_error: LPCVOID) -> Result<String> {\n\n let record = get_exception_sanitizer_error(process_handle, remote_asan_error)?;\n\n let asan_error = unsafe { record.u.asan() };\n\n let size = asan_error.uiRuntimeDescriptionLength as usize;\n\n let remote_message_address = asan_error.pwRuntimeDescription as LPCVOID;\n\n let message = process::read_wide_string(process_handle, remote_message_address, size)?;\n\n Ok(message.to_string_lossy().to_string())\n\n}\n\n\n", "file_path": "src/agent/input-tester/src/test_result/asan.rs", "rank": 60, "score": 222429.69478134328 }, { "content": "pub fn asan_error_from_exception_record(\n\n process_handle: HANDLE,\n\n exception_record: &EXCEPTION_RECORD,\n\n) -> AsanError {\n\n if exception_record.NumberParameters >= 1 {\n\n let message = get_runtime_description(\n\n process_handle,\n\n exception_record.ExceptionInformation[0] as LPCVOID,\n\n )\n\n .ok();\n\n\n\n if let Some(message) = message {\n\n return get_asan_error_from_runtime_description(&message);\n\n }\n\n }\n\n\n\n AsanError::UnknownAsanError\n\n}\n\n\n", "file_path": "src/agent/input-tester/src/test_result/asan.rs", "rank": 61, "score": 221829.18488008948 }, { "content": "fn debug_node_event_worker_event(opt: WorkerEventOpt) -> Result<()> {\n\n let task_id = uuid::Uuid::new_v4();\n\n\n\n let event = match opt {\n\n WorkerEventOpt::Running => WorkerEvent::Running { task_id },\n\n WorkerEventOpt::Done { code, signal } => {\n\n let (code, signal) = match (code, signal) {\n\n // Default to ok exit.\n\n (None, None) => (Some(0), None),\n\n // Prioritize signal.\n\n (Some(_), Some(s)) => (None, Some(s)),\n\n _ => (code, signal),\n\n };\n\n let success = code == Some(0);\n\n let exit_status = ExitStatus {\n\n code,\n\n signal,\n\n success,\n\n };\n\n let stderr = \"stderr output goes here\".into();\n", "file_path": "src/agent/onefuzz-supervisor/src/debug.rs", "rank": 62, "score": 221286.52658151163 }, { "content": "pub fn proc_info(pid: u32) -> Result<Option<ProcInfo>> {\n\n let s = SYSTEM.read().map_err(|e| format_err!(\"{}\", e))?;\n\n Ok(s.proc_info(pid))\n\n}\n\n\n\nlazy_static! {\n\n static ref SYSTEM: RwLock<System> = {\n\n let mut s = System::new();\n\n s.refresh();\n\n RwLock::new(s)\n\n };\n\n}\n\n\n", "file_path": "src/agent/onefuzz/src/system.rs", "rank": 63, "score": 220885.1427359345 }, { "content": "fn debug_node_event(opt: NodeEventOpt) -> Result<()> {\n\n match opt {\n\n NodeEventOpt::StateUpdate { state } => debug_node_event_state_update(state)?,\n\n NodeEventOpt::WorkerEvent(opt) => debug_node_event_worker_event(opt)?,\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/agent/onefuzz-supervisor/src/debug.rs", "rank": 64, "score": 219236.35250147435 }, { "content": "fn run_cmd(args: &[&str]) -> Result<String, Box<dyn Error>> {\n\n let cmd = Command::new(args[0]).args(&args[1..]).output()?;\n\n if cmd.status.success() {\n\n Ok(String::from_utf8_lossy(&cmd.stdout).trim().to_string())\n\n } else {\n\n Err(From::from(\"failed\"))\n\n }\n\n}\n\n\n", "file_path": "src/proxy-manager/build.rs", "rank": 65, "score": 218523.65804622104 }, { "content": "fn read_file(filename: &str) -> Result<String, Box<dyn Error>> {\n\n let mut file = File::open(filename)?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n contents = contents.trim().to_string();\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "src/proxy-manager/build.rs", "rank": 66, "score": 218523.65804622104 }, { "content": "// Return a pair a reader and writer handle wrapping a Win32 named pipe.\n\n// The writer can be converted to Stdio for a Command.\n\n// The reader can be read from without blocking.\n\npub fn pipe() -> Result<(PipeReaderNonBlocking, os_pipe::PipeWriter)> {\n\n let (reader, writer) = os_pipe::pipe().context(\"Creating named pipes\")?;\n\n let handle = reader.as_raw_handle();\n\n set_nonblocking_mode(handle)?;\n\n Ok((PipeReaderNonBlocking::new(reader), writer))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::{\n\n io::Write,\n\n iter,\n\n sync::{mpsc, Arc},\n\n thread::{sleep, spawn},\n\n time::Duration,\n\n };\n\n\n\n use super::*;\n\n\n\n fn repeated_bytes(val: u8, count: usize) -> Vec<u8> {\n", "file_path": "src/agent/win-util/src/pipe_handle.rs", "rank": 67, "score": 218323.60638688633 }, { "content": "pub fn parse_call_stack(text: &str) -> Result<Vec<StackEntry>> {\n\n // eventually, this should be updated to support multiple callstack formats\n\n asan::parse_asan_call_stack(text)\n\n}\n\n\n", "file_path": "src/agent/stacktrace-parser/src/lib.rs", "rank": 68, "score": 216609.59253167664 }, { "content": "fn read_file(filename: &str) -> Result<String, Box<dyn Error>> {\n\n let mut file = File::open(filename)?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n contents = contents.trim().to_string();\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "src/agent/onefuzz-supervisor/build.rs", "rank": 69, "score": 216278.89466732086 }, { "content": "fn run_cmd(args: &[&str]) -> Result<String, Box<dyn Error>> {\n\n let cmd = Command::new(args[0])\n\n .stdin(Stdio::null())\n\n .args(&args[1..])\n\n .output()?;\n\n if cmd.status.success() {\n\n Ok(String::from_utf8_lossy(&cmd.stdout).trim().to_string())\n\n } else {\n\n Err(From::from(\"failed\"))\n\n }\n\n}\n\n\n", "file_path": "src/agent/onefuzz-supervisor/build.rs", "rank": 70, "score": 216278.89466732086 }, { "content": "fn read_file(filename: &str) -> Result<String, Box<dyn Error>> {\n\n let mut file = File::open(filename)?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n contents = contents.trim().to_string();\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "src/agent/onefuzz-agent/build.rs", "rank": 71, "score": 216278.89466732086 }, { "content": "fn run_cmd(args: &[&str]) -> Result<String, Box<dyn Error>> {\n\n let cmd = Command::new(args[0])\n\n .args(&args[1..])\n\n .stdin(Stdio::null())\n\n .output()?;\n\n if cmd.status.success() {\n\n Ok(String::from_utf8_lossy(&cmd.stdout).trim().to_string())\n\n } else {\n\n Err(From::from(\"failed\"))\n\n }\n\n}\n\n\n", "file_path": "src/agent/onefuzz-agent/build.rs", "rank": 72, "score": 216278.89466732086 }, { "content": "/// Runs the registered functions and terminates the process with the specified exit `code`.\n\n///\n\n/// This function is not called automatically (e.g. via `drop`).\n\npub fn exit_process(code: i32) -> ! {\n\n ATEXIT.exit_process(code)\n\n}\n\n\n", "file_path": "src/agent/atexit/src/lib.rs", "rank": 73, "score": 216201.85376275406 }, { "content": "pub fn default_workers() -> usize {\n\n let cpus = num_cpus::get();\n\n usize::max(1, cpus - 1)\n\n}\n\n\n\n#[derive(Debug, Deserialize, Clone)]\n\npub struct Config {\n\n pub inputs: SyncedDir,\n\n pub readonly_inputs: Option<Vec<SyncedDir>>,\n\n pub crashes: SyncedDir,\n\n pub target_exe: PathBuf,\n\n pub target_env: HashMap<String, String>,\n\n pub target_options: Vec<String>,\n\n\n\n #[serde(default = \"default_workers\")]\n\n pub target_workers: usize,\n\n pub ensemble_sync_delay: Option<u64>,\n\n\n\n #[serde(default = \"default_bool_true\")]\n\n pub check_fuzzer_help: bool,\n", "file_path": "src/agent/onefuzz-agent/src/tasks/fuzz/libfuzzer_fuzz.rs", "rank": 74, "score": 214972.9509287665 }, { "content": "pub fn terminate(process_handle: HANDLE) {\n\n fn terminate_impl(process_handle: HANDLE) -> Result<()> {\n\n check_winapi(|| unsafe { TerminateProcess(process_handle, 0) })\n\n .context(\"TerminateProcess\")?;\n\n Ok(())\n\n }\n\n\n\n if process_handle != INVALID_HANDLE_VALUE && !process_handle.is_null() {\n\n if let Err(err) = terminate_impl(process_handle) {\n\n error!(\"Error terminating process: {}\", err);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/agent/win-util/src/process.rs", "rank": 75, "score": 213716.69578350824 }, { "content": "pub fn write_memory<T: Sized>(\n\n process_handle: HANDLE,\n\n remote_address: LPVOID,\n\n value: &T,\n\n) -> Result<()> {\n\n let mut bytes_written: SIZE_T = 0;\n\n check_winapi(|| unsafe {\n\n WriteProcessMemory(\n\n process_handle,\n\n remote_address,\n\n value as *const T as LPCVOID,\n\n size_of::<T>(),\n\n &mut bytes_written,\n\n )\n\n })\n\n .context(\"writing process memory\")?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/agent/win-util/src/process.rs", "rank": 76, "score": 213712.9024182264 }, { "content": "pub fn set_property(entry: EventData) {\n\n let (key, value) = entry.as_values();\n\n\n\n if entry.can_share_with_microsoft() {\n\n if let Some(mut client) = client_mut(ClientType::Microsoft) {\n\n client\n\n .context_mut()\n\n .properties_mut()\n\n .insert(key.to_owned(), value.to_owned());\n\n }\n\n }\n\n\n\n if let Some(mut client) = client_mut(ClientType::Instance) {\n\n client\n\n .context_mut()\n\n .properties_mut()\n\n .insert(key.to_owned(), value);\n\n }\n\n}\n\n\n", "file_path": "src/agent/onefuzz-telemetry/src/lib.rs", "rank": 77, "score": 211070.87129563827 }, { "content": "pub fn to_argv(command_line: &str) -> Vec<OsString> {\n\n let mut argv: Vec<OsString> = Vec::new();\n\n let mut argc = 0;\n\n unsafe {\n\n let args = shellapi::CommandLineToArgvW(to_wstring(command_line).as_ptr(), &mut argc);\n\n\n\n for i in 0..argc {\n\n argv.push(os_string_from_wide_ptr(*args.offset(i as isize)));\n\n }\n\n\n\n winbase::LocalFree(args as HLOCAL);\n\n }\n\n argv\n\n}\n\n\n\n/// # Safety\n\npub unsafe fn os_string_from_wide_ptr(ptr: *const u16) -> OsString {\n\n let mut len = 0;\n\n while *ptr.offset(len) != 0 {\n\n len += 1;\n", "file_path": "src/agent/win-util/src/string.rs", "rank": 78, "score": 209812.26283904476 }, { "content": "pub fn record(cmd: Command, filter: CmdFilter) -> Result<CommandBlockCov> {\n\n let mut cache = ModuleCache::default();\n\n let mut recorder = Recorder::new(&mut cache, filter);\n\n let timeout = Duration::from_secs(5);\n\n let mut handler = RecorderEventHandler::new(&mut recorder, timeout);\n\n handler.run(cmd)?;\n\n Ok(recorder.into_coverage())\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct RecorderEventHandler<'r, 'c> {\n\n recorder: &'r mut Recorder<'c>,\n\n started: Instant,\n\n timed_out: bool,\n\n timeout: Duration,\n\n}\n\n\n\nimpl<'r, 'c> RecorderEventHandler<'r, 'c> {\n\n pub fn new(recorder: &'r mut Recorder<'c>, timeout: Duration) -> Self {\n\n let started = Instant::now();\n", "file_path": "src/agent/coverage/src/block/windows.rs", "rank": 79, "score": 209539.40898640442 }, { "content": "pub fn get_thread_frame(process_handle: HANDLE, thread_handle: HANDLE) -> Result<FrameContext> {\n\n if process::is_wow64_process(process_handle) {\n\n let mut ctx: WOW64_CONTEXT = unsafe { MaybeUninit::zeroed().assume_init() };\n\n ctx.ContextFlags = WOW64_CONTEXT_ALL;\n\n\n\n check_winapi(|| unsafe { Wow64GetThreadContext(thread_handle, &mut ctx) })\n\n .context(\"Wow64GetThreadContext\")?;\n\n Ok(FrameContext::X86(ctx))\n\n } else {\n\n // required by `CONTEXT`, is a FIXME in winapi right now\n\n let mut ctx: Aligned16<CONTEXT> = unsafe { MaybeUninit::zeroed().assume_init() };\n\n\n\n ctx.0.ContextFlags = CONTEXT_ALL;\n\n check_winapi(|| unsafe { GetThreadContext(thread_handle, &mut ctx.0) })\n\n .context(\"GetThreadContext\")?;\n\n Ok(FrameContext::X64(ctx))\n\n }\n\n}\n\n\n\npub struct ModuleInfo {\n", "file_path": "src/agent/debugger/src/dbghelp.rs", "rank": 80, "score": 207560.32671099884 }, { "content": "pub fn get_memory_info(process_handle: HANDLE, address: u64) -> Result<MemoryInfo> {\n\n let mut mbi = MaybeUninit::zeroed();\n\n let size = unsafe {\n\n VirtualQueryEx(\n\n process_handle,\n\n address as LPVOID,\n\n mbi.as_mut_ptr(),\n\n size_of::<MEMORY_BASIC_INFORMATION>(),\n\n )\n\n };\n\n if size != size_of::<MEMORY_BASIC_INFORMATION>() {\n\n return Err(last_os_error());\n\n }\n\n\n\n let mbi = unsafe { mbi.assume_init() };\n\n Ok(MemoryInfo::new(\n\n mbi.BaseAddress as u64,\n\n mbi.RegionSize as u64,\n\n mbi.Protect,\n\n ))\n", "file_path": "src/agent/win-util/src/memory.rs", "rank": 81, "score": 207560.32671099884 }, { "content": "fn reboot_context_path() -> Result<PathBuf> {\n\n Ok(onefuzz::fs::onefuzz_root()?.join(\"reboot_context.json\"))\n\n}\n\n\n\n#[cfg(test)]\n\npub mod double;\n", "file_path": "src/agent/onefuzz-supervisor/src/reboot.rs", "rank": 82, "score": 207074.87801700173 }, { "content": "fn debug_node_event_state_update(state: NodeState) -> Result<()> {\n\n let event = match state {\n\n NodeState::Init => StateUpdateEvent::Init,\n\n NodeState::Free => StateUpdateEvent::Free,\n\n NodeState::SettingUp => {\n\n let tasks = vec![Uuid::new_v4(), Uuid::new_v4(), Uuid::new_v4()];\n\n StateUpdateEvent::SettingUp { tasks }\n\n }\n\n NodeState::Rebooting => StateUpdateEvent::Rebooting,\n\n NodeState::Ready => StateUpdateEvent::Ready,\n\n NodeState::Busy => StateUpdateEvent::Busy,\n\n NodeState::Done => StateUpdateEvent::Done {\n\n error: None,\n\n script_output: None,\n\n },\n\n };\n\n let event = event.into();\n\n print_json(into_envelope(event))\n\n}\n\n\n", "file_path": "src/agent/onefuzz-supervisor/src/debug.rs", "rank": 83, "score": 204522.07729840765 }, { "content": "fn continue_to_init_execve(tracer: &mut Ptracer) -> Result<Tracee> {\n\n while let Some(tracee) = tracer.wait()? {\n\n if let Stop::SyscallExitStop(..) = &tracee.stop {\n\n return Ok(tracee);\n\n }\n\n\n\n tracer.restart(tracee, Restart::Continue)?;\n\n }\n\n\n\n anyhow::bail!(\"did not see initial execve() in tracee while triaging input\");\n\n}\n", "file_path": "src/agent/onefuzz/src/triage.rs", "rank": 84, "score": 199349.50259534718 }, { "content": "// NOTE: generate_task_id is intended to change the default behavior for local\n\n// fuzzing tasks from generating random task id to using UUID::nil(). This\n\n// enables making the one-shot crash report generation, which isn't really a task,\n\n// consistent across multiple runs.\n\npub fn build_local_context(args: &ArgMatches<'_>, generate_task_id: bool) -> Result<LocalContext> {\n\n let job_id = get_uuid(\"job_id\", args).unwrap_or_else(|_| Uuid::nil());\n\n let task_id = get_uuid(\"task_id\", args).unwrap_or_else(|_| {\n\n if generate_task_id {\n\n Uuid::new_v4()\n\n } else {\n\n Uuid::nil()\n\n }\n\n });\n\n let instance_id = get_uuid(\"instance_id\", args).unwrap_or_else(|_| Uuid::nil());\n\n\n\n let setup_dir = if args.is_present(SETUP_DIR) {\n\n value_t!(args, SETUP_DIR, PathBuf)?\n\n } else if args.is_present(TARGET_EXE) {\n\n value_t!(args, TARGET_EXE, PathBuf)?\n\n .parent()\n\n .map(|x| x.to_path_buf())\n\n .unwrap_or_default()\n\n } else {\n\n PathBuf::default()\n", "file_path": "src/agent/onefuzz-agent/src/local/common.rs", "rank": 85, "score": 197515.85872268403 }, { "content": "pub fn read_memory<T: Copy>(process_handle: HANDLE, remote_address: LPCVOID) -> Result<T> {\n\n let mut buf: MaybeUninit<T> = MaybeUninit::uninit();\n\n check_winapi(|| unsafe {\n\n ReadProcessMemory(\n\n process_handle,\n\n remote_address,\n\n buf.as_mut_ptr() as LPVOID,\n\n size_of::<T>(),\n\n ptr::null_mut(),\n\n )\n\n })\n\n .context(\"Reading process memory\")?;\n\n\n\n let buf = unsafe { buf.assume_init() };\n\n Ok(buf)\n\n}\n\n\n", "file_path": "src/agent/win-util/src/process.rs", "rank": 86, "score": 197220.09645640754 }, { "content": "fn continue_to_init_execve(tracer: &mut Ptracer) -> Result<Tracee> {\n\n while let Some(tracee) = tracer.wait()? {\n\n if let Stop::SyscallExitStop(..) = &tracee.stop {\n\n return Ok(tracee);\n\n }\n\n\n\n tracer.restart(tracee, Restart::Continue)?;\n\n }\n\n\n\n anyhow::bail!(\"did not see initial execve() in tracee while recording coverage\");\n\n}\n", "file_path": "src/agent/coverage/src/block/linux.rs", "rank": 87, "score": 197109.12215568632 }, { "content": "/// Return the duration as a count of 100ns ticks.\n\n/// Defaults to i64::MAX if the duration is too long which is sufficient for job object purposes.\n\nfn duration_as_100ns_ticks(duration: &Duration) -> i64 {\n\n duration\n\n .as_nanos()\n\n .checked_div(100)\n\n .and_then(|v: u128| i64::try_from(v).ok())\n\n .unwrap_or(std::i64::MAX)\n\n}\n\n\n", "file_path": "src/agent/win-util/src/jobs.rs", "rank": 88, "score": 195353.73996998178 }, { "content": "#[derive(Debug, Deserialize, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct Event {\n\n name: String,\n\n count: u64,\n\n}\n\n\n", "file_path": "src/utils/telemetry-stats/src/main.rs", "rank": 89, "score": 195212.6997087105 }, { "content": "pub fn get_cmd_arg(cmd_type: CmdType, args: &clap::ArgMatches<'_>) -> Vec<String> {\n\n let name = match cmd_type {\n\n CmdType::Target => TARGET_OPTIONS,\n\n // CmdType::Supervisor => SUPERVISOR_OPTIONS,\n\n CmdType::Generator => GENERATOR_OPTIONS,\n\n };\n\n\n\n args.values_of_lossy(name).unwrap_or_default()\n\n}\n\n\n", "file_path": "src/agent/onefuzz-agent/src/local/common.rs", "rank": 90, "score": 193801.1020344118 }, { "content": "pub fn build_fuzz_config(args: &clap::ArgMatches<'_>, common: CommonConfig) -> Result<Config> {\n\n let crashes = get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args)?;\n\n let target_exe = get_cmd_exe(CmdType::Target, args)?.into();\n\n let target_options = get_cmd_arg(CmdType::Target, args);\n\n let target_env = get_cmd_env(CmdType::Target, args)?;\n\n\n\n let generator_exe = get_cmd_exe(CmdType::Generator, args)?;\n\n let generator_options = get_cmd_arg(CmdType::Generator, args);\n\n let generator_env = get_cmd_env(CmdType::Generator, args)?;\n\n let readonly_inputs = get_synced_dirs(READONLY_INPUTS, common.job_id, common.task_id, args)?;\n\n\n\n let rename_output = args.is_present(RENAME_OUTPUT);\n\n let check_asan_log = args.is_present(CHECK_ASAN_LOG);\n\n let check_debugger = !args.is_present(DISABLE_CHECK_DEBUGGER);\n\n let check_retry_count = value_t!(args, CHECK_RETRY_COUNT, u64)?;\n\n let target_timeout = Some(value_t!(args, TARGET_TIMEOUT, u64)?);\n\n\n\n let tools = get_synced_dir(TOOLS_DIR, common.job_id, common.task_id, args).ok();\n\n\n\n let ensemble_sync_delay = None;\n", "file_path": "src/agent/onefuzz-agent/src/local/generic_generator.rs", "rank": 91, "score": 193544.03218297637 }, { "content": "pub fn build_fuzz_config(args: &clap::ArgMatches<'_>, common: CommonConfig) -> Result<Config> {\n\n let crashes = get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args)?;\n\n let inputs = get_synced_dir(INPUTS_DIR, common.job_id, common.task_id, args)?;\n\n\n\n let target_exe = get_cmd_exe(CmdType::Target, args)?.into();\n\n let target_env = get_cmd_env(CmdType::Target, args)?;\n\n let target_options = get_cmd_arg(CmdType::Target, args);\n\n\n\n let target_workers = value_t!(args, \"target_workers\", usize).unwrap_or_default();\n\n let readonly_inputs = None;\n\n let check_fuzzer_help = args.is_present(CHECK_FUZZER_HELP);\n\n let expect_crash_on_failure = !args.is_present(DISABLE_EXPECT_CRASH_ON_FAILURE);\n\n\n\n let ensemble_sync_delay = None;\n\n\n\n let config = Config {\n\n inputs,\n\n readonly_inputs,\n\n crashes,\n\n target_exe,\n", "file_path": "src/agent/onefuzz-agent/src/local/libfuzzer_fuzz.rs", "rank": 92, "score": 193544.03218297637 }, { "content": "fn duration_from_100ns_ticks(duration: LARGE_INTEGER) -> Duration {\n\n let duration = unsafe { *duration.QuadPart() } as u64;\n\n Duration::from_nanos(duration.saturating_mul(100))\n\n}\n\n\n", "file_path": "src/agent/win-util/src/jobs.rs", "rank": 93, "score": 193297.9072059811 }, { "content": "pub fn from_exception_record(exception_record: &EXCEPTION_RECORD) -> FastFail {\n\n if exception_record.NumberParameters == 1 {\n\n fast_fail_from_u32(exception_record.ExceptionInformation[0] as u32)\n\n } else {\n\n FastFail::UnknownFastFailCode\n\n }\n\n}\n", "file_path": "src/agent/input-tester/src/test_result/fast_fail.rs", "rank": 94, "score": 193114.6033583195 }, { "content": "fn write_instruction_byte(process_handle: HANDLE, ip: u64, b: u8) -> Result<()> {\n\n let orig_byte = [b; 1];\n\n let remote_address = ip as LPVOID;\n\n process::write_memory_slice(process_handle, remote_address, &orig_byte)?;\n\n process::flush_instruction_cache(process_handle, remote_address, orig_byte.len())?;\n\n Ok(())\n\n}\n", "file_path": "src/agent/debugger/src/target.rs", "rank": 96, "score": 188102.6767102898 }, { "content": "fn main() -> Result<()> {\n\n env_logger::Builder::from_env(env_logger::Env::default().default_filter_or(\"info\")).init();\n\n\n\n let license_cmd = SubCommand::with_name(\"licenses\").about(\"display third-party licenses\");\n\n\n\n let version = format!(\n\n \"{} onefuzz:{} git:{}\",\n\n crate_version!(),\n\n env!(\"ONEFUZZ_VERSION\"),\n\n env!(\"GIT_VERSION\")\n\n );\n\n\n\n let app = App::new(\"onefuzz-proxy\")\n\n .version(version.as_str())\n\n .arg(\n\n Arg::with_name(\"config\")\n\n .long(\"config\")\n\n .short(\"c\")\n\n .takes_value(true),\n\n )\n", "file_path": "src/proxy-manager/src/main.rs", "rank": 97, "score": 185190.57040532568 }, { "content": "#[cfg(target_os = \"windows\")]\n\nfn main() -> Result<()> {\n\n env_logger::init();\n\n\n\n let opt = Opt::from_args();\n\n let filter = opt.load_filter_or_default()?;\n\n\n\n log::info!(\"recording coverage for: {:?}\", opt.cmd);\n\n\n\n let mut cmd = Command::new(&opt.cmd[0]);\n\n cmd.args(&opt.cmd[1..]);\n\n\n\n let coverage = coverage::block::windows::record(cmd, filter)?;\n\n\n\n for (module, cov) in coverage.iter() {\n\n let total = cov.blocks.len();\n\n let hit: u32 = cov.blocks.values().map(|b| b.count).sum();\n\n let percent = 100.0 * (hit as f64) / (total as f64);\n\n log::info!(\"module = {}, {} / {} ({:.2}%)\", module, hit, total, percent);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/agent/coverage/examples/block_coverage.rs", "rank": 98, "score": 183361.35645734382 } ]
Rust
tests/auth_test.rs
andygrove/rust-etcd
c1e51b3dff285ab62fd4424b3d4607b33721f774
use etcd::auth::{self, AuthChange, NewUser, Role, RoleUpdate, UserUpdate}; use etcd::{BasicAuth, Client}; use futures::future::Future; use tokio::runtime::Runtime; #[test] fn auth() { let client = Client::new(&["http://etcd:2379"], None).unwrap(); let client_2 = client.clone(); let client_3 = client.clone(); let basic_auth = BasicAuth { username: "root".into(), password: "secret".into(), }; let authed_client = Client::new(&["http://etcd:2379"], Some(basic_auth)).unwrap(); let authed_client_2 = authed_client.clone(); let authed_client_3 = authed_client.clone(); let authed_client_4 = authed_client.clone(); let authed_client_5 = authed_client.clone(); let authed_client_6 = authed_client.clone(); let authed_client_7 = authed_client.clone(); let authed_client_8 = authed_client.clone(); let authed_client_9 = authed_client.clone(); let root_user = NewUser::new("root", "secret"); let work: Box<dyn Future<Item = (), Error = ()> + Send> = Box::new( auth::status(&client) .then(move |res| { let response = res.unwrap(); assert_eq!(response.data, false); auth::create_user(&client_2, root_user) }) .then(move |res| { let response = res.unwrap(); assert_eq!(response.data.name(), "root"); auth::enable(&client_3) }) .then(move |res| { let response = res.unwrap(); assert_eq!(response.data, AuthChange::Changed); let mut update_guest = RoleUpdate::new("guest"); update_guest.revoke_kv_write_permission("/*"); auth::update_role(&authed_client, update_guest) }) .then(move |res| { res.unwrap(); let mut rkt_role = Role::new("rkt"); rkt_role.grant_kv_read_permission("/rkt/*"); rkt_role.grant_kv_write_permission("/rkt/*"); auth::create_role(&authed_client_2, rkt_role) }) .then(move |res| { res.unwrap(); let mut rkt_user = NewUser::new("rkt", "secret"); rkt_user.add_role("rkt"); auth::create_user(&authed_client_3, rkt_user) }) .then(move |res| { let response = res.unwrap(); let rkt_user = response.data; assert_eq!(rkt_user.name(), "rkt"); let role_name = &rkt_user.role_names()[0]; assert_eq!(role_name, "rkt"); let mut update_rkt_user = UserUpdate::new("rkt"); update_rkt_user.update_password("secret2"); update_rkt_user.grant_role("root"); auth::update_user(&authed_client_4, update_rkt_user) }) .then(move |res| { res.unwrap(); auth::get_role(&authed_client_5, "rkt") }) .then(move |res| { let response = res.unwrap(); let role = response.data; assert!(role.kv_read_permissions().contains(&"/rkt/*".to_owned())); assert!(role.kv_write_permissions().contains(&"/rkt/*".to_owned())); auth::delete_user(&authed_client_6, "rkt") }) .then(move |res| { res.unwrap(); auth::delete_role(&authed_client_7, "rkt") }) .then(move |res| { res.unwrap(); let mut update_guest = RoleUpdate::new("guest"); update_guest.grant_kv_write_permission("/*"); auth::update_role(&authed_client_8, update_guest) }) .then(move |res| { res.unwrap(); auth::disable(&authed_client_9) }) .then(|res| { let response = res.unwrap(); assert_eq!(response.data, AuthChange::Changed); Ok(()) }), ); let _ = Runtime::new() .expect("failed to create Tokio runtime") .block_on(work); }
use etcd::auth::{self, AuthChange, NewUser, Role, RoleUpdate, UserUpdate}; use etcd::{BasicAuth, Client}; use futures::future::Future; use tokio::runtime::Runtime; #[test] fn auth() { let client = Client::new(&["http://etcd:2379"], None).unwrap(); let client_2 = client.clone(); let client_3 = client.clone(); let basic_auth = BasicAuth { username: "root".into(), password: "secret".into(), }; let authed_client = Client::new(&["http://etcd:2379"], Some(basic_auth)).unwrap(); let authed_client_2 = authed_client.clone(); let authed_client_3 = authed_client.clone(); let authed_client_4 = authed_client.clone(); let authed_client_5 = authed_client.clone(); let authed_client_6 = authed_client.clone(); let authed_client_7 = authed_client.clone(); let authed_client_8 = authed_client.clone(); let authed_client_9 = authed_client.clone(); let root_user = NewUser::new("root", "secret"); let work: Box<dyn Future<Item = (), Error = ()> + Send> = Box::new( auth::status(&client) .then(move |res| { let response = res.unwrap(); assert_eq!(response.data, false); auth::create_user(&client_2, root_user) }) .then(move |res| { let response = res.unwrap(); assert_eq!(response.data.name(), "root"); auth::enable(&client_3) }) .then(move |res| { let response = res.unwrap(); assert_eq!(response.data, AuthChange::Changed); let mut update_guest = RoleUpdate::new("guest"); update_guest.revoke_kv_write_permission("/*"); auth::update_role(&authed_client, update_guest) }) .then(move |res| { res.unwrap(); let mut rkt_role = Role::new("rkt"); rkt_role.grant_kv_read_permission("/rkt/*"); rkt_role.grant_kv_write_permission("/rkt/*"); auth::create_role(&authed_client_2, rkt_role) }) .then(move |res| { res.unwrap(); let mut rkt_user = NewUser::new("rkt", "secret"); rkt_user.add_role("rkt"); auth::create_user(&authed_client_3, rkt_user) }) .then(move |res| { let response = res.unwrap(); let rkt_user = response.data; assert_eq!(rkt_user.name(), "rkt");
let role_name = &rkt_user.role_names()[0]; assert_eq!(role_name, "rkt"); let mut update_rkt_user = UserUpdate::new("rkt"); update_rkt_user.update_password("secret2"); update_rkt_user.grant_role("root"); auth::update_user(&authed_client_4, update_rkt_user) }) .then(move |res| { res.unwrap(); auth::get_role(&authed_client_5, "rkt") }) .then(move |res| { let response = res.unwrap(); let role = response.data; assert!(role.kv_read_permissions().contains(&"/rkt/*".to_owned())); assert!(role.kv_write_permissions().contains(&"/rkt/*".to_owned())); auth::delete_user(&authed_client_6, "rkt") }) .then(move |res| { res.unwrap(); auth::delete_role(&authed_client_7, "rkt") }) .then(move |res| { res.unwrap(); let mut update_guest = RoleUpdate::new("guest"); update_guest.grant_kv_write_permission("/*"); auth::update_role(&authed_client_8, update_guest) }) .then(move |res| { res.unwrap(); auth::disable(&authed_client_9) }) .then(|res| { let response = res.unwrap(); assert_eq!(response.data, AuthChange::Changed); Ok(()) }), ); let _ = Runtime::new() .expect("failed to create Tokio runtime") .block_on(work); }
function_block-function_prefix_line
[]
Rust
src/bulk_string/mod.rs
WatchDG/rust-resp-protocol
b36689915d7fd3456a80165297a14d9e1a1257b6
use crate::RespError; use bytes::{BufMut, Bytes, BytesMut}; pub const EMPTY_BULK_STRING: BulkString = BulkString(Bytes::from_static(b"$0\r\n\r\n")); pub const NULL_BULK_STRING: BulkString = BulkString(Bytes::from_static(b"$-1\r\n")); #[derive(Debug, Clone, PartialEq)] pub struct BulkString(Bytes); impl BulkString { pub fn new(input: &[u8]) -> Self { let length = input.len(); if length == 0 { return EMPTY_BULK_STRING; } let length_string = length.to_string(); let mut bytes = BytesMut::with_capacity(input.len() + length_string.len() + 5); bytes.put_u8(0x24); bytes.put_slice(length_string.as_bytes()); bytes.put_u8(0x0d); bytes.put_u8(0x0a); bytes.put_slice(input); bytes.put_u8(0x0d); bytes.put_u8(0x0a); Self::from_bytes(bytes.freeze()) } #[inline] pub fn is_empty(&self) -> bool { self == EMPTY_BULK_STRING } #[inline] pub fn is_null(&self) -> bool { self == NULL_BULK_STRING } #[inline] pub fn bytes(&self) -> Bytes { self.0.clone() } #[inline] pub fn len(&self) -> usize { self.0.len() } #[inline] pub fn from_bytes(input: Bytes) -> Self { Self(input) } #[inline] pub fn from_slice(input: &[u8]) -> Self { let bytes = Bytes::copy_from_slice(input); Self::from_bytes(bytes) } #[inline] pub unsafe fn from_raw(ptr: *mut u8, length: usize) -> Self { let vector = Vec::from_raw_parts(ptr, length, length); let bytes = Bytes::from(vector); Self::from_bytes(bytes) } pub fn while_valid(input: &[u8], start: &mut usize, end: &usize) -> Result<(), RespError> { let mut index = *start; if index + 4 >= *end { return Err(RespError::InvalidValue); } if input[index] != 0x24 { return Err(RespError::InvalidFirstChar); } index += 1; if input[index] == 0x2d { if input[index + 1] != 0x31 || input[index + 2] != 0x0d || input[index + 3] != 0x0a { return Err(RespError::InvalidNullValue); } *start = index + 4; return Ok(()); } if input[index] == 0x30 && input[index + 1] >= 0x30 && input[index + 1] <= 0x39 { return Err(RespError::InvalidLength); } while index < *end && input[index] >= 0x30 && input[index] <= 0x39 { index += 1; } if index + 1 >= *end || input[index] != 0x0d || input[index + 1] != 0x0a { return Err(RespError::InvalidLengthSeparator); } let length = unsafe { String::from_utf8_unchecked(input[*start + 1..index].to_vec()) .parse::<usize>() .unwrap() }; index += 2; let value_start_index = index; while index < *end && index - value_start_index <= length && input[index] != 0x0d && input[index] != 0x0a { index += 1; } if length != index - value_start_index { return Err(RespError::LengthsNotMatch); } if index + 1 >= *end || input[index] != 0x0d || input[index + 1] != 0x0a { return Err(RespError::InvalidTerminate); } *start = index + 2; Ok(()) } pub fn parse(input: &[u8], start: &mut usize, end: &usize) -> Result<Self, RespError> { let mut index = *start; Self::while_valid(input, &mut index, end)?; let value = Self::from_slice(&input[*start..index]); *start = index; Ok(value) } } impl<'a> PartialEq<BulkString> for &'a BulkString { fn eq(&self, other: &BulkString) -> bool { self.0 == other.bytes() } fn ne(&self, other: &BulkString) -> bool { self.0 != other.bytes() } } #[cfg(test)] mod tests_bulk_string { use crate::{BulkString, EMPTY_BULK_STRING, NULL_BULK_STRING}; use bytes::Bytes; #[test] fn test_new() { let bulk_string: BulkString = BulkString::new(b"foobar"); assert_eq!(bulk_string.bytes(), Bytes::from_static(b"$6\r\nfoobar\r\n")); } #[test] fn test_new_empty() { let bulk_string: BulkString = BulkString::new(b""); assert_eq!(bulk_string.bytes(), Bytes::from_static(b"$0\r\n\r\n")); } #[test] fn test_from_bytes() { let bulk_string: BulkString = BulkString::from_bytes(Bytes::from_static(b"$6\r\nfoobar\r\n")); assert_eq!(bulk_string.bytes(), Bytes::from_static(b"$6\r\nfoobar\r\n")); } #[test] fn test_from_slice() { let bulk_string: BulkString = BulkString::from_slice(Vec::from("$6\r\nfoobar\r\n").as_slice()); assert_eq!(bulk_string.bytes(), Bytes::from_static(b"$6\r\nfoobar\r\n")); } #[test] fn test_is_empty() { assert_eq!(EMPTY_BULK_STRING.is_empty(), true) } #[test] fn test_is_null() { assert_eq!(NULL_BULK_STRING.is_null(), true) } #[test] fn test_parse() { let string = "$6\r\nfoobar\r\n"; let mut cursor = 0; assert_eq!( BulkString::parse(string.as_bytes(), &mut cursor, &string.len()).unwrap(), BulkString::new(b"foobar") ); assert_eq!(cursor, 12); } #[test] fn test_parse_empty() { let string = "$0\r\n\r\n"; let mut cursor = 0; assert_eq!( BulkString::parse(string.as_bytes(), &mut cursor, &string.len()).unwrap(), EMPTY_BULK_STRING ); assert_eq!(cursor, 6); } #[test] fn test_parse_null() { let string = "$-1\r\n"; let mut cursor = 0; assert_eq!( BulkString::parse(string.as_bytes(), &mut cursor, &string.len()).unwrap(), NULL_BULK_STRING ); assert_eq!(cursor, 5); } }
use crate::RespError; use bytes::{BufMut, Bytes, BytesMut}; pub const EMPTY_BULK_STRING: BulkString = BulkString(Bytes::from_static(b"$0\r\n\r\n")); pub const NULL_BULK_STRING: BulkString = BulkString(Bytes::from_static(b"$-1\r\n")); #[derive(Debug, Clone, PartialEq)] pub struct BulkString(Bytes); impl BulkString { pub fn new(input: &[u8]) -> Self { let length = input.len(); if length == 0 { return EMPTY_BULK_STRING; } let length_string = length.to_string(); let mut bytes = BytesMut::with_capacity(input.len() + length_string.len() + 5); bytes.put_u8(0x24); bytes.put_slice(length_string.as_bytes()); bytes.put_u8(0x0d); bytes.put_u8(0x0a); bytes.put_slice(input); bytes.put_u8(0x0d); bytes.put_u8(0x0a); Self::from_bytes(bytes.freeze()) } #[inline] pub fn is_empty(&self) -> bool { self == EMPTY_BULK_STRING } #[inline] pub fn is_null(&self) -> bool { self == NULL_BULK_STRING } #[inline] pub fn bytes(&self) -> Bytes { self.0.clone() } #[inline] pub fn len(&self) -> usize { self.0.len() } #[inline] pub fn from_bytes(input: Bytes) -> Self { Self(input) } #[inline] pub fn from_slice(input: &[u8]) -> Self { let bytes = Bytes::copy_from_slice(input); Self::from_bytes(bytes) } #[inline] pub unsafe fn from_raw(ptr: *mut u8, length: usize) -> Self { let vector = Vec::from_raw_parts(ptr, length, length); let bytes = Bytes::from(vector); Self::from_bytes(bytes) } pub fn while_valid(input: &[u8], start: &mut usize, end: &usize) -> Result<(), RespError> { let mut index = *start; if index + 4 >= *end { return Err(RespError::InvalidValue); } if input[index] != 0x24 { return Err(RespError::InvalidFirstChar); } index += 1; if input[index] == 0x2d { if input[index + 1] != 0x31 || input[index + 2] != 0x0d || input[index + 3] != 0x0a { return Err(RespError::InvalidNullValue); } *start = index + 4; return Ok(()); } if input[index] == 0x30 && input[index + 1] >= 0x30 && input[index + 1] <= 0x39 { return Err(RespError::InvalidLength); } while index < *end && input[index] >= 0x30 && input[index] <= 0x39 { index += 1; } if index + 1 >= *end || input[index] != 0x0d || input[index + 1] != 0x0a { return Err(RespError::InvalidLengthSeparator); } let length = unsafe { String::from_utf8_unchecked(input[*start + 1..index].to_vec()) .parse::<usize>() .unwrap() }; index += 2; let value_start_index = index; while index < *end && index - value_start_index <= length && input[index] != 0x0d && input[index] != 0x0a { index += 1; } if length != index - value_start_index { return Err(RespError::LengthsNotMatch); } if index + 1 >= *end || input[index] != 0x0d || input[index + 1] != 0x0a { return Err(RespError::InvalidTerminate); } *start = index + 2; Ok(()) }
} impl<'a> PartialEq<BulkString> for &'a BulkString { fn eq(&self, other: &BulkString) -> bool { self.0 == other.bytes() } fn ne(&self, other: &BulkString) -> bool { self.0 != other.bytes() } } #[cfg(test)] mod tests_bulk_string { use crate::{BulkString, EMPTY_BULK_STRING, NULL_BULK_STRING}; use bytes::Bytes; #[test] fn test_new() { let bulk_string: BulkString = BulkString::new(b"foobar"); assert_eq!(bulk_string.bytes(), Bytes::from_static(b"$6\r\nfoobar\r\n")); } #[test] fn test_new_empty() { let bulk_string: BulkString = BulkString::new(b""); assert_eq!(bulk_string.bytes(), Bytes::from_static(b"$0\r\n\r\n")); } #[test] fn test_from_bytes() { let bulk_string: BulkString = BulkString::from_bytes(Bytes::from_static(b"$6\r\nfoobar\r\n")); assert_eq!(bulk_string.bytes(), Bytes::from_static(b"$6\r\nfoobar\r\n")); } #[test] fn test_from_slice() { let bulk_string: BulkString = BulkString::from_slice(Vec::from("$6\r\nfoobar\r\n").as_slice()); assert_eq!(bulk_string.bytes(), Bytes::from_static(b"$6\r\nfoobar\r\n")); } #[test] fn test_is_empty() { assert_eq!(EMPTY_BULK_STRING.is_empty(), true) } #[test] fn test_is_null() { assert_eq!(NULL_BULK_STRING.is_null(), true) } #[test] fn test_parse() { let string = "$6\r\nfoobar\r\n"; let mut cursor = 0; assert_eq!( BulkString::parse(string.as_bytes(), &mut cursor, &string.len()).unwrap(), BulkString::new(b"foobar") ); assert_eq!(cursor, 12); } #[test] fn test_parse_empty() { let string = "$0\r\n\r\n"; let mut cursor = 0; assert_eq!( BulkString::parse(string.as_bytes(), &mut cursor, &string.len()).unwrap(), EMPTY_BULK_STRING ); assert_eq!(cursor, 6); } #[test] fn test_parse_null() { let string = "$-1\r\n"; let mut cursor = 0; assert_eq!( BulkString::parse(string.as_bytes(), &mut cursor, &string.len()).unwrap(), NULL_BULK_STRING ); assert_eq!(cursor, 5); } }
pub fn parse(input: &[u8], start: &mut usize, end: &usize) -> Result<Self, RespError> { let mut index = *start; Self::while_valid(input, &mut index, end)?; let value = Self::from_slice(&input[*start..index]); *start = index; Ok(value) }
function_block-full_function
[ { "content": " #[inline]\n\n pub fn from_bytes(input: Bytes) -> Self {\n\n Self(input)\n\n }\n\n\n\n #[inline]\n\n pub fn from_slice(input: &[u8]) -> Self {\n\n let bytes = Bytes::copy_from_slice(input);\n\n Self::from_bytes(bytes)\n\n }\n\n\n\n #[inline]\n\n pub unsafe fn from_raw(ptr: *mut u8, length: usize) -> Self {\n\n let vector = Vec::from_raw_parts(ptr, length, length);\n\n let bytes = Bytes::from(vector);\n\n Self::from_bytes(bytes)\n\n }\n\n\n\n pub fn while_valid(input: &[u8], start: &mut usize, end: &usize) -> Result<(), RespError> {\n\n let mut index = *start;\n", "file_path": "src/error/mod.rs", "rank": 0, "score": 26.08511387315839 }, { "content": " Self::from_bytes(bytes)\n\n }\n\n\n\n #[inline]\n\n pub unsafe fn from_raw(ptr: *mut u8, length: usize) -> Self {\n\n let vector = Vec::from_raw_parts(ptr, length, length);\n\n let bytes = Bytes::from(vector);\n\n Self::from_bytes(bytes)\n\n }\n\n\n\n pub fn while_valid(input: &[u8], start: &mut usize, end: &usize) -> Result<(), RespError> {\n\n let mut index = *start;\n\n if index >= *end || input[index] != 0x3a {\n\n return Err(RespError::InvalidFirstChar);\n\n }\n\n index += 1;\n\n while index < *end && input[index] != 0x0d && input[index] != 0x0a {\n\n index += 1;\n\n }\n\n if index + 1 >= *end || input[index] != 0x0d || input[index + 1] != 0x0a {\n", "file_path": "src/integer/mod.rs", "rank": 2, "score": 24.325900325020964 }, { "content": "\n\n #[inline]\n\n pub fn from_bytes(input: Bytes) -> Self {\n\n Self(input)\n\n }\n\n\n\n #[inline]\n\n pub fn from_slice(input: &[u8]) -> Self {\n\n let bytes = Bytes::copy_from_slice(input);\n\n Self::from_bytes(bytes)\n\n }\n\n\n\n #[inline]\n\n pub unsafe fn from_raw(ptr: *mut u8, length: usize) -> Self {\n\n let vector = Vec::from_raw_parts(ptr, length, length);\n\n let bytes = Bytes::from(vector);\n\n Self::from_bytes(bytes)\n\n }\n\n\n\n pub fn while_valid(input: &[u8], start: &mut usize, end: &usize) -> Result<(), RespError> {\n", "file_path": "src/array/mod.rs", "rank": 3, "score": 24.261499207985675 }, { "content": " /// let string: String = \"+OK\\r\\n\".to_owned();\n\n /// let mut mdrop_string: ManuallyDrop<String> = ManuallyDrop::new(string);\n\n /// let simple_string: SimpleString = unsafe { SimpleString::from_raw(mdrop_string.as_mut_ptr(), mdrop_string.len()) };\n\n /// ```\n\n #[inline]\n\n pub unsafe fn from_raw(ptr: *mut u8, length: usize) -> Self {\n\n let vector = Vec::from_raw_parts(ptr, length, length);\n\n let bytes = Bytes::from(vector);\n\n Self::from_bytes(bytes)\n\n }\n\n\n\n pub fn while_valid(input: &[u8], start: &mut usize, end: &usize) -> Result<(), RespError> {\n\n let mut index = *start;\n\n if index >= *end || input[index] != 0x2b {\n\n return Err(RespError::InvalidFirstChar);\n\n }\n\n index += 1;\n\n while index < *end && input[index] != 0x0d && input[index] != 0x0a {\n\n index += 1;\n\n }\n", "file_path": "src/simple_string/mod.rs", "rank": 4, "score": 23.133281933366654 }, { "content": " return Err(RespError::InvalidTerminate);\n\n }\n\n *start = index + 2;\n\n Ok(())\n\n }\n\n\n\n pub fn parse(input: &[u8], start: &mut usize, end: &usize) -> Result<Self, RespError> {\n\n let mut index = *start;\n\n Self::while_valid(input, &mut index, end)?;\n\n let value = Self::from_slice(&input[*start..index]);\n\n *start = index;\n\n Ok(value)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests_integer {\n\n use crate::integer::Integer;\n\n use bytes::Bytes;\n\n\n", "file_path": "src/integer/mod.rs", "rank": 6, "score": 21.646413117390654 }, { "content": " return Err(RespError::InvalidValue);\n\n }\n\n }\n\n count += 1;\n\n }\n\n *start = index;\n\n Ok(())\n\n }\n\n\n\n pub fn parse(input: &[u8], start: &mut usize, end: &usize) -> Result<Self, RespError> {\n\n let mut index = *start;\n\n Self::while_valid(input, &mut index, end)?;\n\n let value = Self::from_slice(&input[*start..index]);\n\n *start = index;\n\n Ok(value)\n\n }\n\n}\n\n\n\nimpl<'a> PartialEq<Array> for &'a Array {\n\n fn eq(&self, other: &Array) -> bool {\n", "file_path": "src/array/mod.rs", "rank": 7, "score": 21.577444455226004 }, { "content": " let length = self.0.len();\n\n let mut bytes = self.0.slice(1..(length - 2));\n\n let mut vector = Vec::<u8>::with_capacity(length - 3);\n\n unsafe {\n\n vector.set_len(length - 3);\n\n }\n\n bytes.copy_to_slice(vector.as_mut_slice());\n\n vector\n\n }\n\n\n\n #[inline]\n\n pub fn bytes(&self) -> Bytes {\n\n self.0.clone()\n\n }\n\n\n\n #[inline]\n\n pub fn len(&self) -> usize {\n\n self.0.len()\n\n }\n\n\n", "file_path": "src/integer/mod.rs", "rank": 8, "score": 21.192431625860795 }, { "content": " if index + 1 >= *end || input[index] != 0x0d || input[index + 1] != 0x0a {\n\n return Err(RespError::InvalidTerminate);\n\n };\n\n *start = index + 2;\n\n Ok(())\n\n }\n\n\n\n pub fn parse(input: &[u8], start: &mut usize, end: &usize) -> Result<Self, RespError> {\n\n let mut index = *start;\n\n Self::while_valid(input, &mut index, end)?;\n\n let value = Self::from_slice(&input[*start..index]);\n\n *start = index;\n\n Ok(value)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests_simple_string {\n\n use crate::simple_string::SimpleString;\n\n use bytes::Bytes;\n", "file_path": "src/simple_string/mod.rs", "rank": 9, "score": 20.592813420839537 }, { "content": " pub fn validate_value(input: &[u8]) -> Result<(), RespError> {\n\n let mut index = 0;\n\n let length = input.len();\n\n while index < length && input[index] != 0x0a && input[index] != 0x0d {\n\n index += 1;\n\n }\n\n if index != length {\n\n return Err(RespError::InvalidValue);\n\n }\n\n Ok(())\n\n }\n\n\n\n #[inline]\n\n pub fn from_bytes(input: Bytes) -> Self {\n\n Self(input)\n\n }\n\n\n\n #[inline]\n\n pub fn from_slice(input: &[u8]) -> Self {\n\n let bytes = Bytes::copy_from_slice(input);\n", "file_path": "src/integer/mod.rs", "rank": 10, "score": 19.80010049689248 }, { "content": " #[inline]\n\n pub fn to_vec(&self) -> Vec<u8> {\n\n let length = self.0.len();\n\n let mut vector = Vec::<u8>::with_capacity(length);\n\n unsafe {\n\n vector.set_len(length - 3);\n\n }\n\n self.bytes().copy_to_slice(vector.as_mut_slice());\n\n vector\n\n }\n\n\n\n #[inline]\n\n pub fn is_empty(&self) -> bool {\n\n self == EMPTY_ARRAY\n\n }\n\n\n\n #[inline]\n\n pub fn is_null(&self) -> bool {\n\n self == NULL_ARRAY\n\n }\n", "file_path": "src/array/mod.rs", "rank": 11, "score": 19.698059230846987 }, { "content": " let mut vector = Vec::<u8>::with_capacity(length - 3);\n\n unsafe {\n\n vector.set_len(length - 3);\n\n }\n\n bytes.copy_to_slice(vector.as_mut_slice());\n\n vector\n\n }\n\n\n\n pub fn validate_value(input: &[u8]) -> Result<(), RespError> {\n\n let mut index = 0;\n\n let length = input.len();\n\n while index < length && input[index] != 0x0d && input[index] != 0x0a {\n\n index += 1;\n\n }\n\n if index != length {\n\n return Err(RespError::InvalidValue);\n\n }\n\n Ok(())\n\n }\n\n\n", "file_path": "src/error/mod.rs", "rank": 12, "score": 19.20027265960395 }, { "content": " /// let simple_string: SimpleString = SimpleString::new(b\"OK\");\n\n /// let value: Vec<u8> = simple_string.value();\n\n /// println!(\"{:?}\", value); // [79, 75]\n\n /// ```\n\n #[inline]\n\n pub fn value(&self) -> Vec<u8> {\n\n let length = self.len();\n\n let mut bytes = self.bytes().slice(1..(length - 2));\n\n let mut vector = Vec::<u8>::with_capacity(length - 3);\n\n unsafe {\n\n vector.set_len(length - 3);\n\n }\n\n bytes.copy_to_slice(vector.as_mut_slice());\n\n vector\n\n }\n\n\n\n ///\n\n ///\n\n /// ``` rust\n\n /// use resp_protocol::SimpleString;\n", "file_path": "src/simple_string/mod.rs", "rank": 13, "score": 17.941817813615543 }, { "content": " bytes.put_slice(input);\n\n bytes.put_u8(0x0d); // CR\n\n bytes.put_u8(0x0a); // LF\n\n Self::from_bytes(bytes.freeze())\n\n }\n\n\n\n #[inline]\n\n pub fn bytes(&self) -> Bytes {\n\n self.0.clone()\n\n }\n\n\n\n #[inline]\n\n pub fn len(&self) -> usize {\n\n self.0.len()\n\n }\n\n\n\n #[inline]\n\n pub fn value(&self) -> Vec<u8> {\n\n let length = self.0.len();\n\n let mut bytes = self.0.slice(1..(length - 2));\n", "file_path": "src/error/mod.rs", "rank": 14, "score": 17.85294173398943 }, { "content": " if index >= *end || input[index] != 0x2d {\n\n return Err(RespError::InvalidFirstChar);\n\n }\n\n index += 1;\n\n while index < *end && input[index] != 0x0d && input[index] != 0x0a {\n\n index += 1;\n\n }\n\n if index + 1 >= *end || input[index] != 0x0d || input[index + 1] != 0x0a {\n\n return Err(RespError::InvalidTerminate);\n\n }\n\n *start = index + 2;\n\n Ok(())\n\n }\n\n\n\n pub fn parse(input: &[u8], start: &mut usize, end: &usize) -> Result<Self, RespError> {\n\n let mut index = *start;\n\n Self::while_valid(input, &mut index, end)?;\n\n let value = Self::from_slice(&input[*start..index]);\n\n *start = index;\n\n Ok(value)\n", "file_path": "src/error/mod.rs", "rank": 15, "score": 17.254446075708984 }, { "content": "use crate::RespError;\n\nuse bytes::{Buf, BufMut, Bytes, BytesMut};\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct Integer(Bytes);\n\n\n\nimpl Integer {\n\n #[inline]\n\n pub fn new(input: i64) -> Self {\n\n let string = input.to_string();\n\n let mut bytes = BytesMut::with_capacity(string.len() + 3);\n\n bytes.put_u8(0x3a); // \":\"\n\n bytes.put_slice(string.as_bytes());\n\n bytes.put_u8(0x0d); // CR\n\n bytes.put_u8(0x0a); // LF\n\n Self::from_bytes(bytes.freeze())\n\n }\n\n\n\n #[inline]\n\n pub fn raw_value(&self) -> Vec<u8> {\n", "file_path": "src/integer/mod.rs", "rank": 16, "score": 16.46274886509981 }, { "content": "use crate::RespError;\n\nuse bytes::{Buf, BufMut, Bytes, BytesMut};\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct SimpleString(Bytes);\n\n\n\n/// Simple string type\n\nimpl SimpleString {\n\n /// Build a new Simple string\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use resp_protocol::SimpleString;\n\n ///\n\n /// let simple_string = SimpleString::new(b\"OK\");\n\n /// ```\n\n #[inline]\n\n pub fn new(value: &[u8]) -> Self {\n\n let mut bytes = BytesMut::with_capacity(value.len() + 3);\n\n bytes.put_u8(0x2b); // \"+\"\n", "file_path": "src/simple_string/mod.rs", "rank": 17, "score": 16.456961758176792 }, { "content": " ///\n\n /// let simple_string: SimpleString = SimpleString::new(b\"OK\");\n\n /// let value_length: usize = simple_string.value_len();\n\n /// println!(\"{:?}\", value_length); // 2\n\n /// ```\n\n #[inline]\n\n pub fn value_len(&self) -> usize {\n\n self.len() - 3\n\n }\n\n\n\n pub fn validate_value(input: &[u8]) -> Result<(), RespError> {\n\n let mut index = 0;\n\n let length = input.len();\n\n while index < length && input[index] != 0x0d && input[index] != 0x0a {\n\n index += 1;\n\n }\n\n if index != length {\n\n return Err(RespError::InvalidValue);\n\n }\n\n Ok(())\n", "file_path": "src/simple_string/mod.rs", "rank": 18, "score": 16.379134924272403 }, { "content": "use crate::RespError;\n\nuse bytes::{Buf, BufMut, Bytes, BytesMut};\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct Error(Bytes);\n\n\n\n/// Error type\n\nimpl Error {\n\n /// Build a new Error\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use resp_protocol::Error;\n\n ///\n\n /// let error = Error::new(b\"Invalid type.\");\n\n /// ```\n\n #[inline]\n\n pub fn new(input: &[u8]) -> Self {\n\n let mut bytes = BytesMut::with_capacity(input.len() + 3);\n\n bytes.put_u8(0x2d); // \"-\"\n", "file_path": "src/error/mod.rs", "rank": 20, "score": 15.953708095774603 }, { "content": "use crate::{BulkString, Error, Integer, RespError, RespType, SimpleString};\n\nuse bytes::{Buf, BufMut, Bytes, BytesMut};\n\n\n\npub const EMPTY_ARRAY: Array = Array(Bytes::from_static(b\"*0\\r\\n\"));\n\npub const NULL_ARRAY: Array = Array(Bytes::from_static(b\"*-1\\r\\n\"));\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct Array(Bytes);\n\n\n\nimpl Array {\n\n #[inline]\n\n pub fn len(&self) -> usize {\n\n self.0.len()\n\n }\n\n\n\n #[inline]\n\n pub fn bytes(&self) -> Bytes {\n\n self.0.clone()\n\n }\n\n\n", "file_path": "src/array/mod.rs", "rank": 21, "score": 15.060471657217139 }, { "content": " bytes.put_slice(value);\n\n bytes.put_u8(0x0d); // CR\n\n bytes.put_u8(0x0a); // LF\n\n Self::from_bytes(bytes.freeze())\n\n }\n\n\n\n ///\n\n ///\n\n /// ``` rust\n\n /// use resp_protocol::SimpleString;\n\n /// use bytes::Bytes;\n\n ///\n\n /// let simple_string: SimpleString = SimpleString::new(b\"OK\");\n\n /// let bytes: Bytes = simple_string.bytes();\n\n /// println!(\"{:?}\", bytes); // b\"+OK\\r\\n\"\n\n /// ```\n\n #[inline]\n\n pub fn bytes(&self) -> Bytes {\n\n self.0.clone()\n\n }\n", "file_path": "src/simple_string/mod.rs", "rank": 22, "score": 14.093318584910003 }, { "content": " return Err(RespError::InvalidLength);\n\n }\n\n while index < *end && input[index] >= 0x30 && input[index] <= 0x39 {\n\n index += 1;\n\n }\n\n if index + 1 >= *end || input[index] != 0x0d || input[index + 1] != 0x0a {\n\n return Err(RespError::InvalidLengthSeparator);\n\n }\n\n let length = unsafe {\n\n String::from_utf8_unchecked(input[*start + 1..index].to_vec())\n\n .parse::<usize>()\n\n .unwrap()\n\n };\n\n index += 2;\n\n if length == 0 {\n\n *start = index;\n\n return Ok(());\n\n }\n\n if index >= *end {\n\n return Err(RespError::InvalidValue);\n", "file_path": "src/array/mod.rs", "rank": 23, "score": 13.718294211229042 }, { "content": " }\n\n\n\n #[inline]\n\n pub fn from_bytes(input: Bytes) -> Self {\n\n Self(input)\n\n }\n\n\n\n #[inline]\n\n pub fn from_slice(input: &[u8]) -> Self {\n\n let bytes = Bytes::copy_from_slice(input);\n\n Self::from_bytes(bytes)\n\n }\n\n\n\n /// Build as new Simple String from raw pointer\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use resp_protocol::SimpleString;\n\n /// use std::mem::ManuallyDrop;\n\n ///\n", "file_path": "src/simple_string/mod.rs", "rank": 25, "score": 12.872686560432456 }, { "content": "\n\n ///\n\n ///\n\n /// ``` rust\n\n /// use resp_protocol::SimpleString;\n\n ///\n\n /// let simple_string: SimpleString = SimpleString::new(b\"OK\");\n\n /// let length: usize = simple_string.len();\n\n /// println!(\"{:?}\", length); // 5\n\n /// ```\n\n #[inline]\n\n pub fn len(&self) -> usize {\n\n self.0.len()\n\n }\n\n\n\n ///\n\n ///\n\n /// ``` rust\n\n /// use resp_protocol::SimpleString;\n\n ///\n", "file_path": "src/simple_string/mod.rs", "rank": 27, "score": 11.875885106077337 }, { "content": " ///\n\n /// let simple_string: SimpleString = SimpleString::new(b\"OK\");\n\n ///\n\n /// array_builder.insert(RespType::SimpleString(simple_string));\n\n ///\n\n /// let array: Array = array_builder.build();\n\n /// ```\n\n #[inline]\n\n pub fn insert(&mut self, value: RespType) -> &mut Self {\n\n self.inner.push(value);\n\n self\n\n }\n\n\n\n #[inline]\n\n pub fn build(&self) -> Array {\n\n let length = self.inner.len();\n\n if length == 0 {\n\n return EMPTY_ARRAY;\n\n }\n\n let length_string = length.to_string();\n", "file_path": "src/array/mod.rs", "rank": 29, "score": 10.956645311782204 }, { "content": "# rust-resp-protocol\n\n\n\nREdis Serialization Protocol\n\n\n\n## Install\n\n\n\nadd `resp-protocol` to `Cargo.toml`\n\n``` toml\n\n[dependencies]\n\nresp-protocol = \"0.0.10\"\n\n```\n\n\n\n## Usage\n\n\n\n``` rust\n\nuse resp_protocol;\n\n```\n\n\n\n## Types\n\n* Simple string\n\n* Error\n\n* Integer\n\n* Bulk string\n\n* Array\n\n\n\n### Simple string\n\n\n\n#### Examples\n\n\n\n##### Value\n\n\n\n``` text\n\n\"+OK\\r\\n\"\n\n```\n\n\n\n##### Build\n\n\n\n``` rust\n\nuse resp_protocol::SimpleString;\n\n\n\nlet simple_string: SimpleString = SimpleString::new(b\"OK\");\n\n```\n\n\n\n##### Parse\n\n\n\n``` rust\n\nuse resp_protocol::SimpleString;\n\n\n\nlet string: &str = \"+OK\\r\\n\";\n\nlet simple_string: SimpleString = SimpleString::parse(string.as_bytes(), &mut 0, &string.len()).unwrap();\n\n```\n\n\n\n### Error\n\n\n\n#### Examples\n\n\n\n##### Value\n\n\n\n``` text\n\n\"-ERROR\\r\\n\"\n\n```\n\n\n\n##### Build\n\n\n\n``` rust\n\nuse resp_protocol::Error;\n\n\n\nlet error: Error = Error::new(b\"ERROR\");\n\n```\n\n\n\n##### Parse\n\n\n\n``` rust\n\nuse resp_protocol::Error;\n\n\n\nlet string: &str = \"-ERROR\\r\\n\";\n\nlet error: Error = Error::parse(string.as_bytes(), &mut 0, &string.len()).unwrap();\n\n```\n\n\n\n### Integer\n\n\n\n#### Examples\n\n\n\n##### Value\n\n\n\n``` text\n\n\":100\\r\\n\"\n\n```\n\n\n\n##### Build\n\n\n\n``` rust\n\nuse resp_protocol::Integer;\n\n\n\nlet integer: Integer = Integer::new(-100i64);\n\n```\n\n\n\n##### Parse\n\n\n\n``` rust\n\nuse resp_protocol::Integer;\n\n\n\nlet string: &str = \":-100\\r\\n\";\n\nlet integer: Integer = Integer::parse(string.as_bytes(), &mut 0, &string.len()).unwrap();\n\n```\n\n\n\n### Bulk string\n\n\n\n#### Examples\n\n\n\n##### Value\n\n\n\n``` text\n\n\"$6\\r\\nfoobar\\r\\n\"\n\n```\n\n\n\n##### Build\n\n\n\n``` rust\n\nuse resp_protocol::BulkString;\n\n\n\nlet bulk_string: BulkString = BulkString::new(b\"foobar\");\n\n```\n\n\n\n##### Parse\n\n\n\n``` rust\n\nuse resp_protocol::BulkString;\n\n\n\nlet string: &str = \"$6\\r\\nfoobar\\r\\n\";\n\nlet bulk_string: BulkString = BulkString::parse(string.as_bytes(), &mut 0, &string.len()).unwrap();\n\n```\n\n\n\n### Array\n\n\n\n#### Examples\n\n\n", "file_path": "README.md", "rank": 30, "score": 10.181651320820649 }, { "content": "use bytes::Bytes;\n\n\n\nmod array;\n\nmod bulk_string;\n\nmod error;\n\nmod integer;\n\nmod simple_string;\n\n\n\npub use array::{Array, ArrayBuilder, EMPTY_ARRAY, NULL_ARRAY};\n\npub use bulk_string::{BulkString, EMPTY_BULK_STRING, NULL_BULK_STRING};\n\npub use error::Error;\n\npub use integer::Integer;\n\npub use simple_string::SimpleString;\n\n\n\n#[derive(Debug, Clone)]\n\npub enum RespError {\n\n InvalidFirstChar,\n\n InvalidLength,\n\n InvalidLengthSeparator,\n\n InvalidNullValue,\n", "file_path": "src/lib.rs", "rank": 32, "score": 9.472103837504777 }, { "content": " let mut total_bytes = length_string.len() + 3;\n\n for element in &self.inner {\n\n total_bytes += element.len();\n\n }\n\n let mut bytes = BytesMut::with_capacity(total_bytes);\n\n bytes.put_u8(0x2a); // \"*\"\n\n bytes.put_slice(length_string.as_bytes());\n\n bytes.put_u8(0x0d); // CR\n\n bytes.put_u8(0x0a); // LF\n\n for element in &self.inner {\n\n bytes.put(element.bytes());\n\n }\n\n Array(bytes.freeze())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests_array {\n\n use crate::{\n\n Array, ArrayBuilder, BulkString, Integer, RespType, SimpleString, EMPTY_ARRAY, NULL_ARRAY,\n", "file_path": "src/array/mod.rs", "rank": 33, "score": 9.042427730596195 }, { "content": " }\n\n let mut count = 0;\n\n while count < length {\n\n match input[index] {\n\n 0x2b => {\n\n SimpleString::while_valid(input, &mut index, end)?;\n\n }\n\n 0x2d => {\n\n Error::while_valid(input, &mut index, end)?;\n\n }\n\n 0x3a => {\n\n Integer::while_valid(input, &mut index, end)?;\n\n }\n\n 0x24 => {\n\n BulkString::while_valid(input, &mut index, end)?;\n\n }\n\n 0x2a => {\n\n Self::while_valid(input, &mut index, end)?;\n\n }\n\n _ => {\n", "file_path": "src/array/mod.rs", "rank": 34, "score": 8.632514231657652 }, { "content": " /// ```\n\n #[inline]\n\n pub fn new() -> ArrayBuilder {\n\n ArrayBuilder {\n\n inner: Vec::<RespType>::new(),\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn value(&mut self) -> Vec<RespType> {\n\n self.inner.clone()\n\n }\n\n\n\n /// Add a new value to Array Builder\n\n ///\n\n /// # Example\n\n /// ```rust\n\n /// use resp_protocol::{RespType, Array, ArrayBuilder, SimpleString};\n\n ///\n\n /// let mut array_builder: ArrayBuilder = ArrayBuilder::new();\n", "file_path": "src/array/mod.rs", "rank": 35, "score": 8.61643121876455 }, { "content": " }\n\n\n\n #[test]\n\n fn test_parse() {\n\n let string = \"-invalid length\\r\\n+bar\\r\\n\";\n\n let mut cursor = 0;\n\n let end = string.len();\n\n assert_eq!(\n\n Error::parse(string.as_bytes(), &mut cursor, &end).unwrap(),\n\n Error::new(\"invalid length\".as_bytes())\n\n );\n\n assert_eq!(cursor, 17);\n\n }\n\n}\n", "file_path": "src/error/mod.rs", "rank": 36, "score": 7.991842849732675 }, { "content": " let mut index = *start;\n\n if index + 3 >= *end {\n\n return Err(RespError::InvalidValue);\n\n }\n\n if input[index] != 0x2a {\n\n return Err(RespError::InvalidFirstChar);\n\n }\n\n index += 1;\n\n if input[index] == 0x2d {\n\n if input[index + 1] != 0x31\n\n || input[index + 2] != 0x0d\n\n || index + 3 == *end\n\n || input[index + 3] != 0x0a\n\n {\n\n return Err(RespError::InvalidNullValue);\n\n }\n\n *start = index + 4;\n\n return Ok(());\n\n }\n\n if input[index] == 0x30 && input[index + 1] >= 0x30 && input[index + 1] <= 0x39 {\n", "file_path": "src/array/mod.rs", "rank": 37, "score": 7.983273542511625 }, { "content": " self.0 == other.bytes()\n\n }\n\n fn ne(&self, other: &Array) -> bool {\n\n self.0 != other.bytes()\n\n }\n\n}\n\n\n\npub struct ArrayBuilder {\n\n inner: Vec<RespType>,\n\n}\n\n\n\nimpl ArrayBuilder {\n\n /// Builad a new Array Builder\n\n ///\n\n /// # Example\n\n /// ``` rust\n\n /// use resp_protocol::{Array, ArrayBuilder};\n\n ///\n\n /// let array_builder: ArrayBuilder = ArrayBuilder::new();\n\n /// let array: Array = array_builder.build();\n", "file_path": "src/array/mod.rs", "rank": 38, "score": 7.962831420051048 }, { "content": "##### Value\n\n\n\n``` text\n\n\"*0\\r\\n\" // empty array\n\n\"*2\\r\\n$3\\r\\nfoo\\r\\n$3\\r\\nbar\\r\\n\" // bulk strings array\n\n\"*2\\r\\n:1\\r\\n$6\\r\\nfoobar\\r\\n\" // mixed types array\n\n```\n\n\n\n##### Build\n\n\n\n``` rust\n\nuse resp_procotol::{Array, ArrayBuilder, RespType, Integer, BulkString};\n\n\n\nlet mut array_builder: ArrayBuilder = ArrayBuilder::new();\n\narray_builder.insert(RespType::Integer(Integer::new(100)));\n\narray_builder.insert(RespType::BulkString(BulkString::new(b\"foobar\")));\n\n\n\nlet array: Array = array_builder.build();\n\nprintln!(\"{:?}\", array); // Array(b\"*2\\r\\n:100\\r\\n$6\\r\\nfoobar\\r\\n\")\n\n```\n\n\n\n##### Parse\n\n\n\n``` rust\n\nuse resp_protocol::Array;\n\n\n\nlet string = \"*2\\r\\n$3\\r\\nfoo\\r\\n$3\\r\\nbar\\r\\n\";\n\nlet array = Array::parse(string.as_bytes(), &mut 0, &string.len()).unwrap();\n\nprintln!(\"{:?}\", array); // Array(b\"*2\\r\\n$3\\r\\nfoo\\r\\n$3\\r\\nbar\\r\\n\")\n", "file_path": "README.md", "rank": 40, "score": 7.005341248274618 }, { "content": " InvalidValue,\n\n InvalidTerminate,\n\n LengthsNotMatch,\n\n}\n\n\n\nimpl std::fmt::Display for RespError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n match self {\n\n RespError::InvalidFirstChar => {\n\n write!(f, \"Invalid first char.\")\n\n }\n\n RespError::InvalidLength => {\n\n write!(f, \"Invalid length.\")\n\n }\n\n RespError::InvalidLengthSeparator => {\n\n write!(f, \"Invalid length separator.\")\n\n }\n\n RespError::InvalidValue => {\n\n write!(f, \"Invalid value.\")\n\n }\n", "file_path": "src/lib.rs", "rank": 41, "score": 6.58089198629275 }, { "content": " RespError::InvalidNullValue => {\n\n write!(f, \"Invalid null value.\")\n\n }\n\n RespError::LengthsNotMatch => {\n\n write!(f, \"Lengths do not match.\")\n\n }\n\n RespError::InvalidTerminate => {\n\n write!(f, \"Invalid terminate.\")\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl std::error::Error for RespError {}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum RespType {\n\n SimpleString(SimpleString),\n\n Error(Error),\n\n Integer(Integer),\n", "file_path": "src/lib.rs", "rank": 42, "score": 6.576739096907936 }, { "content": " }\n\n\n\n #[test]\n\n fn test_bytes() {\n\n let simple_string = SimpleString(Bytes::from_static(b\"+OK\\r\\n\"));\n\n assert_eq!(simple_string.bytes(), Bytes::from_static(b\"+OK\\r\\n\"));\n\n assert_eq!(simple_string.bytes(), Bytes::from_static(b\"+OK\\r\\n\"));\n\n }\n\n\n\n #[test]\n\n fn test_len() {\n\n let simple_string = SimpleString(Bytes::from_static(b\"+OK\\r\\n\"));\n\n assert_eq!(simple_string.len(), 5);\n\n assert_eq!(simple_string.len(), 5);\n\n }\n\n\n\n #[test]\n\n fn test_validate_valid_value() {\n\n let value = b\"OK\";\n\n assert_eq!(SimpleString::validate_value(value).unwrap(), ())\n", "file_path": "src/simple_string/mod.rs", "rank": 43, "score": 6.106951364643313 }, { "content": " }\n\n\n\n #[test]\n\n fn test_parse_empty() {\n\n let string = \"*0\\r\\n\";\n\n let mut cursor = 0;\n\n let array = Array::parse(string.as_bytes(), &mut cursor, &string.len()).unwrap();\n\n\n\n assert_eq!(array, EMPTY_ARRAY);\n\n assert_eq!(cursor, 4);\n\n }\n\n\n\n #[test]\n\n fn test_parse_null() {\n\n let string = \"*-1\\r\\n\";\n\n let mut cursor = 0;\n\n let array = Array::parse(string.as_bytes(), &mut cursor, &string.len()).unwrap();\n\n\n\n assert_eq!(array, NULL_ARRAY);\n\n assert_eq!(cursor, 5);\n", "file_path": "src/array/mod.rs", "rank": 44, "score": 5.744461763500633 }, { "content": " #[test]\n\n fn test_validate_valid_value() {\n\n let value = 100i64.to_string();\n\n assert_eq!(Integer::validate_value(value.as_bytes()).unwrap(), ())\n\n }\n\n\n\n #[test]\n\n #[should_panic(expected = \"InvalidValue\")]\n\n fn test_validate_invalid_value() {\n\n let value = b\"100\\r\\n\";\n\n assert_eq!(Integer::validate_value(value).unwrap(), ())\n\n }\n\n\n\n #[test]\n\n fn test_parse() {\n\n let string = \":100\\r\\n+bar\\r\\n\";\n\n let mut cursor = 0;\n\n let end = string.len();\n\n assert_eq!(\n\n Integer::parse(string.as_bytes(), &mut cursor, &end).unwrap(),\n\n Integer::new(100)\n\n );\n\n assert_eq!(cursor, 6);\n\n }\n\n}\n", "file_path": "src/integer/mod.rs", "rank": 46, "score": 5.4110311354305125 }, { "content": " }\n\n\n\n #[test]\n\n #[should_panic(expected = \"InvalidValue\")]\n\n fn test_validate_invalid_value() {\n\n let value = b\"O\\r\\nK\";\n\n assert_eq!(SimpleString::validate_value(value).unwrap(), ())\n\n }\n\n\n\n #[test]\n\n fn test_parse() {\n\n let string = \"+foo\\r\\n+bar\\r\\n\";\n\n let mut cursor = 0;\n\n let end = string.len();\n\n assert_eq!(\n\n SimpleString::parse(string.as_bytes(), &mut cursor, &end).unwrap(),\n\n SimpleString::new(\"foo\".as_bytes())\n\n );\n\n assert_eq!(cursor, 6);\n\n }\n\n}\n", "file_path": "src/simple_string/mod.rs", "rank": 48, "score": 5.334690842586053 }, { "content": "\n\n #[test]\n\n fn test_new() {\n\n let string = \"OK\";\n\n let simple_string = SimpleString::new(string.as_bytes());\n\n assert_eq!(simple_string, SimpleString(Bytes::from_static(b\"+OK\\r\\n\")));\n\n }\n\n\n\n #[test]\n\n fn test_value() {\n\n let simple_string = SimpleString(Bytes::from_static(b\"+OK\\r\\n\"));\n\n assert_eq!(simple_string.value(), Vec::from(\"OK\"));\n\n assert_eq!(simple_string.value(), Vec::from(\"OK\"));\n\n }\n\n\n\n #[test]\n\n fn test_value_len() {\n\n let simple_string = SimpleString(Bytes::from_static(b\"+OK\\r\\n\"));\n\n assert_eq!(simple_string.value_len(), 2);\n\n assert_eq!(simple_string.value_len(), 2);\n", "file_path": "src/simple_string/mod.rs", "rank": 49, "score": 4.5141193563589965 }, { "content": " let string = \"*2\\r\\n$3\\r\\nfoo\\r\\n$3\\r\\nbar\\r\\n\";\n\n let mut cursor = 0;\n\n let array = Array::parse(string.as_bytes(), &mut cursor, &string.len()).unwrap();\n\n\n\n let referance_array = ArrayBuilder::new()\n\n .insert(RespType::BulkString(BulkString::new(b\"foo\")))\n\n .insert(RespType::BulkString(BulkString::new(b\"bar\")))\n\n .build();\n\n\n\n assert_eq!(array, referance_array);\n\n assert_eq!(cursor, 22);\n\n }\n\n}\n", "file_path": "src/array/mod.rs", "rank": 50, "score": 4.240075667044746 }, { "content": " BulkString(BulkString),\n\n Array(Array),\n\n}\n\n\n\nimpl RespType {\n\n fn len(&self) -> usize {\n\n match self {\n\n RespType::SimpleString(simple_string) => simple_string.len(),\n\n RespType::Error(error) => error.len(),\n\n RespType::Integer(integer) => integer.len(),\n\n RespType::BulkString(bulk_string) => bulk_string.len(),\n\n RespType::Array(array) => array.len(),\n\n }\n\n }\n\n\n\n fn bytes(&self) -> Bytes {\n\n match self {\n\n RespType::SimpleString(simple_string) => simple_string.bytes(),\n\n RespType::Error(error) => error.bytes(),\n\n RespType::Integer(integer) => integer.bytes(),\n\n RespType::BulkString(bulk_string) => bulk_string.bytes(),\n\n RespType::Array(array) => array.bytes(),\n\n }\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 51, "score": 4.06871074452464 }, { "content": " };\n\n use bytes::Bytes;\n\n\n\n #[test]\n\n fn test_build_empty_array() {\n\n let array_builder = ArrayBuilder::new();\n\n assert_eq!(array_builder.build(), EMPTY_ARRAY)\n\n }\n\n\n\n #[test]\n\n fn test_build_array() {\n\n let mut array_builder = ArrayBuilder::new();\n\n array_builder.insert(RespType::SimpleString(SimpleString::new(b\"foo\")));\n\n assert_eq!(\n\n array_builder.build().bytes(),\n\n Bytes::from_static(b\"*1\\r\\n+foo\\r\\n\")\n\n );\n\n array_builder.insert(RespType::BulkString(BulkString::new(b\"bar\")));\n\n assert_eq!(\n\n array_builder.build().bytes(),\n", "file_path": "src/array/mod.rs", "rank": 52, "score": 4.067298995083997 }, { "content": " }\n\n\n\n #[test]\n\n fn test_bytes() {\n\n let error = Error(Bytes::from_static(b\"-Error message\\r\\n\"));\n\n assert_eq!(error.bytes(), Bytes::from_static(b\"-Error message\\r\\n\"));\n\n assert_eq!(error.bytes(), Bytes::from_static(b\"-Error message\\r\\n\"));\n\n }\n\n\n\n #[test]\n\n fn test_validate_valid_value() {\n\n let value = b\"Error message\";\n\n assert_eq!(Error::validate_value(value).unwrap(), ())\n\n }\n\n\n\n #[test]\n\n #[should_panic(expected = \"InvalidValue\")]\n\n fn test_validate_invalid_value() {\n\n let value = b\"Error\\r\\n message\";\n\n assert_eq!(Error::validate_value(value).unwrap(), ())\n", "file_path": "src/error/mod.rs", "rank": 53, "score": 4.062331410118586 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests_error {\n\n use crate::error::Error;\n\n use bytes::Bytes;\n\n\n\n #[test]\n\n fn test_new() {\n\n let string = \"Error message\";\n\n let error = Error::new(string.as_bytes());\n\n assert_eq!(error, Error(Bytes::from_static(b\"-Error message\\r\\n\")));\n\n }\n\n\n\n #[test]\n\n fn test_value() {\n\n let error = Error(Bytes::from_static(b\"-Error message\\r\\n\"));\n\n assert_eq!(error.value(), Vec::from(\"Error message\"));\n\n assert_eq!(error.value(), Vec::from(\"Error message\"));\n", "file_path": "src/error/mod.rs", "rank": 55, "score": 3.9813391576631934 }, { "content": " }\n\n\n\n #[test]\n\n fn parse_array_with_integers() {\n\n let string = \"*3\\r\\n:1\\r\\n:2\\r\\n:3\\r\\n\";\n\n let mut cursor = 0;\n\n let array = Array::parse(string.as_bytes(), &mut cursor, &string.len()).unwrap();\n\n\n\n let referance_array = ArrayBuilder::new()\n\n .insert(RespType::Integer(Integer::new(1)))\n\n .insert(RespType::Integer(Integer::new(2)))\n\n .insert(RespType::Integer(Integer::new(3)))\n\n .build();\n\n\n\n assert_eq!(array, referance_array);\n\n assert_eq!(cursor, 16);\n\n }\n\n\n\n #[test]\n\n fn parse_array_with_bulk_strings() {\n", "file_path": "src/array/mod.rs", "rank": 56, "score": 3.976538503992798 }, { "content": " Bytes::from_static(b\"*2\\r\\n+foo\\r\\n$3\\r\\nbar\\r\\n\")\n\n );\n\n array_builder.insert(RespType::Integer(Integer::new(-100)));\n\n assert_eq!(\n\n array_builder.build().bytes(),\n\n Bytes::from_static(b\"*3\\r\\n+foo\\r\\n$3\\r\\nbar\\r\\n:-100\\r\\n\")\n\n );\n\n let mut subarray_builder = ArrayBuilder::new();\n\n subarray_builder.insert(RespType::SimpleString(SimpleString::new(b\"foo\")));\n\n subarray_builder.insert(RespType::SimpleString(SimpleString::new(b\"bar\")));\n\n let subarray = subarray_builder.build();\n\n assert_eq!(\n\n subarray.bytes(),\n\n Bytes::from_static(b\"*2\\r\\n+foo\\r\\n+bar\\r\\n\")\n\n );\n\n array_builder.insert(RespType::Array(subarray));\n\n assert_eq!(\n\n array_builder.build().bytes(),\n\n Bytes::from_static(b\"*4\\r\\n+foo\\r\\n$3\\r\\nbar\\r\\n:-100\\r\\n*2\\r\\n+foo\\r\\n+bar\\r\\n\")\n\n );\n", "file_path": "src/array/mod.rs", "rank": 57, "score": 2.890840321966116 }, { "content": " #[test]\n\n fn test_new() {\n\n let integer = Integer::new(100);\n\n assert_eq!(integer, Integer(Bytes::from_static(b\":100\\r\\n\")));\n\n }\n\n\n\n #[test]\n\n fn test_raw_value() {\n\n let integer = Integer(Bytes::from_static(b\":100\\r\\n\"));\n\n assert_eq!(integer.raw_value(), Vec::from(\"100\"));\n\n assert_eq!(integer.raw_value(), Vec::from(\"100\"));\n\n }\n\n\n\n #[test]\n\n fn test_bytes() {\n\n let integer = Integer(Bytes::from_static(b\":100\\r\\n\"));\n\n assert_eq!(integer.bytes(), Bytes::from_static(b\":100\\r\\n\"));\n\n assert_eq!(integer.bytes(), Bytes::from_static(b\":100\\r\\n\"));\n\n }\n\n\n", "file_path": "src/integer/mod.rs", "rank": 58, "score": 2.283403024270831 } ]
Rust
src/lib.rs
niuhuan/pixirust
8eeeebaf4e9cb9e7351a6f625c247375d8396644
pub mod entities; mod test; mod utils; pub use anyhow::Error; pub use anyhow::Result; pub use entities::*; use serde_json::json; use utils::*; const APP_SERVER: &'static str = "app-api.pixiv.net"; const APP_SERVER_IP: &'static str = "210.140.131.199"; const OAUTH_SERVER: &'static str = "oauth.secure.pixiv.net"; const OAUTH_SERVER_IP: &'static str = "210.140.131.199"; const IMG_SERVER: &'static str = "i.pximg.net"; const IMG_SERVER_IP: &'static str = "s.pximg.net"; struct Server { pub server: &'static str, pub ip: &'static str, } const APP: Server = Server { server: APP_SERVER, ip: APP_SERVER_IP, }; const OAUTH: Server = Server { server: OAUTH_SERVER, ip: OAUTH_SERVER_IP, }; const IMG: Server = Server { server: IMG_SERVER, ip: IMG_SERVER_IP, }; const SALT: &'static str = "28c1fdd170a5204386cb1313c7077b34f83e4aaf4aa829ce78c231e05b0bae2c"; const CLIENT_ID: &'static str = "MOBrBDS8blbauoSck0ZfDbtuzpyT"; const CLIENT_SECRET: &'static str = "lsACyCD94FhDUtGTXi3QzcFE2uU1hqtDaKeqrdwj"; pub struct Client { pub access_token: String, agent: reqwest::Client, agent_free: bool, } impl Client { pub fn new() -> Self { Self { agent: reqwest::ClientBuilder::new().build().unwrap(), agent_free: false, access_token: String::default(), } } pub fn new_agent_free() -> Self { Self { agent: reqwest::ClientBuilder::new() .danger_accept_invalid_certs(true) .build() .unwrap(), agent_free: true, access_token: String::default(), } } fn base64_pixiv<T: AsRef<[u8]>>(&self, src: T) -> String { base64::encode(src) .replace("=", "") .replace("+", "-") .replace("/", "_") } fn iso_time(&self) -> String { chrono::Local::now() .format("%Y-%m-%dT%H:%M:%S%Z") .to_string() } fn code_verify(&self) -> String { self.base64_pixiv(uuid::Uuid::new_v4().to_string().replace("-", "")) } fn code_challenge(&self, code: &String) -> String { self.base64_pixiv(sha256(code.clone())) } pub fn create_login_url(&self) -> LoginUrl { let verify = self.code_verify(); let url = format!("https://app-api.pixiv.net/web/v1/login?code_challenge={}&code_challenge_method=S256&client=pixiv-android",self.code_challenge(&verify)); LoginUrl { verify, url } } pub fn create_register_url(&self) -> LoginUrl { let verify = self.code_verify(); let url = format!("https://app-api.pixiv.net/web/v1/provisional-accounts/create?code_challenge={}&code_challenge_method=S256&client=pixiv-android",self.code_challenge(&verify)); LoginUrl { verify, url } } async fn load_token(&self, body: serde_json::Value) -> Result<Token> { let req = match self.agent_free { true => self .agent .request( reqwest::Method::POST, format!("https://{}/auth/token", OAUTH.ip).as_str(), ) .header("Host", OAUTH.server), false => self.agent.request( reqwest::Method::POST, format!("https://{}/auth/token", OAUTH.server).as_str(), ), }; let rsp = req.form(&body).send().await; match rsp { Ok(resp) => { let status = resp.status(); match status.as_u16() { 200 => Ok(serde_json::from_str(resp.text().await?.as_str())?), _ => { let err: LoginErrorResponse = serde_json::from_str(resp.text().await?.as_str())?; Err(Error::msg(err.errors.system.message)) } } } Err(err) => Err(Error::msg(err)), } } pub async fn load_token_by_code(&self, code: String, verify: String) -> Result<Token> { self.load_token(json!({ "code": code, "code_verifier": verify, "redirect_uri": "https://app-api.pixiv.net/web/v1/users/auth/pixiv/callback", "grant_type": "authorization_code", "include_policy": "true", "client_id": CLIENT_ID, "client_secret": CLIENT_SECRET, })) .await } pub async fn refresh_token(&self, refresh_token: &String) -> Result<Token> { self.load_token(json!({ "refresh_token": refresh_token, "grant_type": "refresh_token", "include_policy": "true", "client_id": CLIENT_ID, "client_secret": CLIENT_SECRET, })) .await } fn sign_request(&self, request: reqwest::RequestBuilder) -> reqwest::RequestBuilder { let time = self.iso_time(); request .header("x-client-time", &time.clone()) .header("x-client-hash", hex::encode(format!("{}{}", time, SALT))) .header("accept-language", "zh-CN") .header( "User-Agent", "PixivAndroidApp/5.0.234 (Android 10.0; Pixel C)", ) .header("App-OS-Version", "Android 10.0") .header("Referer", "https://app-api.pixiv.net/") .bearer_auth(&self.access_token) } pub async fn get_from_pixiv_raw(&self, url: String) -> Result<String> { let req = match self.agent_free { true => { if url.starts_with(format!("https://{}", APP.server).as_str()) { self.agent .get(url.replacen(APP.server, APP.ip.clone(), 1)) .header("Host", APP.server) } else { self.agent.get(url) } } false => self.agent.get(url), }; let req = self.sign_request(req); let rsp = req.send().await?; match &rsp.status().as_u16() { 200 => Ok(rsp.text().await?), _ => { let ae: AppError = serde_json::from_str(rsp.text().await?.as_str())?; Err(Error::msg(ae.error.message)) } } } async fn get_from_pixiv<T: for<'de> serde::Deserialize<'de>>(&self, url: String) -> Result<T> { let text = self.get_from_pixiv_raw(url).await?; Ok(serde_json::from_str(text.as_str())?) } pub fn illust_recommended_first_url(&self) -> String { format!( "https://{}/v1/illust/recommended?filter=for_ios&include_ranking_label=true", APP.server ) } pub fn illust_rank_first_url(&self, mode: String, date: String) -> String { format!( "https://{}/v1/illust/ranking?filter=for_android&mode={}&date={}", APP.server, mode, date, ) } pub async fn illust_from_url(&self, url: String) -> Result<IllustResponse> { self.get_from_pixiv(url).await } pub fn illust_trending_tags_url(&self) -> String { format!( "https://{}/v1/trending-tags/illust?filter=for_android", APP.server, ) } pub async fn illust_trending_tags(&self) -> Result<IllustTrendingTags> { self.get_from_pixiv(self.illust_trending_tags_url()).await } pub fn illust_search_first_url(&self, word: String, mode: String) -> String { format!( "https://{}/v1/search/illust?word={}&search_target={}&filter=for_ios", APP.server, urlencoding::encode(word.as_str()), mode, ) } pub fn illust_rank_first_utl(&self, mode: String, date: String) -> String { format!( "https://{}/v1/illust/ranking?mode={}&date={}&filter={}", APP.server, mode, date, "for_android", ) } pub async fn load_image_data(&self, url: String) -> Result<bytes::Bytes> { let req = match self.agent_free { true => { if url.starts_with(format!("https://{}", IMG.server).as_str()) { self.agent .get(url.replacen(IMG.server, IMG.ip.clone(), 1)) .header("Host", IMG.server) } else { self.agent.get(url) } } false => self.agent.get(url), }; let req = self.sign_request(req); let rsp = req.send().await?; let status = rsp.status(); match status.as_u16() { 200 => Ok(rsp.bytes().await?), _ => Err(Error::msg(rsp.text().await?)), } } }
pub mod entities; mod test; mod utils; pub use anyhow::Error; pub use anyhow::Result; pub use entities::*; use serde_json::json; use utils::*; const APP_SERVER: &'static str = "app-api.pixiv.net"; const APP_SERVER_IP: &'static str = "210.140.131.199"; const OAUTH_SERVER: &'static str = "oauth.secure.pixiv.net"; const OAUTH_SERVER_IP: &'static str = "210.140.131.199"; const IMG_SERVER: &'static str = "i.pximg.net"; const IMG_SERVER_IP: &'static str = "s.pximg.net"; struct Server { pub server: &'static str, pub ip: &'static str, } const APP: Server = Server { server: APP_SERVER, ip: APP_SERVER_IP, }; const OAUTH: Server = Server { server: OAUTH_SERVER, ip: OAUTH_SERVER_IP, }; const IMG: Server = Server { server: IMG_SERVER, ip: IMG_SERVER_IP, }; const SALT: &'static str = "28c1fdd170a5204386cb1313c7077b34f83e4aaf4aa829ce78c231e05b0bae2c"; const CLIENT_ID: &'static str = "MOBrBDS8blbauoSck0ZfDbtuzpyT"; const CLIENT_SECRET: &'static str = "lsACyCD94FhDUtGTXi3QzcFE2uU1hqtDaKeqrdwj"; pub struct Client { pub access_token: String, agent: reqwest::Client, agent_free: bool, } impl Client { pub fn new() -> Self { Self { agent: reqwest::ClientBuilder::new().build().unwrap(), agent_free: false, access_token: String::default(), } } pub fn new_agent_free() -> Self { Self { agent: reqwest::ClientBuilder::new() .danger_accept_invalid_certs(true) .build() .unwrap(), agent_free: true, access_token: String::default(), } } fn base64_pixiv<T: AsRef<[u8]>>(&self, src: T) -> String { base64::encode(src) .replace("=", "") .replace("+", "-") .replace("/", "_") } fn iso_time(&self) -> String { chrono::Local::now() .format("%Y-%m-%dT%H:%M:%S%Z") .to_string() } fn code_verify(&self) -> String { self.base64_pixiv(uuid::Uuid::new_v4().to_string().replace("-", "")) } fn code_challenge(&self, code: &String) -> String { self.base64_pixiv(sha256(code.clone())) } pub fn create_login_url(&self) -> LoginUrl { let verify = self.code_verify(); let url = format!("https://app-api.pixiv.net/web/v1/login?code_challenge={}&code_challenge_method=S256&client=pixiv-android",self.code_challenge(&verify)); LoginUrl { verify, url } } pub fn create_register_url(&self) -> LoginUrl { let verify = self.code_verify(); let url = format!("https://app-api.pixiv.net/web/v1/provisional-accounts/create?code_challenge={}&code_challenge_method=S256&client=pixiv-android",self.code_challenge(&verify)); LoginUrl { verify, url } } async fn load_token(&self, body: serde_json::Value) -> Result<Token> { let req = match self.agent_free { true => self .agent .request( reqwest::Method::POST, format!("https://{}/auth/token", OAUTH.ip).as_str(), ) .header("Host", OAUTH.server), false => self.agent.request( reqwest::Method::POST, format!("https://{}/auth/token", OAUTH.server).as_str(), ), }; let rsp = req.form(&body).send().await; match rsp { Ok(resp) => { let status = resp.status(); match status.as_u16() { 200 => Ok(s
/{}", APP.server).as_str()) { self.agent .get(url.replacen(APP.server, APP.ip.clone(), 1)) .header("Host", APP.server) } else { self.agent.get(url) } } false => self.agent.get(url), }; let req = self.sign_request(req); let rsp = req.send().await?; match &rsp.status().as_u16() { 200 => Ok(rsp.text().await?), _ => { let ae: AppError = serde_json::from_str(rsp.text().await?.as_str())?; Err(Error::msg(ae.error.message)) } } } async fn get_from_pixiv<T: for<'de> serde::Deserialize<'de>>(&self, url: String) -> Result<T> { let text = self.get_from_pixiv_raw(url).await?; Ok(serde_json::from_str(text.as_str())?) } pub fn illust_recommended_first_url(&self) -> String { format!( "https://{}/v1/illust/recommended?filter=for_ios&include_ranking_label=true", APP.server ) } pub fn illust_rank_first_url(&self, mode: String, date: String) -> String { format!( "https://{}/v1/illust/ranking?filter=for_android&mode={}&date={}", APP.server, mode, date, ) } pub async fn illust_from_url(&self, url: String) -> Result<IllustResponse> { self.get_from_pixiv(url).await } pub fn illust_trending_tags_url(&self) -> String { format!( "https://{}/v1/trending-tags/illust?filter=for_android", APP.server, ) } pub async fn illust_trending_tags(&self) -> Result<IllustTrendingTags> { self.get_from_pixiv(self.illust_trending_tags_url()).await } pub fn illust_search_first_url(&self, word: String, mode: String) -> String { format!( "https://{}/v1/search/illust?word={}&search_target={}&filter=for_ios", APP.server, urlencoding::encode(word.as_str()), mode, ) } pub fn illust_rank_first_utl(&self, mode: String, date: String) -> String { format!( "https://{}/v1/illust/ranking?mode={}&date={}&filter={}", APP.server, mode, date, "for_android", ) } pub async fn load_image_data(&self, url: String) -> Result<bytes::Bytes> { let req = match self.agent_free { true => { if url.starts_with(format!("https://{}", IMG.server).as_str()) { self.agent .get(url.replacen(IMG.server, IMG.ip.clone(), 1)) .header("Host", IMG.server) } else { self.agent.get(url) } } false => self.agent.get(url), }; let req = self.sign_request(req); let rsp = req.send().await?; let status = rsp.status(); match status.as_u16() { 200 => Ok(rsp.bytes().await?), _ => Err(Error::msg(rsp.text().await?)), } } }
erde_json::from_str(resp.text().await?.as_str())?), _ => { let err: LoginErrorResponse = serde_json::from_str(resp.text().await?.as_str())?; Err(Error::msg(err.errors.system.message)) } } } Err(err) => Err(Error::msg(err)), } } pub async fn load_token_by_code(&self, code: String, verify: String) -> Result<Token> { self.load_token(json!({ "code": code, "code_verifier": verify, "redirect_uri": "https://app-api.pixiv.net/web/v1/users/auth/pixiv/callback", "grant_type": "authorization_code", "include_policy": "true", "client_id": CLIENT_ID, "client_secret": CLIENT_SECRET, })) .await } pub async fn refresh_token(&self, refresh_token: &String) -> Result<Token> { self.load_token(json!({ "refresh_token": refresh_token, "grant_type": "refresh_token", "include_policy": "true", "client_id": CLIENT_ID, "client_secret": CLIENT_SECRET, })) .await } fn sign_request(&self, request: reqwest::RequestBuilder) -> reqwest::RequestBuilder { let time = self.iso_time(); request .header("x-client-time", &time.clone()) .header("x-client-hash", hex::encode(format!("{}{}", time, SALT))) .header("accept-language", "zh-CN") .header( "User-Agent", "PixivAndroidApp/5.0.234 (Android 10.0; Pixel C)", ) .header("App-OS-Version", "Android 10.0") .header("Referer", "https://app-api.pixiv.net/") .bearer_auth(&self.access_token) } pub async fn get_from_pixiv_raw(&self, url: String) -> Result<String> { let req = match self.agent_free { true => { if url.starts_with(format!("https:/
random
[ { "content": "pub fn sha256(src: String) -> Vec<u8> {\n\n let mut hasher = Sha256::new();\n\n hasher.update(src.as_bytes());\n\n hasher.finalize().to_vec()\n\n}\n\n\n\n//////////////////////////////////////////////////\n", "file_path": "src/utils.rs", "rank": 0, "score": 71134.40215482653 }, { "content": "use sha2::{Digest, Sha256};\n\n\n\n///////////////////// SHA256 /////////////////////\n\n\n", "file_path": "src/utils.rs", "rank": 2, "score": 24085.819055508713 }, { "content": " match result {\n\n Ok(data) => println!(\"{}\", serde_json::to_string(&data).unwrap()),\n\n Err(err) => panic!(\"{}\", err),\n\n }\n\n }\n\n\n\n /// 生成一个没有认证登录过的客户端\n\n fn no_auth_client() -> Client {\n\n Client::new_agent_free()\n\n }\n\n\n\n /// 生成登录链接\n\n #[tokio::test]\n\n async fn test_login_url() {\n\n println!(\n\n \"{}\",\n\n serde_json::to_string(&no_auth_client().create_login_url()).unwrap()\n\n );\n\n }\n\n\n", "file_path": "src/test.rs", "rank": 3, "score": 23186.30044584229 }, { "content": " /// 使用登录链接登录后, 客户端oauth\n\n #[tokio::test]\n\n async fn test_load_token_by_code() {\n\n let token = no_auth_client()\n\n .load_token_by_code(\"code\".to_string(), \"verify\".to_string())\n\n .await\n\n .unwrap();\n\n write_token(token, chrono::Local::now().timestamp_millis());\n\n }\n\n\n\n /// 保存token到文件\n\n fn write_token(token: Token, time: i64) {\n\n std::fs::write(\n\n \"test_token.json\",\n\n serde_json::to_string(&token.clone()).unwrap(),\n\n )\n\n .unwrap();\n\n std::fs::write(\"test_token_time.json\", format!(\"{}\", time)).unwrap();\n\n }\n\n\n", "file_path": "src/test.rs", "rank": 4, "score": 23181.99377082831 }, { "content": "\n\n #[tokio::test]\n\n async fn test_load_image() {\n\n match no_auth_client().load_image_data(\"https://i.pximg.net/c/540x540_70/img-master/img/2021/04/18/17/22/42/89233845_p0_master1200.jpg\".to_string()).await {\n\n Ok(img_bytes) => match std::fs::write(\"test.jpg\", img_bytes) {\n\n Ok(_) => println!(\"OK\"),\n\n Err(err) => panic!(\"{}\", err),\n\n },\n\n Err(err) => panic!(\"{}\", err),\n\n }\n\n }\n\n\n\n #[tokio::test]\n\n async fn test() {\n\n println!(\"{}\", chrono::Local::now().timestamp_millis())\n\n }\n\n}\n", "file_path": "src/test.rs", "rank": 5, "score": 23181.71989432837 }, { "content": " println!(\n\n \"{}\",\n\n client\n\n .get_from_pixiv_raw(client.illust_recommended_first_url())\n\n .await\n\n .unwrap()\n\n )\n\n }\n\n\n\n #[tokio::test]\n\n async fn test_illust() {\n\n let client = authed_client().await.unwrap();\n\n print(\n\n client\n\n .illust_from_url(\n\n client.illust_rank_first_url(\"day_r18\".to_string(), String::default()),\n\n )\n\n .await,\n\n )\n\n }\n", "file_path": "src/test.rs", "rank": 6, "score": 23180.89369597262 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use crate::{Client, Result, Token};\n\n use std::sync::{Mutex, MutexGuard};\n\n\n\n // 懒加载, 维护全局token\n\n lazy_static::lazy_static! {\n\n static ref TOKEN:Mutex<Token> = Mutex::<Token>::new(\n\n Token{\n\n access_token: String::default(),\n\n expires_in: 0,\n\n token_type: String::default(),\n\n scope: String::default(),\n\n refresh_token: String::default(),\n\n }\n\n );\n\n }\n\n\n\n /// 打印结果\n\n fn print<T: serde::Serialize>(result: Result<T>) {\n", "file_path": "src/test.rs", "rank": 7, "score": 23180.785673608178 }, { "content": " let mut token = TOKEN.lock().unwrap();\n\n copy(&mut token, src_token);\n\n drop(token);\n\n // 运行中, 每次请求\n\n let mut client = no_auth_client();\n\n let mut token = TOKEN.lock().unwrap();\n\n if token.expires_in + time < now {\n\n let new_token = (&client).refresh_token(&token.refresh_token).await?;\n\n write_token(new_token.clone(), now);\n\n copy(&mut token, new_token)\n\n }\n\n let result = token.access_token.clone();\n\n drop(token);\n\n client.access_token = result;\n\n Ok(client)\n\n }\n\n\n\n #[tokio::test]\n\n async fn test_raw() {\n\n let client = authed_client().await.unwrap();\n", "file_path": "src/test.rs", "rank": 8, "score": 23180.604206476386 }, { "content": "pub struct MetaPage {\n\n pub image_urls: MetaPageImageUrls,\n\n}\n\n\n\n#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct MetaPageImageUrls {\n\n pub square_medium: String,\n\n pub medium: String,\n\n pub large: String,\n\n pub original: String,\n\n}\n\n\n\n#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct IllustTrendingTags {\n\n pub trend_tags: Vec<TrendTag>,\n\n}\n\n\n\n#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct TrendTag {\n\n pub tag: String,\n\n pub translated_name: Option<String>,\n\n pub illust: Illust,\n\n}\n\n\n\npub const ILLUST_SEARCH_MODE_PARTIAL_MATCH_FOR_TAGS: &'static str = \"partial_match_for_tags\";\n\npub const ILLUST_SEARCH_MODE_EXACT_MATCH_FOR_TAGS: &'static str = \"exact_match_for_tags\";\n\npub const ILLUST_SEARCH_MODE_TITLE_AND_CAPTION: &'static str = \"title_and_caption\";\n", "file_path": "src/entities.rs", "rank": 9, "score": 23179.797629680812 }, { "content": " fn copy(token: &mut MutexGuard<Token>, source: Token) {\n\n token.token_type = source.token_type;\n\n token.access_token = source.access_token;\n\n token.refresh_token = source.refresh_token;\n\n token.scope = source.scope;\n\n token.expires_in = source.expires_in;\n\n }\n\n\n\n async fn authed_client() -> Result<Client> {\n\n // 初始化(仅一次)\n\n let now = chrono::Local::now().timestamp_millis();\n\n let src_token: Token =\n\n serde_json::from_str(std::fs::read_to_string(\"test_token.json\").unwrap().as_str())\n\n .unwrap();\n\n let time: i64 = serde_json::from_str(\n\n std::fs::read_to_string(\"test_token_time.json\")\n\n .unwrap()\n\n .as_str(),\n\n )\n\n .unwrap();\n", "file_path": "src/test.rs", "rank": 10, "score": 23179.655332461807 }, { "content": "pub struct LoginErrorResponse {\n\n pub has_error: bool,\n\n pub errors: Errors,\n\n pub error: String,\n\n}\n\n\n\n#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct Errors {\n\n pub system: System,\n\n}\n\n\n\n#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct System {\n\n pub message: String,\n\n pub code: i64,\n\n}\n\n\n\n#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct AppError {\n\n pub error: ErrorBody,\n", "file_path": "src/entities.rs", "rank": 11, "score": 23177.94229949485 }, { "content": "use serde::{Deserialize, Serialize};\n\nuse serde_json::Value;\n\n\n\n#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct LoginUrl {\n\n pub verify: String,\n\n pub url: String,\n\n}\n\n\n\n// user 和 response 省略\n\n#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct Token {\n\n pub access_token: String,\n\n pub expires_in: i64,\n\n pub token_type: String,\n\n pub scope: String,\n\n pub refresh_token: String,\n\n}\n\n\n\n#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]\n", "file_path": "src/entities.rs", "rank": 12, "score": 23177.91993133788 }, { "content": " pub total_view: i64,\n\n pub total_bookmarks: i64,\n\n pub is_bookmarked: bool,\n\n pub visible: bool,\n\n pub is_muted: bool,\n\n}\n\n\n\n#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct MainImageUrls {\n\n pub square_medium: String,\n\n pub medium: String,\n\n pub large: String,\n\n}\n\n\n\n#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct User {\n\n pub id: i64,\n\n pub name: String,\n\n pub account: String,\n\n pub profile_image_urls: ProfileImageUrls,\n", "file_path": "src/entities.rs", "rank": 13, "score": 23177.37991740676 }, { "content": " pub is_followed: bool,\n\n}\n\n\n\n#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct ProfileImageUrls {\n\n pub medium: String,\n\n}\n\n\n\n#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct Tag {\n\n pub name: String,\n\n pub translated_name: Option<String>,\n\n}\n\n\n\n#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct MetaSinglePage {\n\n pub original_image_url: Option<String>,\n\n}\n\n\n\n#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]\n", "file_path": "src/entities.rs", "rank": 14, "score": 23175.977832111676 }, { "content": "}\n\n\n\n#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct ErrorBody {\n\n pub user_message: String,\n\n pub message: String,\n\n pub reason: String,\n\n pub user_message_details: UserMessageDetails,\n\n}\n\n\n\n#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct UserMessageDetails {}\n\n\n\n#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct IllustResponse {\n\n pub illusts: Vec<Illust>,\n\n pub next_url: String,\n\n}\n\n\n\n#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]\n", "file_path": "src/entities.rs", "rank": 15, "score": 23175.384381144693 }, { "content": "pub struct Illust {\n\n pub id: i64,\n\n pub title: String,\n\n #[serde(rename = \"type\")]\n\n pub illust_type: String,\n\n pub image_urls: MainImageUrls,\n\n pub caption: String,\n\n pub restrict: i64,\n\n pub user: User,\n\n pub tags: Vec<Tag>,\n\n pub tools: Vec<String>,\n\n pub create_date: String,\n\n pub page_count: i64,\n\n pub width: i64,\n\n pub height: i64,\n\n pub sanity_level: i64,\n\n pub x_restrict: i64,\n\n pub series: Value,\n\n pub meta_single_page: MetaSinglePage,\n\n pub meta_pages: Vec<MetaPage>,\n", "file_path": "src/entities.rs", "rank": 16, "score": 23174.950582527596 }, { "content": "PIXI-RUST\n\n===========\n\n\n\nRUST PIXIV API\n\n\n\n## 实现功能\n\n\n\n- [x] 登录验证\n\n- [x] 免流\n\n- [x] 刷新TOKEN\n\n- [x] 插画\n\n\n\n## 使用方法\n\n\n\n在Cargo.toml中增加依赖\n\n\n\n```toml\n\n[dependencies]\n\npixirust = { git = \"https://github.com/niuhuan/pixirust.git\", branch = \"master\" }\n\n```\n\n\n\n参考src/test.rs\n", "file_path": "README.md", "rank": 31, "score": 3.2855784898288167 } ]
Rust
codecs/src/aper/mod.rs
nplrkn/hampi
0b07137ecef245d462c9f942bb219ad7bac2f434
#![allow(dead_code)] pub mod error; pub use error::Error as AperCodecError; pub mod decode; pub mod encode; pub trait AperCodec { type Output; fn decode(data: &mut AperCodecData) -> Result<Self::Output, AperCodecError>; fn encode(&self, _data: &mut AperCodecData) -> Result<(), AperCodecError> { todo!(); } } use bitvec::prelude::*; #[derive(Default, Debug)] pub struct AperCodecData { bits: BitVec<Msb0, u8>, decode_offset: usize, key: Option<i128>, } impl AperCodecData { pub fn new() -> Self { Self::default() } pub fn from_slice(bytes: &[u8]) -> Self { Self { bits: BitSlice::<_, _>::from_slice(bytes).unwrap().to_bitvec(), decode_offset: 0, key: None, } } pub fn into_bytes(self) -> Vec<u8> { self.bits.into() } pub fn decode_align(&mut self) -> Result<(), AperCodecError> { if self.decode_offset % 8 == 0 { return Ok(()); } let remaining = 8 - (self.decode_offset & 0x7_usize); log::trace!("Aligning Codec Buffer with {} bits", remaining); if !self.bits[self.decode_offset..self.decode_offset + remaining] .iter() .all(|b| b == false) { Err(AperCodecError::new( format!( "{} Padding bits at Offset {} not all '0'.", remaining, self.decode_offset, ) .as_str(), )) } else { self.decode_offset += remaining; Ok(()) } } fn decode_bool(&mut self) -> Result<bool, AperCodecError> { if self.bits.len() == self.decode_offset { return Err(AperCodecError::new( "AperCodec:DecodeError:End of Bitstream reached while trying to decode bool.", )); } let bit = *self.bits.get(self.decode_offset).as_deref().unwrap(); let _ = self.advance_maybe_err(1, true)?; Ok(bit) } fn decode_bits_as_integer( &mut self, bits: usize, signed: bool, ) -> Result<i128, AperCodecError> { let remaining = self.bits.len() - self.decode_offset; if remaining < bits { Err(AperCodecError::new( format!( "AperCodec:DecodeError:Requested Bits to decode {}, Remaining bits {}", bits, remaining ) .as_str(), )) } else { log::trace!( "Decoding Bits as Integer. offset: {}, bits: {}", self.decode_offset, bits ); let value = if !signed { if bits == 0 { 0_i128 } else { self.bits[self.decode_offset..self.decode_offset + bits].load_be::<u128>() as i128 } } else { match bits { 8 => { let inner = self.bits[self.decode_offset..self.decode_offset + bits] .load_be::<u128>() as i8; inner as i128 } 16 => { let inner = self.bits[self.decode_offset..self.decode_offset + bits] .load_be::<u128>() as i16; inner as i128 } 24 => { let inner = self.bits[self.decode_offset..self.decode_offset + bits] .load_be::<u32>() as u32; let inner = if self.bits[self.decode_offset] { inner | 0xFF000000 } else { inner & 0x00FFFFFF }; let inner = inner as i32; inner as i128 } 32 => { let inner = self.bits[self.decode_offset..self.decode_offset + bits] .load_be::<u128>() as i32; inner as i128 } 40 => { let inner = self.bits[self.decode_offset..self.decode_offset + bits] .load_be::<u64>() as u64; let inner = if self.bits[self.decode_offset] { inner | 0xFFFFFF0000000000 } else { inner & 0x000000FFFFFFFFFF }; let inner = inner as i64; inner as i128 } 48 => { let inner = self.bits[self.decode_offset..self.decode_offset + bits] .load_be::<u64>() as u64; let inner = if self.bits[self.decode_offset] { inner | 0xFFFF000000000000 } else { inner & 0x0000FFFFFFFFFFFF }; let inner = inner as i64; inner as i128 } 56 => { eprintln!("{}", self.decode_offset); let inner = self.bits[self.decode_offset..self.decode_offset + bits] .load_be::<u64>() as u64; let inner = if self.bits[self.decode_offset] { inner | 0xFF00000000000000 } else { inner & 0x00FFFFFFFFFFFFFF }; let inner = inner as i64; inner as i128 } 64 => { let inner = self.bits[self.decode_offset..self.decode_offset + bits] .load_be::<u128>() as i64; inner as i128 } 128 => { let inner = self.bits[self.decode_offset..self.decode_offset + bits] .load_be::<u128>() as i128; inner as i128 } _ => { return Err( AperCodecError::new( format!( "For a signed number in 2's compliment form, requested bits {} not supported!", bits))); } } }; log::trace!("Decoded Value: {:#?}", value); self.advance_maybe_err(bits, false)?; Ok(value) } } fn advance_maybe_err(&mut self, bits: usize, ignore: bool) -> Result<(), AperCodecError> { let offset = self.decode_offset + bits; if offset > self.bits.len() { if ignore { self.decode_offset = self.bits.len() } else { let remaining = self.bits.len() - self.decode_offset; return Err(AperCodecError::new( format!( "AperCodec:DecodeError:Requested Bits to advance {}, Remaining bits {}", bits, remaining ) .as_str(), )); } } else { self.decode_offset = offset } Ok(()) } fn get_bit(&self) -> Result<bool, AperCodecError> { if self.decode_offset >= self.bits.len() { return Err(AperCodecError::new( format!( "AperCodec:GetBitError:Requested Bit {}, Remaining bits {}", self.decode_offset, self.bits.len() - self.decode_offset ) .as_str(), )); } let bit = *self.bits.get(self.decode_offset).as_deref().unwrap(); Ok(bit) } fn get_bitvec(&mut self, length: usize) -> Result<BitVec<Msb0, u8>, AperCodecError> { if length + self.decode_offset >= self.bits.len() { return Err(AperCodecError::new( format!( "AperCodec:GetBitError:Requested Bit {}, Remaining bits {}", length, self.bits.len() - self.decode_offset ) .as_str(), )); } let bv = BitVec::from_bitslice(&self.bits[self.decode_offset..self.decode_offset + length]); let _ = self.advance_maybe_err(length, true)?; Ok(bv) } fn get_bytes(&mut self, length: usize) -> Result<Vec<u8>, AperCodecError> { let length = length * 8; if length + self.decode_offset >= self.bits.len() { return Err(AperCodecError::new( format!( "AperCodec:GetBitError:Requested Bits {}, Remaining bits {}", length, self.bits.len() - self.decode_offset ) .as_str(), )); } let mut bv = self.bits[self.decode_offset..self.decode_offset + length].to_bitvec(); bv.force_align(); self.advance_maybe_err(length, true)?; Ok(BitVec::into_vec(bv)) } pub fn get_inner(&self) -> Result<Vec<u8>, AperCodecError> { Ok(BitVec::into_vec(self.bits.to_bitvec())) } pub fn get_key(&self) -> Option<i128> { self.key } pub fn set_key(&mut self, key: i128) { let _ = self.key.replace(key); } #[inline] pub fn dump(&self) { log::trace!("AperCodecData: offset: {}", self.decode_offset); } #[inline] pub fn dump_encode(&self) { log::trace!("AperCodecData: current_len : {}", self.bits.len()); } #[inline] pub fn reserve(&mut self, count: usize) { self.bits.reserve(count); self.decode_offset = count; } #[inline] pub fn seek(&mut self, offset: usize) { self.decode_offset = offset; } pub fn swap_bits(&mut self, other: &mut BitSlice<Msb0, u8>, offset: usize) { self.bits[offset..other.len() + offset].swap_with_bitslice(other); } pub fn set_bit(&mut self, index: usize, value: bool) { self.bits.set(index, value); } fn encode_bool(&mut self, value: bool) { self.bits.push(value); } fn append_bits(&mut self, bits: &BitSlice<Msb0, u8>) { self.bits.extend_from_bitslice(bits); } fn align(&mut self) { let remaining = 8 - (self.bits.len() & 0x7_usize); if remaining < 8 { self.bits.resize(self.bits.len() + remaining, false); } } pub fn length_in_bytes(&self) -> usize { ((self.bits.len() - 1) / 8) + 1 } pub fn append_aligned(&mut self, other: &mut Self) { self.align(); other.align(); self.append_bits(&other.bits) } } fn bytes_needed_for_range(range: i128) -> u8 { let bits_needed: u8 = 128 - range.leading_zeros() as u8; let mut bytes_needed = bits_needed / 8; if bits_needed % 8 != 0 { bytes_needed += 1 } bytes_needed } #[cfg(test)] mod tests { use super::*; #[test] fn get_bytes_unaligned() { let mut d = AperCodecData::from_slice(&vec![0x0f, 0xf0]); let _ = d.get_bitvec(4); let bytes = d.get_bytes(1).unwrap(); assert_eq!(bytes, vec![0xff]); } #[test] fn test_encode_decode_unconstrained_whole_number() { let numbers: Vec<i128> = vec![ 140737488355328, 140737488355327, 549755813888, 549755813887, 2147483648, 2147483647, 8388608, 8388607, 32768, 32767, 128, 127, 1, 0, -1, -128, -129, -32768, -32769, -8388608, -8388609, -2147483648, -2147483649, -549755813888, -549755813889, -140737488355328, -140737488355329, ]; for num in numbers { let mut d = AperCodecData::new(); eprintln!("number: {}", num); let result = encode::encode_integer(&mut d, None, None, false, num, false); eprintln!("{:?}", d); assert!(result.is_ok(), "{:#?}", d); let value = decode::decode_integer(&mut d, None, None, false); assert!(value.is_ok(), "{:#?}", value.err()); assert!(value.unwrap().0 == num); } } }
#![allow(dead_code)] pub mod error; pub use error::Error as AperCodecError; pub mod decode; pub mod encode; pub trait AperCodec { type Output; fn decode(data: &mut AperCodecData) -> Result<Self::Output, AperCodecError>; fn encode(&self, _data: &mut AperCodecData) -> Result<(), AperCodecError> { todo!(); } } use bitvec::prelude::*; #[derive(Default, Debug)] pub struct AperCodecData { bits: BitVec<Msb0, u8>, decode_offset: usize, key: Option<i128>, } impl AperCodecData { pub fn new() -> Self { Self::default() } pub fn from_slice(bytes: &[u8]) -> Self { Self { bits: BitSlice::<_, _>::from_slice(bytes).unwrap().to_bitvec(), decode_offset: 0, key: None, } } pub fn into_bytes(self) -> Vec<u8> { self.bits.into() } pub fn decode_align(&mut self) -> Result<(), AperCodecError> { if self.decode_offset % 8 == 0 { return Ok(()); } let remaining = 8 - (self.decode_offset & 0x7_usize); log::trace!("Aligning Codec Buffer with {} bits", remaining); if !self.bits[self.decode_offset..self.decode_offset + remaining] .iter() .all(|b| b == false) { Err(AperCodecError::new( format!( "{} Padding bits at Offset {} not all '0'.", remaining, self.decode_offset, ) .as_str(), )) } else { self.decode_offset += remaining; Ok(()) } } fn decode_bool(&mut self) -> Result<bool, AperCodecError> { if self.bits.len() == self.decode_offset { return Err(AperCodecError::new( "AperCodec:DecodeError:End of Bitstream reached while trying to decode bool.", )); } let bit = *self.bits.get(self.decode_offset).a
FF }; let inner = inner as i64; inner as i128 } 64 => { let inner = self.bits[self.decode_offset..self.decode_offset + bits] .load_be::<u128>() as i64; inner as i128 } 128 => { let inner = self.bits[self.decode_offset..self.decode_offset + bits] .load_be::<u128>() as i128; inner as i128 } _ => { return Err( AperCodecError::new( format!( "For a signed number in 2's compliment form, requested bits {} not supported!", bits))); } } }; log::trace!("Decoded Value: {:#?}", value); self.advance_maybe_err(bits, false)?; Ok(value) } } fn advance_maybe_err(&mut self, bits: usize, ignore: bool) -> Result<(), AperCodecError> { let offset = self.decode_offset + bits; if offset > self.bits.len() { if ignore { self.decode_offset = self.bits.len() } else { let remaining = self.bits.len() - self.decode_offset; return Err(AperCodecError::new( format!( "AperCodec:DecodeError:Requested Bits to advance {}, Remaining bits {}", bits, remaining ) .as_str(), )); } } else { self.decode_offset = offset } Ok(()) } fn get_bit(&self) -> Result<bool, AperCodecError> { if self.decode_offset >= self.bits.len() { return Err(AperCodecError::new( format!( "AperCodec:GetBitError:Requested Bit {}, Remaining bits {}", self.decode_offset, self.bits.len() - self.decode_offset ) .as_str(), )); } let bit = *self.bits.get(self.decode_offset).as_deref().unwrap(); Ok(bit) } fn get_bitvec(&mut self, length: usize) -> Result<BitVec<Msb0, u8>, AperCodecError> { if length + self.decode_offset >= self.bits.len() { return Err(AperCodecError::new( format!( "AperCodec:GetBitError:Requested Bit {}, Remaining bits {}", length, self.bits.len() - self.decode_offset ) .as_str(), )); } let bv = BitVec::from_bitslice(&self.bits[self.decode_offset..self.decode_offset + length]); let _ = self.advance_maybe_err(length, true)?; Ok(bv) } fn get_bytes(&mut self, length: usize) -> Result<Vec<u8>, AperCodecError> { let length = length * 8; if length + self.decode_offset >= self.bits.len() { return Err(AperCodecError::new( format!( "AperCodec:GetBitError:Requested Bits {}, Remaining bits {}", length, self.bits.len() - self.decode_offset ) .as_str(), )); } let mut bv = self.bits[self.decode_offset..self.decode_offset + length].to_bitvec(); bv.force_align(); self.advance_maybe_err(length, true)?; Ok(BitVec::into_vec(bv)) } pub fn get_inner(&self) -> Result<Vec<u8>, AperCodecError> { Ok(BitVec::into_vec(self.bits.to_bitvec())) } pub fn get_key(&self) -> Option<i128> { self.key } pub fn set_key(&mut self, key: i128) { let _ = self.key.replace(key); } #[inline] pub fn dump(&self) { log::trace!("AperCodecData: offset: {}", self.decode_offset); } #[inline] pub fn dump_encode(&self) { log::trace!("AperCodecData: current_len : {}", self.bits.len()); } #[inline] pub fn reserve(&mut self, count: usize) { self.bits.reserve(count); self.decode_offset = count; } #[inline] pub fn seek(&mut self, offset: usize) { self.decode_offset = offset; } pub fn swap_bits(&mut self, other: &mut BitSlice<Msb0, u8>, offset: usize) { self.bits[offset..other.len() + offset].swap_with_bitslice(other); } pub fn set_bit(&mut self, index: usize, value: bool) { self.bits.set(index, value); } fn encode_bool(&mut self, value: bool) { self.bits.push(value); } fn append_bits(&mut self, bits: &BitSlice<Msb0, u8>) { self.bits.extend_from_bitslice(bits); } fn align(&mut self) { let remaining = 8 - (self.bits.len() & 0x7_usize); if remaining < 8 { self.bits.resize(self.bits.len() + remaining, false); } } pub fn length_in_bytes(&self) -> usize { ((self.bits.len() - 1) / 8) + 1 } pub fn append_aligned(&mut self, other: &mut Self) { self.align(); other.align(); self.append_bits(&other.bits) } } fn bytes_needed_for_range(range: i128) -> u8 { let bits_needed: u8 = 128 - range.leading_zeros() as u8; let mut bytes_needed = bits_needed / 8; if bits_needed % 8 != 0 { bytes_needed += 1 } bytes_needed } #[cfg(test)] mod tests { use super::*; #[test] fn get_bytes_unaligned() { let mut d = AperCodecData::from_slice(&vec![0x0f, 0xf0]); let _ = d.get_bitvec(4); let bytes = d.get_bytes(1).unwrap(); assert_eq!(bytes, vec![0xff]); } #[test] fn test_encode_decode_unconstrained_whole_number() { let numbers: Vec<i128> = vec![ 140737488355328, 140737488355327, 549755813888, 549755813887, 2147483648, 2147483647, 8388608, 8388607, 32768, 32767, 128, 127, 1, 0, -1, -128, -129, -32768, -32769, -8388608, -8388609, -2147483648, -2147483649, -549755813888, -549755813889, -140737488355328, -140737488355329, ]; for num in numbers { let mut d = AperCodecData::new(); eprintln!("number: {}", num); let result = encode::encode_integer(&mut d, None, None, false, num, false); eprintln!("{:?}", d); assert!(result.is_ok(), "{:#?}", d); let value = decode::decode_integer(&mut d, None, None, false); assert!(value.is_ok(), "{:#?}", value.err()); assert!(value.unwrap().0 == num); } } }
s_deref().unwrap(); let _ = self.advance_maybe_err(1, true)?; Ok(bit) } fn decode_bits_as_integer( &mut self, bits: usize, signed: bool, ) -> Result<i128, AperCodecError> { let remaining = self.bits.len() - self.decode_offset; if remaining < bits { Err(AperCodecError::new( format!( "AperCodec:DecodeError:Requested Bits to decode {}, Remaining bits {}", bits, remaining ) .as_str(), )) } else { log::trace!( "Decoding Bits as Integer. offset: {}, bits: {}", self.decode_offset, bits ); let value = if !signed { if bits == 0 { 0_i128 } else { self.bits[self.decode_offset..self.decode_offset + bits].load_be::<u128>() as i128 } } else { match bits { 8 => { let inner = self.bits[self.decode_offset..self.decode_offset + bits] .load_be::<u128>() as i8; inner as i128 } 16 => { let inner = self.bits[self.decode_offset..self.decode_offset + bits] .load_be::<u128>() as i16; inner as i128 } 24 => { let inner = self.bits[self.decode_offset..self.decode_offset + bits] .load_be::<u32>() as u32; let inner = if self.bits[self.decode_offset] { inner | 0xFF000000 } else { inner & 0x00FFFFFF }; let inner = inner as i32; inner as i128 } 32 => { let inner = self.bits[self.decode_offset..self.decode_offset + bits] .load_be::<u128>() as i32; inner as i128 } 40 => { let inner = self.bits[self.decode_offset..self.decode_offset + bits] .load_be::<u64>() as u64; let inner = if self.bits[self.decode_offset] { inner | 0xFFFFFF0000000000 } else { inner & 0x000000FFFFFFFFFF }; let inner = inner as i64; inner as i128 } 48 => { let inner = self.bits[self.decode_offset..self.decode_offset + bits] .load_be::<u64>() as u64; let inner = if self.bits[self.decode_offset] { inner | 0xFFFF000000000000 } else { inner & 0x0000FFFFFFFFFFFF }; let inner = inner as i64; inner as i128 } 56 => { eprintln!("{}", self.decode_offset); let inner = self.bits[self.decode_offset..self.decode_offset + bits] .load_be::<u64>() as u64; let inner = if self.bits[self.decode_offset] { inner | 0xFF00000000000000 } else { inner & 0x00FFFFFFFFFFFF
random
[ { "content": "/// Decode a Boolean\n\n///\n\n/// Decode a Boolean value. Returns the decoded value as a `bool`.\n\npub fn decode_bool(data: &mut AperCodecData) -> Result<bool, AperCodecError> {\n\n data.decode_bool()\n\n}\n\n\n", "file_path": "codecs/src/aper/decode/mod.rs", "rank": 0, "score": 262312.41935393296 }, { "content": "/// Encode a BOOLEAN Value\n\n///\n\n/// Encodes a boolean value into the passed `AperCodecData` structure.\n\npub fn encode_bool(data: &mut AperCodecData, value: bool) -> Result<(), AperCodecError> {\n\n log::trace!(\"encode_bool\");\n\n data.encode_bool(value);\n\n Ok(())\n\n}\n\n\n", "file_path": "codecs/src/aper/encode/mod.rs", "rank": 1, "score": 252455.95629671589 }, { "content": "// Called when `ub` is not determined or `ub ` - `lb` is greater than 64K and in this case value of\n\n// `lb` is don't care.\n\nfn decode_indefinite_length_determinent(data: &mut AperCodecData) -> Result<usize, AperCodecError> {\n\n let _ = data.decode_align()?;\n\n let first = data.decode_bool()?;\n\n let length = if !first {\n\n data.decode_bits_as_integer(7, false)?\n\n } else {\n\n let second = data.decode_bool()?;\n\n if !second {\n\n data.decode_bits_as_integer(14, false)?\n\n } else {\n\n let length = data.decode_bits_as_integer(6, false)?;\n\n if !(1..=4).contains(&length) {\n\n return Err(AperCodecError::new(\"The value should be 1 to 4\"));\n\n } else {\n\n length * 16384\n\n }\n\n }\n\n };\n\n Ok(length.try_into().unwrap())\n\n}\n", "file_path": "codecs/src/aper/decode/decode_internal.rs", "rank": 2, "score": 200505.13960908414 }, { "content": "/// Encode a Bit String\n\npub fn encode_bitstring(\n\n data: &mut AperCodecData,\n\n lb: Option<i128>,\n\n ub: Option<i128>,\n\n is_extensible: bool,\n\n bit_string: &BitSlice<Msb0, u8>,\n\n extended: bool,\n\n) -> Result<(), AperCodecError> {\n\n log::trace!(\"encode_bitstring\");\n\n\n\n if extended {\n\n return Err(AperCodecError::new(\n\n \"Encode of extended bitstring not yet implemented\",\n\n ));\n\n }\n\n\n\n if is_extensible {\n\n data.encode_bool(extended);\n\n }\n\n\n", "file_path": "codecs/src/aper/encode/mod.rs", "rank": 3, "score": 192114.84311950183 }, { "content": "/// Encode an Integer\n\npub fn encode_integer(\n\n data: &mut AperCodecData,\n\n lb: Option<i128>,\n\n ub: Option<i128>,\n\n is_extensible: bool,\n\n value: i128,\n\n extended: bool,\n\n) -> Result<(), AperCodecError> {\n\n log::trace!(\"encode_integer\");\n\n if extended {\n\n return Err(AperCodecError::new(\n\n \"Encode of extended integer not yet implemented\",\n\n ));\n\n }\n\n\n\n if is_extensible {\n\n data.encode_bool(extended);\n\n }\n\n\n\n match (lb, ub) {\n\n (None, _) => encode_unconstrained_whole_number(data, value),\n\n (Some(lb), None) => encode_semi_constrained_whole_number(data, lb, value),\n\n (Some(lb), Some(ub)) => encode_constrained_whole_number(data, lb, ub, value),\n\n }\n\n}\n\n\n", "file_path": "codecs/src/aper/encode/mod.rs", "rank": 4, "score": 192108.99699754934 }, { "content": "/// Encode an OCTET STRING\n\npub fn encode_octetstring(\n\n data: &mut AperCodecData,\n\n lb: Option<i128>,\n\n ub: Option<i128>,\n\n is_extensible: bool,\n\n octet_string: &Vec<u8>,\n\n extended: bool,\n\n) -> Result<(), AperCodecError> {\n\n log::trace!(\"encode_octetstring\");\n\n\n\n if extended {\n\n return Err(AperCodecError::new(\n\n \"Encode of extended octetstring not yet implemented\",\n\n ));\n\n }\n\n\n\n if is_extensible {\n\n data.encode_bool(extended);\n\n }\n\n\n", "file_path": "codecs/src/aper/encode/mod.rs", "rank": 5, "score": 192108.90733483422 }, { "content": "/// Encode an Enumerated Value\n\npub fn encode_enumerated(\n\n data: &mut AperCodecData,\n\n lb: Option<i128>,\n\n ub: Option<i128>,\n\n is_extensible: bool,\n\n value: i128,\n\n extended: bool,\n\n) -> Result<(), AperCodecError> {\n\n log::trace!(\"encode_enumerated\");\n\n if extended {\n\n return Err(AperCodecError::new(\n\n \"Encode of extended enumerated not yet implemented\",\n\n ));\n\n }\n\n\n\n if is_extensible {\n\n data.encode_bool(extended);\n\n }\n\n\n\n encode_integer(data, lb, ub, false, value, false)\n\n}\n\n\n", "file_path": "codecs/src/aper/encode/mod.rs", "rank": 6, "score": 192108.90733483422 }, { "content": "/// Decode an Integer\n\n///\n\n/// Given an Integer Specification with PER Visible Constraints, decode an Integer Value to obtain\n\n/// the integer value which will always be returned as an i128 value.\n\n///\n\n/// `lb` and `ub` are upper and lower bounds as determined by the PER Constraints (and hence can be\n\n/// `None` if no Constraints are not speicifed. `is_extensible` specifies whether the defined type\n\n/// is extensible (as per PER Constraints). Returned value is the value of the Integer (i128) and\n\n/// whether the value is outside the extension root (`bool`: `true` if value is outside the\n\n/// extension root.).\n\npub fn decode_integer(\n\n data: &mut AperCodecData,\n\n lb: Option<i128>,\n\n ub: Option<i128>,\n\n is_extensible: bool,\n\n) -> Result<(i128, bool), AperCodecError> {\n\n log::trace!(\n\n \"decode_integer: Lower: {:#?} Upper:{:#?} Extensible: {}\",\n\n lb,\n\n ub,\n\n is_extensible\n\n );\n\n data.dump();\n\n let extended_value = if is_extensible {\n\n data.decode_bool()?\n\n } else {\n\n false\n\n };\n\n\n\n let value = if extended_value {\n", "file_path": "codecs/src/aper/decode/mod.rs", "rank": 7, "score": 192106.98586888472 }, { "content": "/// Decode a Bit String\n\n///\n\n/// Decodes the value of the BIT STRING from the Buffer.\n\npub fn decode_bitstring(\n\n data: &mut AperCodecData,\n\n lb: Option<i128>,\n\n ub: Option<i128>,\n\n is_extensible: bool,\n\n) -> Result<BitVec<Msb0, u8>, AperCodecError> {\n\n let is_extended = if is_extensible {\n\n data.decode_bool()?\n\n } else {\n\n false\n\n };\n\n\n\n let mut bv = BitVec::new();\n\n loop {\n\n let length = if is_extended {\n\n decode_length_determinent(data, None, None, false)?\n\n } else {\n\n decode_length_determinent(data, lb, ub, false)?\n\n };\n\n\n", "file_path": "codecs/src/aper/decode/mod.rs", "rank": 8, "score": 192106.33962594497 }, { "content": "/// Decode an OCTET STRING\n\n///\n\n/// Decodes the value of the OCTET STRING from the Buffer.\n\npub fn decode_octetstring(\n\n data: &mut AperCodecData,\n\n lb: Option<i128>,\n\n ub: Option<i128>,\n\n is_extensible: bool,\n\n) -> Result<Vec<u8>, AperCodecError> {\n\n let is_extended = if is_extensible {\n\n data.decode_bool()?\n\n } else {\n\n false\n\n };\n\n\n\n let mut octets = Vec::new();\n\n loop {\n\n let length = if is_extended {\n\n decode_length_determinent(data, None, None, false)?\n\n } else {\n\n decode_length_determinent(data, lb, ub, false)?\n\n };\n\n\n", "file_path": "codecs/src/aper/decode/mod.rs", "rank": 9, "score": 192099.77688592093 }, { "content": "/// Decode an Enumerated Value\n\n///\n\n/// Decodes an Enumerated value as an index into either `root_values` of the ENUMERATED or\n\n/// `ext_values` of the ENUMERATED and also decodes a flag indicating where the value belongs. If\n\n/// `false` the value is from the `root_values`, else the value is from the `ext_values` of the\n\n/// ENUMERATED.\n\npub fn decode_enumerated(\n\n data: &mut AperCodecData,\n\n lb: Option<i128>,\n\n ub: Option<i128>,\n\n is_extensible: bool,\n\n) -> Result<(i128, bool), AperCodecError> {\n\n log::trace!(\"decode_enumerated\");\n\n data.dump();\n\n\n\n let is_extended = if is_extensible {\n\n data.decode_bool()?\n\n } else {\n\n false\n\n };\n\n\n\n let decoded = if !is_extended {\n\n let decoded = decode_integer(data, lb, ub, false)?;\n\n decoded.0\n\n } else {\n\n decode_normally_small_non_negative_whole_number(data)?\n\n };\n\n\n\n data.dump();\n\n\n\n Ok((decoded, is_extended))\n\n}\n\n\n", "file_path": "codecs/src/aper/decode/mod.rs", "rank": 10, "score": 192093.86019967208 }, { "content": "/// Encode a Choice Index\n\n///\n\n/// During Encoding a 'CHOICE' Type to help decoding, the 'CHOICE' Index is encoded first, followed\n\n/// by the actual encoding of the 'CHOICE' variant.\n\npub fn encode_choice_idx(\n\n data: &mut AperCodecData,\n\n lb: i128,\n\n ub: i128,\n\n is_extensible: bool,\n\n idx: i128,\n\n extended: bool,\n\n) -> Result<(), AperCodecError> {\n\n log::trace!(\"encode_choice_idx\");\n\n\n\n if extended {\n\n return Err(AperCodecError::new(\n\n \"Encode of extended choice not yet implemented\",\n\n ));\n\n }\n\n\n\n if is_extensible {\n\n data.encode_bool(extended);\n\n }\n\n encode_integer(data, Some(lb), Some(ub), false, idx, false)\n\n}\n\n\n", "file_path": "codecs/src/aper/encode/mod.rs", "rank": 11, "score": 184557.3652374931 }, { "content": "/// Encode a VisibleString CharacterString Type.\n\npub fn encode_visible_string(\n\n data: &mut AperCodecData,\n\n lb: Option<i128>,\n\n ub: Option<i128>,\n\n is_extensible: bool,\n\n value: &String,\n\n extended: bool,\n\n) -> Result<(), AperCodecError> {\n\n log::trace!(\"encode_visible_string\");\n\n encode_string(data, lb, ub, is_extensible, value, extended)\n\n}\n\n\n", "file_path": "codecs/src/aper/encode/mod.rs", "rank": 12, "score": 184551.87869079833 }, { "content": "/// Encode a PrintableString CharacterString Type.\n\npub fn encode_printable_string(\n\n data: &mut AperCodecData,\n\n lb: Option<i128>,\n\n ub: Option<i128>,\n\n is_extensible: bool,\n\n value: &String,\n\n extended: bool,\n\n) -> Result<(), AperCodecError> {\n\n log::trace!(\"encode_printable_string\");\n\n encode_string(data, lb, ub, is_extensible, value, extended)\n\n}\n\n\n", "file_path": "codecs/src/aper/encode/mod.rs", "rank": 13, "score": 184551.87869079833 }, { "content": "/// Encode a UTF8String CharacterString Type.\n\npub fn encode_utf8_string(\n\n data: &mut AperCodecData,\n\n lb: Option<i128>,\n\n ub: Option<i128>,\n\n is_extensible: bool,\n\n value: &String,\n\n extended: bool,\n\n) -> Result<(), AperCodecError> {\n\n log::trace!(\"encode_utf8_string\");\n\n encode_string(data, lb, ub, is_extensible, value, extended)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn encode_bool_always_success() {\n\n let mut data = AperCodecData::new();\n\n\n\n let result = encode_bool(&mut data, true);\n\n assert!(result.is_ok());\n\n assert_eq!(data.bits.len(), 1);\n\n assert_eq!(data.bits[0], true);\n\n }\n\n}\n", "file_path": "codecs/src/aper/encode/mod.rs", "rank": 14, "score": 184551.87869079833 }, { "content": "/// Encode sequence header\n\npub fn encode_sequence_header(\n\n data: &mut AperCodecData,\n\n is_extensible: bool,\n\n optionals: &BitSlice<Msb0, u8>,\n\n extended: bool,\n\n) -> Result<(), AperCodecError> {\n\n log::trace!(\"encode_sequence_header\");\n\n\n\n if extended {\n\n return Err(AperCodecError::new(\n\n \"Encode of extended sequence not yet implemented\",\n\n ));\n\n }\n\n\n\n if is_extensible {\n\n data.encode_bool(extended);\n\n }\n\n\n\n data.append_bits(optionals);\n\n Ok(())\n\n}\n\n\n", "file_path": "codecs/src/aper/encode/mod.rs", "rank": 15, "score": 184546.48257719955 }, { "content": "// Encode a Length Determinent\n\npub fn encode_length_determinent(\n\n data: &mut AperCodecData,\n\n lb: Option<i128>,\n\n ub: Option<i128>,\n\n normally_small: bool,\n\n value: usize,\n\n) -> Result<(), AperCodecError> {\n\n log::trace!(\"encode_length_determinent\");\n\n\n\n if normally_small {\n\n return encode_normally_small_length_determinent(data, value);\n\n }\n\n\n\n match ub {\n\n Some(ub) if ub < 65_536 => {\n\n encode_constrained_whole_number(data, lb.unwrap_or(0), ub, value as i128)\n\n }\n\n _ => encode_indefinite_length_determinent(data, value),\n\n }\n\n}\n\n\n", "file_path": "codecs/src/aper/encode/mod.rs", "rank": 16, "score": 184546.48257719955 }, { "content": "/// Decode a Choice Index.\n\n///\n\n/// For an ASN.1 `CHOICE` Type, a CHOICE Index is first decoded. This function is used to `decode`\n\n/// the choice index. Returns the Index in the 'root' or 'additions' and a flag indicated whether\n\n/// the value is from the 'root_extensions' or 'addtions'. The caller would then decide the\n\n/// appropriate `decode` function for the CHOICE variant is called.\n\npub fn decode_choice_idx(\n\n data: &mut AperCodecData,\n\n lb: i128,\n\n ub: i128,\n\n is_extensible: bool,\n\n) -> Result<(i128, bool), AperCodecError> {\n\n log::trace!(\"decode_choice_idx\");\n\n data.dump();\n\n\n\n let (idx, extended) = if is_extensible {\n\n let extended = data.decode_bool()?;\n\n if !extended {\n\n let (idx, _) = decode_integer(data, Some(lb), Some(ub), false)?;\n\n (idx, extended)\n\n } else {\n\n let idx = decode_normally_small_non_negative_whole_number(data)?;\n\n (idx, extended)\n\n }\n\n } else {\n\n let (idx, _) = decode_integer(data, Some(lb), Some(ub), false)?;\n\n (idx, false)\n\n };\n\n\n\n data.dump();\n\n\n\n Ok((idx, extended))\n\n}\n\n\n", "file_path": "codecs/src/aper/decode/mod.rs", "rank": 17, "score": 184544.49299926695 }, { "content": "/// Decode The Sequence Header\n\n///\n\n/// The Sequence Header consists of potentially two fields\n\n/// 1. Whether `extensions` are present in the encoding\n\n/// 2. Which of the OPTIONAL fields (if any) are present as a bitmap.\n\npub fn decode_sequence_header(\n\n data: &mut AperCodecData,\n\n is_extensible: bool,\n\n optional_count: usize,\n\n) -> Result<(BitVec<Msb0, u8>, bool), AperCodecError> {\n\n log::trace!(\"decode_sequence_header\");\n\n data.dump();\n\n let extended = if is_extensible {\n\n data.decode_bool()?\n\n } else {\n\n false\n\n };\n\n\n\n let mut bitmap = BitVec::new();\n\n if optional_count > 0 {\n\n bitmap.extend(data.get_bitvec(optional_count)?);\n\n }\n\n\n\n data.dump();\n\n Ok((bitmap, extended))\n\n}\n\n\n", "file_path": "codecs/src/aper/decode/mod.rs", "rank": 18, "score": 184535.62274703235 }, { "content": "fn encode_string(\n\n data: &mut AperCodecData,\n\n lb: Option<i128>,\n\n ub: Option<i128>,\n\n is_extensible: bool,\n\n value: &String,\n\n extended: bool,\n\n) -> Result<(), AperCodecError> {\n\n if extended {\n\n return Err(AperCodecError::new(\n\n \"Encode of extended visible string not yet implemented\",\n\n ));\n\n }\n\n\n\n if is_extensible {\n\n data.encode_bool(extended);\n\n }\n\n encode_length_determinent(data, lb, ub, false, value.len())?;\n\n if value.len() > 2 {\n\n data.align();\n\n }\n\n data.append_bits(value.as_bits());\n\n Ok(())\n\n}\n\n\n", "file_path": "codecs/src/aper/encode/mod.rs", "rank": 20, "score": 159569.21044557684 }, { "content": "/// Decode a PrintableString CharacterString Type.\n\npub fn decode_printable_string(\n\n data: &mut AperCodecData,\n\n lb: Option<i128>,\n\n ub: Option<i128>,\n\n is_extensible: bool,\n\n) -> Result<String, AperCodecError> {\n\n let (_val_lower, _val_higher) = (32u8, 122u8);\n\n\n\n let num_bits = 8; // N = 74, B = 7, B2 = 8\n\n\n\n let mut alphabet = vec![' ', '\\'', '(', ')', '+', ',', '-', '.', '/'];\n\n alphabet.extend(('0'..='9').collect::<Vec<char>>());\n\n alphabet.extend(vec![':', '=', '?']);\n\n alphabet.extend(('a'..='z').collect::<Vec<char>>());\n\n alphabet.extend(('A'..='Z').collect::<Vec<char>>());\n\n\n\n let is_extended = if is_extensible {\n\n data.decode_bool()?\n\n } else {\n\n false\n", "file_path": "codecs/src/aper/decode/decode_charstrings.rs", "rank": 21, "score": 158153.77357081856 }, { "content": "// 27.5.3 and 27.5.4\n\n/// Decode a VisibleString CharacterString Type.\n\npub fn decode_visible_string(\n\n data: &mut AperCodecData,\n\n lb: Option<i128>,\n\n ub: Option<i128>,\n\n is_extensible: bool,\n\n) -> Result<String, AperCodecError> {\n\n // Following values are never used instead Canonical decode\n\n let (_val_lower, _val_higher) = (32u8, 127u8);\n\n\n\n let num_bits = 8; // N = 95, B = 7, B2 = 8\n\n\n\n let is_extended = if is_extensible {\n\n data.decode_bool()?\n\n } else {\n\n false\n\n };\n\n\n\n let length = if is_extended {\n\n decode_length_determinent(data, None, None, false)?\n\n } else {\n", "file_path": "codecs/src/aper/decode/decode_charstrings.rs", "rank": 22, "score": 158153.77357081856 }, { "content": "// UTF-8 String is always - indefinite length case as it's not a fixed character width string. It's\n\n// almost like decoding an octet string.\n\n// 27.6\n\n/// Decode a UTF8String CharacterString Type.\n\npub fn decode_utf8_string(\n\n data: &mut AperCodecData,\n\n _lb: Option<i128>,\n\n _ub: Option<i128>,\n\n _is_extensible: bool,\n\n) -> Result<String, AperCodecError> {\n\n let (_val_lower, _val_higher) = (0u8, 255u8);\n\n\n\n let num_bits = 8; // N = 74, B = 7, B2 = 8\n\n\n\n let length = decode_length_determinent(data, None, None, false)?;\n\n let mut out = String::new();\n\n if length > 0 {\n\n let length = length * num_bits;\n\n\n\n if length > 16 {\n\n data.decode_align()?;\n\n }\n\n\n\n let bits = data.get_bitvec(length)?;\n", "file_path": "codecs/src/aper/decode/decode_charstrings.rs", "rank": 23, "score": 158153.03753022262 }, { "content": "// Decode a Length Determinent (Section 10.9)\n\n//\n\n// Decodes a Length Determinent.\n\npub fn decode_length_determinent(\n\n data: &mut AperCodecData,\n\n lb: Option<i128>,\n\n ub: Option<i128>,\n\n normally_small: bool,\n\n) -> Result<usize, AperCodecError> {\n\n // Normally small is told to us by caller and we don't care about `lb` and `ub` values in that\n\n // case. We simply follow the procedure as explained in 10.9.3.4\n\n log::trace!(\"decode_length_determinent\");\n\n data.dump();\n\n if normally_small {\n\n return decode_normally_small_length_determinent(data);\n\n }\n\n\n\n let lb = if let Some(l) = lb {\n\n l.try_into().unwrap()\n\n } else {\n\n 0usize\n\n };\n\n\n", "file_path": "codecs/src/aper/decode/decode_internal.rs", "rank": 24, "score": 158149.06366899319 }, { "content": "/// Tokenize ASN file.\n\n///\n\n/// This function would work on any input that implements `std::io::Read` trait, but would work\n\n/// mostly with files because this 'reads the input to end'. We look at the first character of a\n\n/// non-whitespace sequence and then tokenize that into appropriate tokens.\n\npub fn tokenize<T>(mut input: T) -> Result<Vec<Token>, Error>\n\nwhere\n\n T: std::io::Read,\n\n{\n\n let mut line = 1;\n\n let mut tokens: Vec<Token> = Vec::new();\n\n let mut buffer = Vec::new();\n\n let _ = input.read_to_end(&mut buffer).unwrap();\n\n let buffer = String::from_utf8(buffer).unwrap();\n\n let chars: Vec<char> = buffer.chars().collect();\n\n let mut column = 0_usize;\n\n let mut processed = 0;\n\n let total_read = chars.len();\n\n loop {\n\n let c = chars[processed];\n\n match c {\n\n ' ' | '\\t' => {\n\n processed += 1;\n\n column += 1;\n\n }\n", "file_path": "asn-compiler/src/tokenizer/mod.rs", "rank": 25, "score": 156223.13482619543 }, { "content": "fn parse_referenced_type(tokens: &[Token]) -> Result<(Asn1TypeKind, usize), Error> {\n\n let mut consumed = 0;\n\n\n\n if let Ok(success) = expect_tokens(\n\n &tokens[consumed..],\n\n &[\n\n &[Token::is_object_class_reference],\n\n &[Token::is_dot],\n\n &[\n\n Token::is_type_field_reference,\n\n Token::is_value_field_reference,\n\n ],\n\n ],\n\n ) {\n\n if success {\n\n let classref = tokens[consumed].text.clone();\n\n let fieldref = tokens[consumed + 2].text.clone();\n\n return Ok((\n\n Asn1TypeKind::Reference(Asn1TypeReference::ClassField { classref, fieldref }),\n\n 3,\n", "file_path": "asn-compiler/src/parser/asn/types/int.rs", "rank": 26, "score": 137986.48735660157 }, { "content": "fn parse_sequence_of_type(tokens: &[Token]) -> Result<(Asn1TypeKind, usize), Error> {\n\n let mut consumed = 0;\n\n\n\n // Initial SEQUENCE is already consumed.\n\n consumed += 1;\n\n\n\n let (size, size_consumed) = match parse_constraint(&tokens[consumed..]) {\n\n Ok(result) => (Some(result.0), result.1),\n\n Err(_) => (None, 0),\n\n };\n\n consumed += size_consumed;\n\n\n\n if !expect_keyword(&tokens[consumed..], \"OF\")? {\n\n return Err(unexpected_token!(\"'OF'\", tokens[consumed]));\n\n }\n\n consumed += 1;\n\n\n\n let (ty, ty_consumed) = parse_type(&tokens[consumed..])?;\n\n consumed += ty_consumed;\n\n\n\n let ty = Box::new(ty);\n\n Ok((\n\n Asn1TypeKind::Constructed(Asn1ConstructedType::SequenceOf(Asn1TypeSequenceOf {\n\n size,\n\n ty,\n\n })),\n\n consumed,\n\n ))\n\n}\n\n\n", "file_path": "asn-compiler/src/parser/asn/types/constructed/seq.rs", "rank": 27, "score": 134443.69044499207 }, { "content": "fn parse_sequence_type(tokens: &[Token]) -> Result<(Asn1TypeKind, usize), Error> {\n\n let mut consumed = 0;\n\n // Initial 'SEQUENCE' is consumed by the caller. We start with '{'\n\n\n\n consumed += 1; // For the SEQUENCE\n\n\n\n if !expect_token(&tokens[consumed..], Token::is_curly_begin)? {\n\n return Err(unexpected_token!(\"'{'\", tokens[consumed]));\n\n }\n\n consumed += 1;\n\n\n\n let mut root_components = vec![];\n\n let mut additions = vec![];\n\n let mut ext_marker_found = 0;\n\n loop {\n\n let (component, component_consumed) = match parse_seq_component(&tokens[consumed..]) {\n\n Ok(result) => result,\n\n Err(_) => (None, 0),\n\n };\n\n if let Some(root_comp) = component {\n", "file_path": "asn-compiler/src/parser/asn/types/constructed/seq.rs", "rank": 28, "score": 134443.69044499207 }, { "content": "// Get token for a number Integer or Real\n\nfn get_number_token(chars: &[char], line: usize, begin: usize) -> Result<(Token, usize), Error> {\n\n let neg = (chars[0] == '-') as usize;\n\n\n\n if neg > 0 && chars.len() == 1 {\n\n return Err(Error::TokenizeError(14, line, begin));\n\n }\n\n\n\n let mut consumed = neg;\n\n let last = chars[neg..].iter().position(|&x| !x.is_numeric());\n\n if let Some(lst) = last {\n\n consumed += lst;\n\n } else {\n\n consumed += chars[neg..].len();\n\n }\n\n\n\n Ok((\n\n Token {\n\n r#type: TokenType::NumberInt,\n\n span: Span::new(\n\n LineColumn::new(line, begin),\n\n LineColumn::new(line, begin + consumed),\n\n ),\n\n text: chars[..consumed].iter().collect::<String>(), // include the sign as well\n\n },\n\n consumed,\n\n ))\n\n}\n\n\n", "file_path": "asn-compiler/src/tokenizer/mod.rs", "rank": 29, "score": 133067.32231187506 }, { "content": "pub fn ngap_decode_bench(c: &mut Criterion) {\n\n let ngap_data = hex::decode(\n\n \"0015404a000004001b00084002f898000000000052400f06004d79206c6974746c6520674e420066001f01000000000002f8980001000800800000010002f8390001001881c00013880015400140\",\n\n ).unwrap();\n\n\n\n c.bench_function(\"NGAP_decode\", |b| {\n\n b.iter(|| {\n\n let mut codec_data = AperCodecData::from_slice(&ngap_data);\n\n let _ = NGAP_PDU::decode(&mut codec_data).unwrap();\n\n });\n\n });\n\n}\n\n\n\ncriterion_group!(ngap_decode, ngap_decode_bench);\n\ncriterion_main!(ngap_decode);\n", "file_path": "examples/benches/ngap_bench.rs", "rank": 30, "score": 132870.9833062564 }, { "content": "/// Parse the tokens into internal Asn1Module representation\n\n///\n\n/// Token obtained from running [`tokenize`][`crate::tokenizer::tokenize] on an ANS file are parsed\n\n/// into an internal representation of [`Asn1Module`][`crate::structs::Asn1Module`]. Semantic\n\n/// errors during parsing the tokens are returned as appropriate variant of `Error`.\n\npub fn parse(tokens: &mut Vec<Token>) -> Result<Vec<Asn1Module>, Error> {\n\n // Get rid of the comments, it complicates things\n\n tokens.retain(|x| !x.is_comment());\n\n\n\n let mut modules = vec![];\n\n let mut total = 0;\n\n loop {\n\n let (module, consumed) = parse_module(&tokens[total..])?;\n\n modules.push(module);\n\n total += consumed;\n\n if total == tokens.len() {\n\n break;\n\n }\n\n }\n\n Ok(modules)\n\n}\n\n\n\n// TODO: Test cases, at-least single-module, multiple modules etc.\n", "file_path": "asn-compiler/src/parser/int.rs", "rank": 31, "score": 129720.3718090379 }, { "content": "// Gets Begin/End of round/curly brackets.\n\n//\n\n// Note: square brackets need a special treatment due to \"[[\" and \"]]\"\n\nfn get_single_char_token(token: char, line: usize, begin: usize) -> Result<Token, Error> {\n\n let token_type: TokenType;\n\n match token {\n\n '{' => token_type = TokenType::CurlyBegin,\n\n '}' => token_type = TokenType::CurlyEnd,\n\n '(' => token_type = TokenType::RoundBegin,\n\n ')' => token_type = TokenType::RoundEnd,\n\n '!' => token_type = TokenType::ExceptionMarker,\n\n ';' => token_type = TokenType::SemiColon,\n\n ',' => token_type = TokenType::Comma,\n\n '|' => token_type = TokenType::SetUnionToken,\n\n '^' => token_type = TokenType::SetIntersectionToken,\n\n '<' => token_type = TokenType::LessThan,\n\n _ => return Err(Error::TokenizeError(21, line, begin)),\n\n }\n\n Ok(Token {\n\n r#type: token_type,\n\n span: Span::new(\n\n LineColumn::new(line, begin),\n\n LineColumn::new(line, begin + 1),\n\n ),\n\n text: token.to_string(),\n\n })\n\n}\n\n\n", "file_path": "asn-compiler/src/tokenizer/mod.rs", "rank": 33, "score": 125887.62092943948 }, { "content": "// Parse a Type Assignment\n\n//\n\n// Identifier [{Params}] :== {Type|Refere} [(Constraints)]\n\nfn parse_type_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usize), Error> {\n\n let mut consumed = 0;\n\n\n\n if !expect_token(&tokens[consumed..], Token::is_type_reference)? {\n\n return Err(unexpected_token!(\"Type Reference\", tokens[consumed]));\n\n }\n\n let id = tokens[consumed].text.clone();\n\n consumed += 1;\n\n\n\n // Parse Optional Params\n\n let (params, params_consumed) = match parse_params(&tokens[consumed..]) {\n\n Ok(result) => (Some(result.0), result.1),\n\n Err(_) => (None, 0),\n\n };\n\n consumed += params_consumed;\n\n\n\n if !expect_token(&tokens[consumed..], Token::is_assignment)? {\n\n return Err(unexpected_token!(\"::=\", tokens[consumed]));\n\n }\n\n consumed += 1;\n", "file_path": "asn-compiler/src/parser/asn/defs.rs", "rank": 34, "score": 125171.86766395088 }, { "content": "// Just like parse_union_set, except it consumes the wrapping `(` and `)`\n\n//\n\n// This avoid having to write a lot of boiler-plate code to check for `(` or `)` in a few\n\n// functions (typically inside `parse_intersection_set`.)\n\nfn parse_intersection_set(tokens: &[Token]) -> Result<(Elements, usize), Error> {\n\n let mut consumed = 0;\n\n\n\n // First try to Parse a Size\n\n if expect_one_of_keywords(&tokens[consumed..], &[\"SIZE\", \"FROM\"])? {\n\n // If we come inside, following is guaranteed. to succeed.\n\n let variant = if expect_keyword(&tokens[consumed..], \"SIZE\").unwrap() {\n\n SubtypeElements::SizeConstraint\n\n } else {\n\n SubtypeElements::PermittedAlphabet\n\n };\n\n consumed += 1;\n\n\n\n if expect_token(&tokens[consumed..], Token::is_round_begin)? {\n\n let (element_set, element_set_consumed) = parse_element_set(&tokens[consumed..])?;\n\n consumed += element_set_consumed;\n\n\n\n return Ok((Elements::Subtype(variant(element_set)), consumed));\n\n }\n\n }\n", "file_path": "asn-compiler/src/parser/asn/types/constraints.rs", "rank": 35, "score": 125165.34463391737 }, { "content": "fn parse_type_field_spec(tokens: &[Token]) -> Result<(ObjectClassFieldSpec, usize), Error> {\n\n let mut consumed = 0;\n\n\n\n if !expect_token(&tokens[consumed..], Token::is_type_field_reference)? {\n\n return Err(unexpected_token!(\"'TYPE FIELD REF'\", tokens[consumed]));\n\n }\n\n\n\n let id = tokens[consumed].text.clone();\n\n consumed += 1;\n\n\n\n let mut optional = false;\n\n let mut default = None;\n\n if expect_one_of_keywords(&tokens[consumed..], &[\"OPTIONAL\", \"DEFAULT\"])? {\n\n if expect_keyword(&tokens[consumed..], \"OPTIONAL\")? {\n\n optional = match expect_keyword(&tokens[consumed..], \"OPTIONAL\") {\n\n Ok(o) => {\n\n if o {\n\n consumed += 1;\n\n true\n\n } else {\n", "file_path": "asn-compiler/src/parser/asn/types/ioc.rs", "rank": 36, "score": 123115.8203965946 }, { "content": "struct StructFieldType {\n\n ty: Option<syn::Ident>,\n\n is_optional: bool,\n\n}\n\n\n", "file_path": "codecs_derive/src/aper/seq.rs", "rank": 37, "score": 122423.10618461034 }, { "content": "// Called when `lb` and `ub` are known and the range is less than 64K\n\nfn decode_constrained_length_determinent(\n\n data: &mut AperCodecData,\n\n lb: usize,\n\n ub: usize,\n\n) -> Result<usize, AperCodecError> {\n\n log::trace!(\n\n \"decode_constrained_length_determinent, lb: {}, ub: {}\",\n\n lb,\n\n ub\n\n );\n\n let range = ub - lb + 1;\n\n\n\n if range <= 65536 {\n\n // Almost always for our use cases, so let's just use it.\n\n let length = decode_constrained_whole_number(data, lb as i128, ub as i128)?;\n\n log::trace!(\"decoded length : {}\", length);\n\n Ok(length as usize)\n\n } else {\n\n unimplemented!(\"Lengths larger than 65536 are not supported yet.\")\n\n }\n\n}\n\n\n", "file_path": "codecs/src/aper/decode/decode_internal.rs", "rank": 38, "score": 121782.6341248573 }, { "content": "// Parses a Range Value, supports all possible formats.\n\n//\n\n// If parsing fails (tokens of not adequate length or tokens don't match) returns an Error. The\n\n// caller should do the error handling. Note: Typically caller will simply say Oh it didn't match,\n\n// let's try next.\n\nfn parse_range_elements(tokens: &[Token]) -> Result<(SubtypeElements, usize), Error> {\n\n let mut consumed = 0;\n\n\n\n fn is_min_max_keyword(token: &Token) -> bool {\n\n [\"MIN\", \"MAX\"]\n\n .iter()\n\n .any(|k| Token::is_given_keyword(token, k))\n\n }\n\n\n\n let (lower, lower_consumed) = match parse_value(&tokens[consumed..]) {\n\n Ok(result) => (result.0, result.1),\n\n Err(_) => {\n\n if expect_token(&tokens[consumed..], is_min_max_keyword)? {\n\n (tokens[consumed].text.clone(), 1)\n\n } else {\n\n return Err(unexpected_token!(\n\n \"'MIN', 'MAX' or 'Value'\",\n\n tokens[consumed]\n\n ));\n\n }\n", "file_path": "asn-compiler/src/parser/asn/types/constraints.rs", "rank": 39, "score": 121389.97549135487 }, { "content": "fn parse_table_constraint(tokens: &[Token]) -> Result<(Asn1Constraint, usize), Error> {\n\n let mut consumed = 0;\n\n\n\n if !expect_token(&tokens[consumed..], Token::is_round_begin)? {\n\n return Err(unexpected_token!(\"'('\", tokens[0]));\n\n }\n\n consumed += 1;\n\n\n\n // First Simple Table - Must succeed\n\n let table = if expect_tokens(\n\n &tokens[consumed..],\n\n &[\n\n &[Token::is_curly_begin],\n\n &[Token::is_object_set_reference],\n\n &[Token::is_curly_end],\n\n ],\n\n )? {\n\n tokens[consumed + 1].text.clone()\n\n } else {\n\n return Err(parse_error!(\"Failed to parse Simple Table Constraint.\"));\n", "file_path": "asn-compiler/src/parser/asn/types/constraints.rs", "rank": 40, "score": 121367.45032509684 }, { "content": "fn parse_subtype_constraint(tokens: &[Token]) -> Result<(Asn1Constraint, usize), Error> {\n\n let (element_set, element_set_consumed) = parse_element_set(tokens)?;\n\n Ok((Asn1Constraint::Subtype(element_set), element_set_consumed))\n\n}\n\n\n", "file_path": "asn-compiler/src/parser/asn/types/constraints.rs", "rank": 41, "score": 121367.45032509684 }, { "content": "fn parse_element_set(tokens: &[Token]) -> Result<(ElementSet, usize), Error> {\n\n let mut consumed = 0;\n\n\n\n if !expect_token(&tokens[consumed..], Token::is_round_begin)? {\n\n return Err(unexpected_token!(\"'('\", tokens[0]));\n\n }\n\n consumed += 1;\n\n\n\n let (root_elements, root_consumed) = parse_union_set(&tokens[consumed..])?;\n\n consumed += root_consumed;\n\n\n\n if root_elements.elements.is_empty() {\n\n return Err(parse_error!(\"Empty Set in a Constraint!\"));\n\n }\n\n\n\n let mut additional_elements = None;\n\n if expect_token(&tokens[consumed..], Token::is_comma)? {\n\n consumed += 1;\n\n\n\n // Extension Marker\n", "file_path": "asn-compiler/src/parser/asn/types/constraints.rs", "rank": 42, "score": 121367.45032509684 }, { "content": "fn parse_contents_constraint(tokens: &[Token]) -> Result<(Asn1Constraint, usize), Error> {\n\n let mut consumed = 0;\n\n\n\n if !expect_token(&tokens[consumed..], Token::is_round_begin)? {\n\n return Err(unexpected_token!(\"'('\", tokens[consumed]));\n\n }\n\n consumed += 1;\n\n\n\n if !expect_keyword(&tokens[consumed..], \"CONTAINING\")? {\n\n return Err(unexpected_token!(\"'CONTAINING'\", tokens[consumed]));\n\n }\n\n consumed += 1;\n\n\n\n let _containing = if expect_token(&tokens[consumed..], Token::is_type_reference)? {\n\n tokens[consumed].text.clone()\n\n } else {\n\n return Err(unexpected_token!(\"'TYPE Reference'\", tokens[consumed]));\n\n };\n\n consumed += 1;\n\n\n", "file_path": "asn-compiler/src/parser/asn/types/constraints.rs", "rank": 43, "score": 121367.45032509684 }, { "content": "fn parse_union_set(tokens: &[Token]) -> Result<(UnionSet, usize), Error> {\n\n let mut consumed = 0;\n\n\n\n let mut elements = vec![];\n\n // UnionSet Loop\n\n loop {\n\n // IntersectionSet Loop\n\n let mut iset_elements = vec![];\n\n let mut expecting_iset = false;\n\n loop {\n\n match parse_intersection_set(&tokens[consumed..]) {\n\n Ok(result) => {\n\n iset_elements.push(result.0);\n\n consumed += result.1;\n\n }\n\n Err(_) => {\n\n if expecting_iset {\n\n return Err(parse_error!(\"Expecting Interesection Set in a Constraint.\"));\n\n }\n\n }\n", "file_path": "asn-compiler/src/parser/asn/types/constraints.rs", "rank": 44, "score": 121367.45032509684 }, { "content": "fn generate_choice_variant_decode_tokens_using_attrs(\n\n ast: &syn::DeriveInput,\n\n lb: i128,\n\n ub: i128,\n\n ext: Option<&syn::LitBool>,\n\n) -> Result<(Vec<proc_macro2::TokenStream>, Vec<proc_macro2::TokenStream>), syn::Error> {\n\n let mut decode_tokens = vec![];\n\n let mut encode_tokens = vec![];\n\n\n\n let mut errors = vec![];\n\n if let syn::Data::Enum(ref data) = ast.data {\n\n for variant in &data.variants {\n\n let codec_params = parse_fld_meta_as_codec_params(&variant.attrs);\n\n match codec_params {\n\n Err(e) => errors.push(e),\n\n Ok(cp) => {\n\n let key = cp.key.as_ref();\n\n if key.is_none() {\n\n errors.push(syn::Error::new_spanned(\n\n variant,\n", "file_path": "codecs_derive/src/aper/choice.rs", "rank": 45, "score": 118287.47468858468 }, { "content": "fn generate_open_type_variant_tokens_using_attrs(\n\n ast: &syn::DeriveInput,\n\n) -> Result<(Vec<proc_macro2::TokenStream>, Vec<proc_macro2::TokenStream>), syn::Error> {\n\n let mut decode_tokens = vec![];\n\n let mut encode_tokens = vec![];\n\n\n\n let mut errors = vec![];\n\n if let syn::Data::Enum(ref data) = ast.data {\n\n for variant in &data.variants {\n\n let codec_params = parse_fld_meta_as_codec_params(&variant.attrs);\n\n match codec_params {\n\n Err(e) => errors.push(e),\n\n Ok(cp) => {\n\n let key = cp.key.as_ref();\n\n if key.is_none() {\n\n errors.push(syn::Error::new_spanned(\n\n variant,\n\n \"Missing Key for the variant. Please provide `#[asn(key = <int>)]` attribute.\",\n\n ));\n\n continue;\n", "file_path": "codecs_derive/src/aper/open.rs", "rank": 46, "score": 118037.75254731937 }, { "content": "// Decode \"Normally Small\" Length Determinent\n\n//\n\n// This type of \"length\" determinent is used to encode bitmap length in the SEQUENCE extensions,\n\n// TODO: Support for the case when the length is greater than 64. We almost never come across this\n\n// case in practice, so right now it just Errors, if in real life we actually see this error for\n\n// any time it might have to be implemented to take care of that case.\n\nfn decode_normally_small_length_determinent(\n\n data: &mut AperCodecData,\n\n) -> Result<usize, AperCodecError> {\n\n let is_small = data.decode_bool()?;\n\n if !is_small {\n\n Ok(data.decode_bits_as_integer(6, false)? as usize + 1_usize)\n\n } else {\n\n decode_indefinite_length_determinent(data)\n\n }\n\n}\n\n\n", "file_path": "codecs/src/aper/decode/decode_internal.rs", "rank": 47, "score": 118010.79445775063 }, { "content": "fn parse_actual_params(tokens: &[Token]) -> Result<(Vec<ActualParam>, usize), Error> {\n\n let mut consumed = 0;\n\n if !expect_token(&tokens[consumed..], Token::is_curly_begin)? {\n\n return Err(unexpected_token!(\"'{'\", tokens[consumed]));\n\n }\n\n consumed += 1;\n\n\n\n let mut params = vec![];\n\n loop {\n\n if expect_token(&tokens[consumed..], Token::is_curly_begin)? {\n\n consumed += 1;\n\n let param = if expect_one_of_tokens(\n\n &tokens[consumed..],\n\n &[Token::is_numeric, Token::is_identifier],\n\n )? {\n\n let param = tokens[consumed].text.clone();\n\n consumed += 1;\n\n param\n\n } else {\n\n return Err(unexpected_token!(\"'IDENTIFIER'\", tokens[consumed]));\n", "file_path": "asn-compiler/src/parser/asn/types/int.rs", "rank": 48, "score": 116122.35914505542 }, { "content": "fn parse_field_spec(tokens: &[Token]) -> Result<(ObjectClassFieldSpec, usize), Error> {\n\n if expect_token(tokens, Token::is_value_field_reference)? {\n\n parse_fixed_type_value_field_spec(tokens)\n\n } else if expect_token(tokens, Token::is_type_field_reference)? {\n\n parse_type_field_spec(tokens)\n\n } else {\n\n Err(parse_error!(\"Unsupported Field Spec in CLASS Definition\"))\n\n }\n\n}\n\n\n", "file_path": "asn-compiler/src/parser/asn/types/ioc.rs", "rank": 49, "score": 114519.3560504064 }, { "content": "fn get_field_type(field: &syn::Field) -> StructFieldType {\n\n fn field_is_optional(field: &syn::Field) -> bool {\n\n if let syn::Type::Path(ref typepath) = field.ty {\n\n typepath.path.leading_colon.is_none()\n\n && typepath.path.segments.len() == 1\n\n && typepath.path.segments.iter().next().unwrap().ident == \"Option\"\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n let is_optional = field_is_optional(field);\n\n\n\n let ty = if is_optional {\n\n if let syn::Type::Path(ref tp) = field.ty {\n\n let type_params = &tp.path.segments.iter().next().unwrap().arguments;\n\n match type_params {\n\n syn::PathArguments::AngleBracketed(params) => {\n\n let generic_args = params.args.iter().next().unwrap();\n\n if let syn::GenericArgument::Type(syn::Type::Path(tpinner)) = generic_args {\n", "file_path": "codecs_derive/src/aper/seq.rs", "rank": 50, "score": 113184.87296688593 }, { "content": "// Parses values in an Enum. Used for parsing values either in the root or extension.\n\nfn parse_enum_values(tokens: &[Token]) -> Result<(Vec<EnumValue>, usize), Error> {\n\n let mut consumed = 0;\n\n\n\n let mut values = vec![];\n\n loop {\n\n let (named_value, named_value_consumed) = parse_named_maybe_value(&tokens[consumed..])?;\n\n let value = EnumValue {\n\n name: named_value.0,\n\n value: named_value.1,\n\n };\n\n\n\n values.push(value);\n\n consumed += named_value_consumed;\n\n\n\n if expect_token(&tokens[consumed..], Token::is_comma)? {\n\n consumed += 1;\n\n }\n\n\n\n if expect_one_of_tokens(\n\n &tokens[consumed..],\n", "file_path": "asn-compiler/src/parser/asn/types/base/enumerated.rs", "rank": 51, "score": 112819.95366683339 }, { "content": "fn parse_seq_component(tokens: &[Token]) -> Result<(Option<SeqComponent>, usize), Error> {\n\n let mut consumed = 0;\n\n\n\n let (component, component_consumed) = match parse_component(&tokens[consumed..]) {\n\n Ok(result) => (Some(result.0), result.1),\n\n Err(_) => (None, 0),\n\n };\n\n consumed += component_consumed;\n\n\n\n if let Some(component) = component {\n\n let optional = if expect_keyword(&tokens[consumed..], \"OPTIONAL\")? {\n\n consumed += 1;\n\n true\n\n } else {\n\n false\n\n };\n\n\n\n let _default = if expect_keyword(&tokens[consumed..], \"DEFAULT\")? {\n\n consumed += 1;\n\n let (value, value_consumed) = parse_value(&tokens[consumed..])?;\n", "file_path": "asn-compiler/src/parser/asn/types/constructed/seq.rs", "rank": 52, "score": 112814.47257178352 }, { "content": "fn parse_seq_addition_group(tokens: &[Token]) -> Result<(SeqAdditionGroup, usize), Error> {\n\n let mut consumed = 0;\n\n\n\n if !expect_token(&tokens[consumed..], Token::is_addition_groups_begin)? {\n\n return Err(unexpected_token!(\"'[['\", tokens[consumed]));\n\n }\n\n consumed += 1;\n\n\n\n let _version = match expect_token(&tokens[consumed..], Token::is_numeric) {\n\n Ok(success) => {\n\n if success {\n\n let version = tokens[consumed].text.clone();\n\n consumed += 1;\n\n if !expect_token(&tokens[consumed..], Token::is_colon)? {\n\n return Err(unexpected_token!(\"'[['\", tokens[consumed]));\n\n }\n\n consumed += 1;\n\n Some(version)\n\n } else {\n\n None\n", "file_path": "asn-compiler/src/parser/asn/types/constructed/seq.rs", "rank": 53, "score": 111421.64850853474 }, { "content": "fn parse_choice_addition_group(tokens: &[Token]) -> Result<(ChoiceAdditionGroup, usize), Error> {\n\n let mut consumed = 0;\n\n\n\n if !expect_token(&tokens[consumed..], Token::is_addition_groups_begin)? {\n\n return Err(unexpected_token!(\"'[['\", tokens[consumed]));\n\n }\n\n consumed += 1;\n\n\n\n let _version = if expect_token(&tokens[consumed..], Token::is_numeric)? {\n\n let version = tokens[consumed].text.clone();\n\n consumed += 1;\n\n if !expect_token(&tokens[consumed..], Token::is_colon)? {\n\n return Err(unexpected_token!(\"':'\", tokens[consumed]));\n\n }\n\n consumed += 1;\n\n Some(version)\n\n } else {\n\n None\n\n };\n\n\n", "file_path": "asn-compiler/src/parser/asn/types/constructed/choice.rs", "rank": 54, "score": 111421.64850853474 }, { "content": "//! ASN.1 Aper Encoder module.\n\n\n\nuse crate::aper::AperCodecData;\n\nuse crate::aper::AperCodecError;\n\nuse bitvec::prelude::*;\n\nmod encode_internal;\n\nuse bitvec::view::AsBits;\n\nuse encode_internal::*;\n\n\n\n/// Encode a Choice Index\n\n///\n\n/// During Encoding a 'CHOICE' Type to help decoding, the 'CHOICE' Index is encoded first, followed\n\n/// by the actual encoding of the 'CHOICE' variant.\n", "file_path": "codecs/src/aper/encode/mod.rs", "rank": 55, "score": 108193.46193786069 }, { "content": " let length = bit_string.len();\n\n if length >= 16384 {\n\n return Err(AperCodecError::new(\n\n \"Encode of fragmented bitstring not yet implemented\",\n\n ));\n\n }\n\n\n\n encode_length_determinent(data, lb, ub, false, length)?;\n\n if length > 0 {\n\n if length > 16 {\n\n data.align();\n\n }\n\n data.append_bits(bit_string);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "codecs/src/aper/encode/mod.rs", "rank": 56, "score": 108184.87248026504 }, { "content": " let length = octet_string.len();\n\n if length >= 16384 {\n\n return Err(AperCodecError::new(\n\n \"Encode of fragmented octetstring not yet implemented\",\n\n ));\n\n }\n\n\n\n encode_length_determinent(data, lb, ub, false, length)?;\n\n\n\n if length > 0 {\n\n if length > 2 {\n\n data.align();\n\n }\n\n data.append_bits(octet_string.view_bits());\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "codecs/src/aper/encode/mod.rs", "rank": 57, "score": 108183.75039094631 }, { "content": "//! Decode APIs for APER Codec\n\nuse bitvec::prelude::*;\n\n\n\nuse crate::aper::AperCodecData;\n\nuse crate::aper::AperCodecError;\n\n\n\nmod decode_internal;\n\nuse decode_internal::*;\n\n\n\npub use decode_internal::decode_length_determinent;\n\n\n\n/// Decode a Choice Index.\n\n///\n\n/// For an ASN.1 `CHOICE` Type, a CHOICE Index is first decoded. This function is used to `decode`\n\n/// the choice index. Returns the Index in the 'root' or 'additions' and a flag indicated whether\n\n/// the value is from the 'root_extensions' or 'addtions'. The caller would then decide the\n\n/// appropriate `decode` function for the CHOICE variant is called.\n", "file_path": "codecs/src/aper/decode/mod.rs", "rank": 58, "score": 108177.28940853309 }, { "content": " if length > 0 {\n\n if length > 2 {\n\n let _ = data.decode_align()?;\n\n }\n\n octets.extend(data.get_bytes(length)?);\n\n }\n\n\n\n // Fragmented So get the chunks in multiples of 16384,\n\n if length >= 16384 {\n\n continue;\n\n } else {\n\n break;\n\n }\n\n }\n\n\n\n Ok(octets)\n\n}\n\n\n\nmod decode_charstrings;\n\npub use decode_charstrings::*;\n", "file_path": "codecs/src/aper/decode/mod.rs", "rank": 59, "score": 108168.08544993396 }, { "content": " // 12.1\n\n decode_unconstrained_whole_number(data)?\n\n } else {\n\n // 12.2\n\n match lb {\n\n None =>\n\n // 12.2.4\n\n {\n\n decode_unconstrained_whole_number(data)?\n\n }\n\n Some(lb) => {\n\n match ub {\n\n None =>\n\n // 12.2.3\n\n {\n\n decode_semi_constrained_whole_number(data, lb)?\n\n }\n\n Some(ub) => {\n\n // 12.2.1 and 12.2.2\n\n log::trace!(\"decode_constrained_whole_number: {}, {}\", lb, ub);\n", "file_path": "codecs/src/aper/decode/mod.rs", "rank": 60, "score": 108157.18248524716 }, { "content": " decode_constrained_whole_number(data, lb, ub)?\n\n }\n\n }\n\n }\n\n }\n\n };\n\n\n\n data.dump();\n\n\n\n Ok((value, extended_value))\n\n}\n\n\n", "file_path": "codecs/src/aper/decode/mod.rs", "rank": 61, "score": 108155.53604645448 }, { "content": " if length > 0 {\n\n if length > 16 {\n\n let _ = data.decode_align()?;\n\n }\n\n bv.extend(data.get_bitvec(length)?);\n\n }\n\n\n\n // Fragmented So get the chunks in multiples of 16384,\n\n if length >= 16384 {\n\n continue;\n\n } else {\n\n break;\n\n }\n\n }\n\n\n\n Ok(bv)\n\n}\n\n\n", "file_path": "codecs/src/aper/decode/mod.rs", "rank": 62, "score": 108154.67683039058 }, { "content": "fn parse_params(tokens: &[Token]) -> Result<(DefinitionParams, usize), Error> {\n\n let mut consumed = 0;\n\n\n\n if !expect_token(&tokens[consumed..], Token::is_curly_begin)? {\n\n return Err(unexpected_token!(\"'{'\", tokens[consumed]));\n\n }\n\n consumed += 1;\n\n\n\n let mut params = vec![];\n\n loop {\n\n // Try to parse the Governer: DummyReference if fails, whatever remains is a Type or Class\n\n let (_governer, dummyref) = if expect_tokens(\n\n &tokens[consumed..],\n\n &[\n\n &[\n\n Token::is_type_reference,\n\n Token::is_asn_builtin_type,\n\n Token::is_object_class_reference,\n\n ],\n\n &[Token::is_colon],\n", "file_path": "asn-compiler/src/parser/asn/defs.rs", "rank": 63, "score": 106967.19716720264 }, { "content": "fn generate_seq_field_codec_tokens_using_attrs(\n\n ast: &syn::DeriveInput,\n\n) -> Result<\n\n (\n\n Vec<proc_macro2::TokenStream>,\n\n Vec<proc_macro2::TokenStream>,\n\n Vec<proc_macro2::TokenStream>,\n\n ),\n\n syn::Error,\n\n> {\n\n let mut decode_tokens = vec![];\n\n let mut encode_tokens = vec![];\n\n let mut hdr_encode_tokens = vec![];\n\n\n\n let mut errors: Vec<syn::Error> = vec![];\n\n if let syn::Data::Struct(ref data) = ast.data {\n\n if let syn::Fields::Named(ref fields) = data.fields {\n\n for field in &fields.named {\n\n let codec_params = parse_fld_meta_as_codec_params(&field.attrs);\n\n match codec_params {\n", "file_path": "codecs_derive/src/aper/seq.rs", "rank": 64, "score": 106184.90582598971 }, { "content": "// Parse A `TypeAssignment`, a `ObjectClassAssignement or `ObjectSetAssignment`\n\n//\n\n// All the above assignments start with a lowe-case letter and will have to be parsed into their\n\n// respective 'values'. Returns the corresponding variant of the `Asn1Definition` and the number\n\n// of tokens consumed or error.\n\nfn parse_typeish_definition(tokens: &[Token]) -> Result<(Asn1Definition, usize), Error> {\n\n // Try to parse a type_definition\n\n if let Ok(x) = parse_type_assignment(tokens) {\n\n return Ok(x);\n\n }\n\n\n\n if let Ok(x) = parse_class_assignment(tokens) {\n\n return Ok(x);\n\n }\n\n\n\n if let Ok(x) = parse_object_set_assignment(tokens) {\n\n return Ok(x);\n\n }\n\n\n\n Err(parse_error!(\n\n \"Failed to parse a definition at Token: {:#?}\",\n\n tokens[0]\n\n ))\n\n}\n\n\n", "file_path": "asn-compiler/src/parser/asn/defs.rs", "rank": 65, "score": 103706.56766072365 }, { "content": "// Parse `ValueAssignment` and `ObjectAssignment`\n\n//\n\n// All the above assignments start with a lowe-case letter and will have to be parsed into their\n\n// respective 'values'. Returns the corresponding variant of the `Asn1Definition` and the number\n\n// of tokens consumed or error.\n\nfn parse_valueish_definition(tokens: &[Token]) -> Result<(Asn1Definition, usize), Error> {\n\n if let Ok(x) = parse_object_assignment(tokens) {\n\n return Ok(x);\n\n }\n\n\n\n if let Ok(x) = parse_value_assignment(tokens) {\n\n return Ok(x);\n\n }\n\n\n\n Err(parse_error!(\n\n \"Failed to parse a definition at Token: {:#?}\",\n\n tokens[0]\n\n ))\n\n}\n\n\n", "file_path": "asn-compiler/src/parser/asn/defs.rs", "rank": 66, "score": 103702.61188393715 }, { "content": "// Parse object Assginemtnt\n\n//\n\n// value Type ::= ValueDefinition -- Value Definition can be anything for now.\n\nfn parse_value_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usize), Error> {\n\n let mut consumed = 0;\n\n\n\n if !expect_token(&tokens[consumed..], Token::is_value_reference)? {\n\n return Err(unexpected_token!(\"Value Reference\", tokens[consumed]));\n\n }\n\n let id = tokens[consumed].text.clone();\n\n consumed += 1;\n\n\n\n let (typeref, typeref_consumed) = parse_type(&tokens[consumed..])?;\n\n consumed += typeref_consumed;\n\n\n\n if !expect_token(&tokens[consumed..], Token::is_assignment)? {\n\n return Err(unexpected_token!(\"::=\", tokens[consumed]));\n\n }\n\n consumed += 1;\n\n\n\n let (value, value_consumed) = parse_value(&tokens[consumed..])?;\n\n consumed += value_consumed;\n\n\n\n Ok((\n\n Asn1Definition {\n\n kind: Asn1AssignmentKind::Value(Asn1ValueAssignment { id, typeref, value }),\n\n params: None,\n\n resolved: false,\n\n },\n\n consumed,\n\n ))\n\n}\n\n\n", "file_path": "asn-compiler/src/parser/asn/defs.rs", "rank": 67, "score": 103698.59823798123 }, { "content": "// Wrapper for Parsing an OID Component\n\n//\n\n// Parses Either Numbered or Named/Numbered OID components\n\nfn parse_oid_component(tokens: &[Token]) -> Result<(OIDComponent, usize), Error> {\n\n let consumed = 0;\n\n\n\n if expect_one_of_tokens(\n\n &tokens[consumed..],\n\n &[Token::is_identifier, Token::is_numeric],\n\n )? {\n\n let first = &tokens[0];\n\n if first.is_identifier() {\n\n parse_named_oid_component(tokens)\n\n } else {\n\n let number = first\n\n .text\n\n .parse::<u32>()\n\n .map_err(|_| invalid_token!(first))?;\n\n Ok((OIDComponent::new(None, number), 1))\n\n }\n\n } else {\n\n Err(unexpected_token!(\n\n \"Expected 'identifier' or 'number'\",\n", "file_path": "asn-compiler/src/parser/asn/oid.rs", "rank": 68, "score": 103693.3896768618 }, { "content": "// Parse An Object CLASS Assignment\n\n//\n\n// CLASS-NAME :== CLASS { .... -- CLASS DEFINITION -- }\n\n// Parameterized Class assignment not supported.\n\nfn parse_class_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usize), Error> {\n\n let mut consumed = 0;\n\n if !expect_token(&tokens[consumed..], Token::is_object_class_reference)? {\n\n return Err(unexpected_token!(\"CLASS Reference\", tokens[consumed]));\n\n }\n\n let id = tokens[consumed].text.clone();\n\n consumed += 1;\n\n\n\n if !expect_token(&tokens[consumed..], Token::is_assignment)? {\n\n return Err(unexpected_token!(\"::=\", tokens[consumed]));\n\n }\n\n consumed += 1;\n\n\n\n if !expect_keyword(&tokens[consumed..], \"CLASS\")? {\n\n return Err(unexpected_token!(\"'CLASS'\", tokens[consumed]));\n\n }\n\n\n\n let (classref, classref_consumed) = parse_class(&tokens[consumed..])?;\n\n consumed += classref_consumed;\n\n\n\n Ok((\n\n Asn1Definition {\n\n kind: Asn1AssignmentKind::Class(Asn1ObjectClassAssignment { id, classref }),\n\n params: None,\n\n resolved: false,\n\n },\n\n consumed,\n\n ))\n\n}\n\n\n", "file_path": "asn-compiler/src/parser/asn/defs.rs", "rank": 69, "score": 103693.3896768618 }, { "content": "// Parse object Assginemtnt\n\n//\n\n// object CLASS ::= { ... -- Object Defined Syntax -- }\n\nfn parse_object_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usize), Error> {\n\n let mut consumed = 0;\n\n\n\n if !expect_tokens(\n\n &tokens[consumed..],\n\n &[\n\n &[Token::is_object_reference],\n\n &[Token::is_object_class_reference],\n\n ],\n\n )? {\n\n return Err(unexpected_token!(\n\n \"'object', 'CLASS' Reference\",\n\n tokens[consumed]\n\n ));\n\n }\n\n let id = tokens[consumed].text.clone();\n\n consumed += 1;\n\n\n\n let class = tokens[consumed].text.clone();\n\n consumed += 1;\n", "file_path": "asn-compiler/src/parser/asn/defs.rs", "rank": 70, "score": 103693.3896768618 }, { "content": "fn parse_module_name(tokens: &[Token]) -> Result<(Asn1ModuleName, usize), Error> {\n\n let mut consumed = 0;\n\n // First Name\n\n\n\n let name = if expect_token(&tokens[consumed..], Token::is_module_reference)? {\n\n tokens[consumed].text.clone()\n\n } else {\n\n return Err(parse_error!(\n\n \"Module Name '{}' is not a valid Module Reference\",\n\n tokens[consumed].text\n\n ));\n\n };\n\n consumed += 1;\n\n\n\n // Now OID\n\n // Optional Object Identifier\n\n let (oid, oid_consumed) = maybe_parse_object_identifer(&tokens[consumed..])?;\n\n consumed += oid_consumed;\n\n\n\n Ok((Asn1ModuleName::new(name, oid), consumed))\n\n}\n\n\n", "file_path": "asn-compiler/src/parser/asn/module.rs", "rank": 71, "score": 100649.5098556365 }, { "content": "// Parses a named OID component\n\n//\n\n// Parses named OID components of the form `iso` or `iso(1)`\n\nfn parse_named_oid_component(tokens: &[Token]) -> Result<(OIDComponent, usize), Error> {\n\n if !expect_token(&tokens, Token::is_value_reference)? {\n\n return Err(unexpected_token!(\"'IDENTIFIER'\", tokens[0]));\n\n }\n\n let name_token = &tokens[0];\n\n let name = &name_token.text;\n\n let (number, consumed) = match expect_tokens(\n\n &tokens[1..],\n\n &[\n\n &[Token::is_round_begin],\n\n &[Token::is_numeric],\n\n &[Token::is_round_end],\n\n ],\n\n ) {\n\n Ok(success) => {\n\n if success {\n\n let number_token = &tokens[2];\n\n let number = number_token\n\n .text\n\n .parse::<u32>()\n", "file_path": "asn-compiler/src/parser/asn/oid.rs", "rank": 72, "score": 100649.5098556365 }, { "content": "// Parse an Object Set Assignment\n\n//\n\n// ObjectSetName CLASS ::= { Objects } -- Where Objects can be an Object/ObjectSet/Reference\n\nfn parse_object_set_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usize), Error> {\n\n let mut consumed = 0;\n\n\n\n if !expect_token(&tokens[consumed..], Token::is_type_reference)? {\n\n return Err(unexpected_token!(\"'Type Reference'\", tokens[consumed]));\n\n }\n\n let id = tokens[consumed].text.clone();\n\n consumed += 1;\n\n\n\n if !expect_token(&tokens[consumed..], Token::is_object_class_reference)? {\n\n return Err(unexpected_token!(\"'CLASS Reference'\", tokens[consumed]));\n\n }\n\n let class = tokens[consumed].text.clone();\n\n consumed += 1;\n\n\n\n if !expect_token(&tokens[consumed..], Token::is_assignment)? {\n\n return Err(unexpected_token!(\"'::='\", tokens[consumed]));\n\n }\n\n consumed += 1;\n\n\n", "file_path": "asn-compiler/src/parser/asn/defs.rs", "rank": 73, "score": 100649.5098556365 }, { "content": "// Get bit string or hex string\n\nfn get_bit_or_hex_string_token(\n\n chars: &[char],\n\n line: usize,\n\n begin: usize,\n\n) -> Result<(Token, usize, usize, usize), Error> {\n\n if chars.len() == 1 {\n\n return Err(Error::TokenizeError(6, line, begin));\n\n }\n\n\n\n let last = chars[1..].iter().position(|&c| c == '\\'');\n\n if last.is_none() {\n\n // No matching '\\'' found till the end of the string. Clearly an error.\n\n return Err(Error::TokenizeError(7, line, begin));\n\n }\n\n let mut consumed = last.unwrap() + 1 + 1;\n\n if consumed == chars.len() {\n\n // Matching'\\'' found, but the string ends, Error.\n\n return Err(Error::TokenizeError(8, line, begin));\n\n }\n\n\n", "file_path": "asn-compiler/src/tokenizer/mod.rs", "rank": 74, "score": 100039.67000121695 }, { "content": "fn maybe_parse_header_tags(tokens: &[Token]) -> Result<(Asn1ModuleTag, usize), Error> {\n\n let mut consumed = 0;\n\n\n\n let tag =\n\n if expect_one_of_keywords(&tokens[consumed..], &[\"EXPLICIT\", \"IMPLICIT\", \"AUTOMATIC\"])? {\n\n let tag: Asn1ModuleTag;\n\n match tokens[consumed].text.as_str() {\n\n \"EXPLICIT\" => tag = Asn1ModuleTag::Explicit,\n\n \"IMPLICIT\" => tag = Asn1ModuleTag::Implicit,\n\n \"AUTOMATIC\" => tag = Asn1ModuleTag::Automatic,\n\n _ => {\n\n // Will never reach\n\n return Err(parse_error!(\"Should Never Reach\"));\n\n }\n\n }\n\n consumed += 1;\n\n if expect_keyword(&tokens[consumed..], \"TAGS\")? {\n\n consumed += 1\n\n } else {\n\n return Err(unexpected_token!(\"TAGS\", tokens[consumed]));\n\n }\n\n tag\n\n } else {\n\n Asn1ModuleTag::Explicit\n\n };\n\n Ok((tag, consumed))\n\n}\n\n\n", "file_path": "asn-compiler/src/parser/asn/module.rs", "rank": 75, "score": 97812.15686235615 }, { "content": "#[proc_macro_derive(AperCodec, attributes(asn))]\n\npub fn derive(input: TokenStream) -> TokenStream {\n\n let ast = parse_macro_input!(input as DeriveInput);\n\n\n\n let codec_params = attrs::parse_ty_meta_as_codec_params(&ast.attrs);\n\n if codec_params.is_err() {\n\n return codec_params.err().unwrap().to_compile_error().into();\n\n }\n\n\n\n let codec_params = codec_params.unwrap();\n\n if codec_params.attr.is_none() {\n\n return syn::Error::new_spanned(ast, \"Missing attribute 'asn' for the struct.\")\n\n .to_compile_error()\n\n .into();\n\n }\n\n\n\n if codec_params.ty.is_none() {\n\n return syn::Error::new_spanned(\n\n codec_params.attr,\n\n \"Missing parameter 'type' for the attribute.\",\n\n )\n\n .to_compile_error()\n\n .into();\n\n }\n\n\n\n aper::generate_codec(&ast, &codec_params)\n\n}\n", "file_path": "codecs_derive/src/lib.rs", "rank": 76, "score": 93892.79430281761 }, { "content": "//! Structures related to ASN.1 Type\n\n\n\npub(crate) mod base;\n\nuse base::{Asn1TypeBitString, Asn1TypeEnumerated, Asn1TypeInteger};\n\n\n\npub(crate) mod constraints;\n\nuse constraints::Asn1Constraint;\n\n\n\npub(crate) mod constructed;\n\nuse constructed::{Asn1TypeChoice, Asn1TypeSequence, Asn1TypeSequenceOf};\n\n\n\npub(crate) mod ioc;\n\n\n\n#[allow(dead_code)]\n\n#[derive(Debug, Clone)]\n\npub(crate) enum Asn1BuiltinType {\n\n Integer(Asn1TypeInteger),\n\n Enumerated(Asn1TypeEnumerated),\n\n BitString(Asn1TypeBitString),\n\n Boolean,\n", "file_path": "asn-compiler/src/parser/asn/structs/types/mod.rs", "rank": 77, "score": 91347.742949215 }, { "content": "use std::collections::BTreeMap;\n\n\n\npub(crate) mod constructed;\n\nuse constructed::ResolvedConstructedType;\n\n\n\npub(crate) mod base;\n\nuse base::ResolvedBaseType;\n\n\n\npub(crate) mod ioc;\n\n\n\npub(crate) mod constraints;\n\n\n\npub(crate) type ResolvedSetTypeMap = BTreeMap<(String, String), (String, Asn1ResolvedType)>;\n\n\n\n#[derive(Debug, Clone)]\n\npub(crate) struct ResolvedSetType {\n\n pub(crate) setref: String,\n\n pub(crate) types: ResolvedSetTypeMap,\n\n}\n\n\n", "file_path": "asn-compiler/src/resolver/asn/structs/types/mod.rs", "rank": 78, "score": 91347.67094041118 }, { "content": " Constructed(Asn1ConstructedType),\n\n}\n\n\n\nimpl Default for Asn1TypeKind {\n\n fn default() -> Self {\n\n Self::Reference(Asn1TypeReference::Reference(\"\".to_string()))\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub(crate) enum ActualParam {\n\n Set(String),\n\n Single(String),\n\n}\n\n\n\nimpl ActualParam {\n\n fn dependent_references(&self) -> Vec<String> {\n\n match self {\n\n Self::Set(ref s) => vec![s.clone()],\n\n Self::Single(ref s) => vec![s.clone()],\n", "file_path": "asn-compiler/src/parser/asn/structs/types/mod.rs", "rank": 79, "score": 91340.5423283009 }, { "content": "#[derive(Debug, Clone)]\n\npub(crate) enum Asn1ConstructedType {\n\n Choice(Asn1TypeChoice),\n\n Sequence(Asn1TypeSequence),\n\n SequenceOf(Asn1TypeSequenceOf),\n\n Set,\n\n SetOf,\n\n}\n\n\n\nimpl Asn1ConstructedType {\n\n pub(crate) fn dependent_references(&self) -> Vec<String> {\n\n match self {\n\n Self::Choice(ref c) => c.dependent_references(),\n\n Self::Sequence(ref s) => s.dependent_references(),\n\n Self::SequenceOf(ref so) => so.dependent_references(),\n\n _ => vec![],\n\n }\n\n }\n\n}\n", "file_path": "asn-compiler/src/parser/asn/structs/types/mod.rs", "rank": 80, "score": 91340.277338787 }, { "content": " Null,\n\n OctetString,\n\n ObjectIdentifier,\n\n RelativeOid,\n\n\n\n // Consumes a lot of String Types.\n\n CharacterString { str_type: String },\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub(crate) struct Asn1Type {\n\n pub(crate) kind: Asn1TypeKind,\n\n pub(crate) constraints: Option<Vec<Asn1Constraint>>,\n\n}\n\n\n\nimpl Asn1Type {\n\n pub(crate) fn dependent_references(&self) -> Vec<String> {\n\n let mut kind_references = match self.kind {\n\n Asn1TypeKind::Builtin(ref _b) => vec![],\n\n Asn1TypeKind::Reference(ref r) => r.dependent_references(),\n", "file_path": "asn-compiler/src/parser/asn/structs/types/mod.rs", "rank": 81, "score": 91337.89325093944 }, { "content": " }, // FIXME: For now We can make it a struct\n\n}\n\n\n\nimpl Asn1TypeReference {\n\n pub(crate) fn dependent_references(&self) -> Vec<String> {\n\n match self {\n\n Self::ClassField { classref, .. } => vec![classref.clone()],\n\n Self::Reference(ref r) => vec![r.clone()],\n\n Self::Parameterized { typeref, params } => {\n\n let mut dependent_references = vec![typeref.clone()];\n\n for param in params {\n\n dependent_references.extend(param.dependent_references());\n\n }\n\n dependent_references\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[allow(dead_code)]\n", "file_path": "asn-compiler/src/parser/asn/structs/types/mod.rs", "rank": 82, "score": 91337.05946175497 }, { "content": " }\n\n }\n\n\n\n pub(crate) fn param_string(&self) -> String {\n\n match self {\n\n Self::Set(ref r) | Self::Single(ref r) => r.clone(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub(crate) enum Asn1TypeReference {\n\n Reference(String),\n\n ClassField {\n\n classref: String,\n\n fieldref: String,\n\n },\n\n Parameterized {\n\n typeref: String,\n\n params: Vec<ActualParam>,\n", "file_path": "asn-compiler/src/parser/asn/structs/types/mod.rs", "rank": 83, "score": 91334.72669929282 }, { "content": " Asn1TypeKind::Constructed(ref c) => c.dependent_references(),\n\n };\n\n\n\n let mut constraint_references = vec![];\n\n if self.constraints.is_some() {\n\n for constraint in self.constraints.as_ref().unwrap() {\n\n let constraint_dependent = constraint.dependent_references();\n\n constraint_references.extend(constraint_dependent);\n\n }\n\n }\n\n\n\n kind_references.extend(constraint_references);\n\n kind_references\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub(crate) enum Asn1TypeKind {\n\n Builtin(Asn1BuiltinType),\n\n Reference(Asn1TypeReference),\n", "file_path": "asn-compiler/src/parser/asn/structs/types/mod.rs", "rank": 84, "score": 91333.23839316018 }, { "content": "#[derive(Debug, Clone)]\n\npub(crate) enum Asn1ResolvedType {\n\n // One of the resolved Base Types.\n\n Base(ResolvedBaseType),\n\n\n\n // A Constructed Type with fields that are Asn1ResolvedType.\n\n Constructed(ResolvedConstructedType),\n\n\n\n // A reference to a Resolved Type\n\n Reference(String),\n\n\n\n // A Set of Resolved Types. This is true if the type is obtained from Object Sets or Value Sets\n\n Set(ResolvedSetType),\n\n}\n", "file_path": "asn-compiler/src/resolver/asn/structs/types/mod.rs", "rank": 85, "score": 91330.95707440139 }, { "content": "fn main() {\n\n eprintln!(\"Null\");\n\n}\n", "file_path": "codecs_derive/tests/07-null.rs", "rank": 86, "score": 88975.82184584191 }, { "content": "fn main() {\n\n eprintln!(\"Sequence\");\n\n}\n", "file_path": "codecs_derive/tests/08-seq.rs", "rank": 87, "score": 88975.82184584191 }, { "content": "fn main() {\n\n eprintln!(\"BIT STRING\");\n\n}\n", "file_path": "codecs_derive/tests/04-bitstring.rs", "rank": 88, "score": 88975.82184584191 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn tests() {\n\n let t = trybuild::TestCases::new();\n\n t.pass(\"tests/01-choice.rs\");\n\n t.pass(\"tests/02-integer.rs\");\n\n t.pass(\"tests/03-enumerated.rs\");\n\n t.pass(\"tests/04-bitstring.rs\");\n\n t.pass(\"tests/05-octetstring.rs\");\n\n t.pass(\"tests/06-charstring.rs\");\n\n t.pass(\"tests/07-null.rs\");\n\n t.pass(\"tests/08-seq.rs\");\n\n t.pass(\"tests/09-open.rs\");\n\n t.pass(\"tests/10-seqof.rs\");\n\n}\n", "file_path": "codecs_derive/tests/compile.rs", "rank": 89, "score": 88975.82184584191 }, { "content": "fn main() {\n\n eprintln!(\"SequenceOf\");\n\n}\n", "file_path": "codecs_derive/tests/10-seqof.rs", "rank": 90, "score": 88975.82184584191 }, { "content": "fn main() {\n\n eprintln!(\"Choice\");\n\n}\n", "file_path": "codecs_derive/tests/01-choice.rs", "rank": 91, "score": 88975.82184584191 }, { "content": "fn main() {\n\n eprintln!(\"Integer\");\n\n}\n", "file_path": "codecs_derive/tests/02-integer.rs", "rank": 92, "score": 88975.82184584191 }, { "content": "fn main() {\n\n eprintln!(\"Charstring\");\n\n}\n", "file_path": "codecs_derive/tests/06-charstring.rs", "rank": 93, "score": 88975.82184584191 }, { "content": "fn main() {\n\n eprintln!(\"Open\");\n\n}\n", "file_path": "codecs_derive/tests/09-open.rs", "rank": 94, "score": 88975.82184584191 }, { "content": "fn main() {\n\n eprintln!(\"OCTET STRING\");\n\n}\n", "file_path": "codecs_derive/tests/05-octetstring.rs", "rank": 95, "score": 88975.82184584191 }, { "content": "fn main() {\n\n eprintln!(\"Enumerated\");\n\n}\n", "file_path": "codecs_derive/tests/03-enumerated.rs", "rank": 96, "score": 88975.82184584191 }, { "content": "fn resolve_sequence_type(\n\n sequence: &Asn1TypeSequence,\n\n resolver: &mut Resolver,\n\n) -> Result<Asn1ResolvedType, Error> {\n\n let mut components = vec![];\n\n // FIXME: implement for additional_components too\n\n for c in &sequence.root_components {\n\n let ty = match resolve_type(&c.component.ty, resolver) {\n\n Ok(ty) => ty,\n\n Err(_e) => {\n\n return resolve_sequence_classfield_components(sequence, resolver);\n\n }\n\n };\n\n let component = ResolvedComponent {\n\n id: c.component.id.clone(),\n\n ty,\n\n };\n\n let seq_component = ResolvedSeqComponent {\n\n component,\n\n optional: c.optional,\n", "file_path": "asn-compiler/src/resolver/asn/types/constructed.rs", "rank": 97, "score": 85638.39642001931 }, { "content": "fn resolve_reference_type(\n\n ty: &Asn1Type,\n\n resolver: &mut Resolver,\n\n) -> Result<Asn1ResolvedType, Error> {\n\n if let Asn1TypeKind::Reference(ref reference) = ty.kind {\n\n match reference {\n\n Asn1TypeReference::Reference(ref r) => {\n\n let resolved = resolver.resolved_defs.get(r);\n\n match resolved {\n\n Some(res) => match res {\n\n Asn1ResolvedDefinition::Type(..) => {\n\n Ok(Asn1ResolvedType::Reference(r.to_string()))\n\n }\n\n _ => Err(resolve_error!(\n\n \"Expected a Resolved Type, found {:#?}\",\n\n resolved\n\n )),\n\n },\n\n None => Err(resolve_error!(\n\n \"Referenced Type for '{}' Not resolved yet!\",\n", "file_path": "asn-compiler/src/resolver/asn/types/int.rs", "rank": 98, "score": 85638.39642001931 }, { "content": "fn resolve_choice_type(\n\n choice: &Asn1TypeChoice,\n\n resolver: &mut Resolver,\n\n) -> Result<Asn1ResolvedType, Error> {\n\n let mut root_components = vec![];\n\n for c in &choice.root_components {\n\n let ty = resolve_type(&c.ty, resolver)?;\n\n let component = ResolvedComponent {\n\n id: c.id.clone(),\n\n ty,\n\n };\n\n root_components.push(component);\n\n }\n\n\n\n let additions = if choice.additions.is_some() {\n\n let mut components = vec![];\n\n for addition in choice.additions.as_ref().unwrap() {\n\n for c in &addition.components {\n\n let ty = resolve_type(&c.ty, resolver)?;\n\n let component = ResolvedComponent {\n", "file_path": "asn-compiler/src/resolver/asn/types/constructed.rs", "rank": 99, "score": 85638.39642001931 } ]
Rust
sdk/finspacedata/src/json_ser.rs
StevenBlack/aws-sdk-rust
f4a1458f0154318c47d7e6a4ac55226f400fbfbc
pub fn serialize_structure_crate_input_create_changeset_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::CreateChangesetInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_1) = &input.change_type { object.key("changeType").string(var_1.as_str()); } if let Some(var_2) = &input.client_token { object.key("clientToken").string(var_2.as_str()); } if let Some(var_3) = &input.format_params { let mut object_4 = object.key("formatParams").start_object(); for (key_5, value_6) in var_3 { { object_4.key(key_5).string(value_6.as_str()); } } object_4.finish(); } if let Some(var_7) = &input.source_params { let mut object_8 = object.key("sourceParams").start_object(); for (key_9, value_10) in var_7 { { object_8.key(key_9).string(value_10.as_str()); } } object_8.finish(); } Ok(()) } pub fn serialize_structure_crate_input_create_dataset_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::CreateDatasetInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_11) = &input.alias { object.key("alias").string(var_11.as_str()); } if let Some(var_12) = &input.client_token { object.key("clientToken").string(var_12.as_str()); } if let Some(var_13) = &input.dataset_description { object.key("datasetDescription").string(var_13.as_str()); } if let Some(var_14) = &input.dataset_title { object.key("datasetTitle").string(var_14.as_str()); } if let Some(var_15) = &input.kind { object.key("kind").string(var_15.as_str()); } if let Some(var_16) = &input.owner_info { let mut object_17 = object.key("ownerInfo").start_object(); crate::json_ser::serialize_structure_crate_model_dataset_owner_info( &mut object_17, var_16, )?; object_17.finish(); } if let Some(var_18) = &input.permission_group_params { let mut object_19 = object.key("permissionGroupParams").start_object(); crate::json_ser::serialize_structure_crate_model_permission_group_params( &mut object_19, var_18, )?; object_19.finish(); } if let Some(var_20) = &input.schema_definition { let mut object_21 = object.key("schemaDefinition").start_object(); crate::json_ser::serialize_structure_crate_model_schema_union(&mut object_21, var_20)?; object_21.finish(); } Ok(()) } pub fn serialize_structure_crate_input_create_data_view_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::CreateDataViewInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_22) = &input.as_of_timestamp { object.key("asOfTimestamp").number( #[allow(clippy::useless_conversion)] aws_smithy_types::Number::NegInt((*var_22).into()), ); } if input.auto_update { object.key("autoUpdate").boolean(input.auto_update); } if let Some(var_23) = &input.client_token { object.key("clientToken").string(var_23.as_str()); } if let Some(var_24) = &input.destination_type_params { let mut object_25 = object.key("destinationTypeParams").start_object(); crate::json_ser::serialize_structure_crate_model_data_view_destination_type_params( &mut object_25, var_24, )?; object_25.finish(); } if let Some(var_26) = &input.partition_columns { let mut array_27 = object.key("partitionColumns").start_array(); for item_28 in var_26 { { array_27.value().string(item_28.as_str()); } } array_27.finish(); } if let Some(var_29) = &input.sort_columns { let mut array_30 = object.key("sortColumns").start_array(); for item_31 in var_29 { { array_30.value().string(item_31.as_str()); } } array_30.finish(); } Ok(()) } pub fn serialize_structure_crate_input_create_permission_group_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::CreatePermissionGroupInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_32) = &input.application_permissions { let mut array_33 = object.key("applicationPermissions").start_array(); for item_34 in var_32 { { array_33.value().string(item_34.as_str()); } } array_33.finish(); } if let Some(var_35) = &input.client_token { object.key("clientToken").string(var_35.as_str()); } if let Some(var_36) = &input.description { object.key("description").string(var_36.as_str()); } if let Some(var_37) = &input.name { object.key("name").string(var_37.as_str()); } Ok(()) } pub fn serialize_structure_crate_input_create_user_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::CreateUserInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_38) = &input.api_access { object.key("ApiAccess").string(var_38.as_str()); } if let Some(var_39) = &input.api_access_principal_arn { object.key("apiAccessPrincipalArn").string(var_39.as_str()); } if let Some(var_40) = &input.client_token { object.key("clientToken").string(var_40.as_str()); } if let Some(var_41) = &input.email_address { object.key("emailAddress").string(var_41.as_str()); } if let Some(var_42) = &input.first_name { object.key("firstName").string(var_42.as_str()); } if let Some(var_43) = &input.last_name { object.key("lastName").string(var_43.as_str()); } if let Some(var_44) = &input.r#type { object.key("type").string(var_44.as_str()); } Ok(()) } pub fn serialize_structure_crate_input_disable_user_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::DisableUserInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_45) = &input.client_token { object.key("clientToken").string(var_45.as_str()); } Ok(()) } pub fn serialize_structure_crate_input_enable_user_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::EnableUserInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_46) = &input.client_token { object.key("clientToken").string(var_46.as_str()); } Ok(()) } pub fn serialize_structure_crate_input_get_working_location_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::GetWorkingLocationInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_47) = &input.location_type { object.key("locationType").string(var_47.as_str()); } Ok(()) } pub fn serialize_structure_crate_input_reset_user_password_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::ResetUserPasswordInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_48) = &input.client_token { object.key("clientToken").string(var_48.as_str()); } Ok(()) } pub fn serialize_structure_crate_input_update_changeset_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::UpdateChangesetInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_49) = &input.client_token { object.key("clientToken").string(var_49.as_str()); } if let Some(var_50) = &input.format_params { let mut object_51 = object.key("formatParams").start_object(); for (key_52, value_53) in var_50 { { object_51.key(key_52).string(value_53.as_str()); } } object_51.finish(); } if let Some(var_54) = &input.source_params { let mut object_55 = object.key("sourceParams").start_object(); for (key_56, value_57) in var_54 { { object_55.key(key_56).string(value_57.as_str()); } } object_55.finish(); } Ok(()) } pub fn serialize_structure_crate_input_update_dataset_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::UpdateDatasetInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_58) = &input.alias { object.key("alias").string(var_58.as_str()); } if let Some(var_59) = &input.client_token { object.key("clientToken").string(var_59.as_str()); } if let Some(var_60) = &input.dataset_description { object.key("datasetDescription").string(var_60.as_str()); } if let Some(var_61) = &input.dataset_title { object.key("datasetTitle").string(var_61.as_str()); } if let Some(var_62) = &input.kind { object.key("kind").string(var_62.as_str()); } if let Some(var_63) = &input.schema_definition { let mut object_64 = object.key("schemaDefinition").start_object(); crate::json_ser::serialize_structure_crate_model_schema_union(&mut object_64, var_63)?; object_64.finish(); } Ok(()) } pub fn serialize_structure_crate_input_update_permission_group_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::UpdatePermissionGroupInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_65) = &input.application_permissions { let mut array_66 = object.key("applicationPermissions").start_array(); for item_67 in var_65 { { array_66.value().string(item_67.as_str()); } } array_66.finish(); } if let Some(var_68) = &input.client_token { object.key("clientToken").string(var_68.as_str()); } if let Some(var_69) = &input.description { object.key("description").string(var_69.as_str()); } if let Some(var_70) = &input.name { object.key("name").string(var_70.as_str()); } Ok(()) } pub fn serialize_structure_crate_input_update_user_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::UpdateUserInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_71) = &input.api_access { object.key("apiAccess").string(var_71.as_str()); } if let Some(var_72) = &input.api_access_principal_arn { object.key("apiAccessPrincipalArn").string(var_72.as_str()); } if let Some(var_73) = &input.client_token { object.key("clientToken").string(var_73.as_str()); } if let Some(var_74) = &input.first_name { object.key("firstName").string(var_74.as_str()); } if let Some(var_75) = &input.last_name { object.key("lastName").string(var_75.as_str()); } if let Some(var_76) = &input.r#type { object.key("type").string(var_76.as_str()); } Ok(()) } pub fn serialize_structure_crate_model_dataset_owner_info( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::model::DatasetOwnerInfo, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_77) = &input.name { object.key("name").string(var_77.as_str()); } if let Some(var_78) = &input.phone_number { object.key("phoneNumber").string(var_78.as_str()); } if let Some(var_79) = &input.email { object.key("email").string(var_79.as_str()); } Ok(()) } pub fn serialize_structure_crate_model_permission_group_params( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::model::PermissionGroupParams, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_80) = &input.permission_group_id { object.key("permissionGroupId").string(var_80.as_str()); } if let Some(var_81) = &input.dataset_permissions { let mut array_82 = object.key("datasetPermissions").start_array(); for item_83 in var_81 { { let mut object_84 = array_82.value().start_object(); crate::json_ser::serialize_structure_crate_model_resource_permission( &mut object_84, item_83, )?; object_84.finish(); } } array_82.finish(); } Ok(()) } pub fn serialize_structure_crate_model_schema_union( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::model::SchemaUnion, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_85) = &input.tabular_schema_config { let mut object_86 = object.key("tabularSchemaConfig").start_object(); crate::json_ser::serialize_structure_crate_model_schema_definition(&mut object_86, var_85)?; object_86.finish(); } Ok(()) } pub fn serialize_structure_crate_model_data_view_destination_type_params( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::model::DataViewDestinationTypeParams, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_87) = &input.destination_type { object.key("destinationType").string(var_87.as_str()); } if let Some(var_88) = &input.s3_destination_export_file_format { object .key("s3DestinationExportFileFormat") .string(var_88.as_str()); } if let Some(var_89) = &input.s3_destination_export_file_format_options { let mut object_90 = object .key("s3DestinationExportFileFormatOptions") .start_object(); for (key_91, value_92) in var_89 { { object_90.key(key_91).string(value_92.as_str()); } } object_90.finish(); } Ok(()) } pub fn serialize_structure_crate_model_resource_permission( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::model::ResourcePermission, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_93) = &input.permission { object.key("permission").string(var_93.as_str()); } Ok(()) } pub fn serialize_structure_crate_model_schema_definition( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::model::SchemaDefinition, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_94) = &input.columns { let mut array_95 = object.key("columns").start_array(); for item_96 in var_94 { { let mut object_97 = array_95.value().start_object(); crate::json_ser::serialize_structure_crate_model_column_definition( &mut object_97, item_96, )?; object_97.finish(); } } array_95.finish(); } if let Some(var_98) = &input.primary_key_columns { let mut array_99 = object.key("primaryKeyColumns").start_array(); for item_100 in var_98 { { array_99.value().string(item_100.as_str()); } } array_99.finish(); } Ok(()) } pub fn serialize_structure_crate_model_column_definition( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::model::ColumnDefinition, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_101) = &input.data_type { object.key("dataType").string(var_101.as_str()); } if let Some(var_102) = &input.column_name { object.key("columnName").string(var_102.as_str()); } if let Some(var_103) = &input.column_description { object.key("columnDescription").string(var_103.as_str()); } Ok(()) }
pub fn serialize_structure_crate_input_create_changeset_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::CreateChangesetInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_1) = &input.change_type { object.key("changeType").string(var_1.as_str()); } if let Some(var_2) = &input.client_token { object.key("clientToken").string(var_2.as_str()); } if let Some(var_3) = &input.format_params { let mut object_4 = object.key("formatParams").start_object(); for (key_5, value_6) in var_3 { { object_4.key(key_5).string(value_6.as_str()); } } object_4.finish(); } if let Some(var_7) = &input.source_params { let mut object_8 = object.key("sourceParams").start_object(); for (key_9, value_10) in var_7 { { object_8.key(key_9).string(value_10.as_str()); } } object_8.finish(); } Ok(()) } pub fn serialize_structure_crate_input_create_dataset_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::CreateDatasetInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_11) = &input.alias { object.key("alias").string(var_11.as_str()); } if let Some(var_12) = &input.client_token { object.key("clientToken").string(var_12.as_str()); } if let Some(var_13) = &input.dataset_description { object.key("datasetDescription").string(var_13.as_str()); } if let Some(var_14) = &input.dataset_title { object.key("datasetTitle").string(var_14.as_str()); } if let Some(var_15) = &input.kind { object.key("kind").string(var_15.as_str()); } if let Some(var_16) = &input.owner_info { let mut object_17 = object.key("ownerInfo").start_object(); crate::json_ser::serialize_structure_crate_model_dataset_owner_info( &mut object_17, var_16, )?; object_17.finish(); } if let Some(var_18) = &input.permission_group_params { let mut object_19 = object.key("permissionGroupParams").start_object(); crate::json_ser::serialize_structure_crate_model_permission_group_params( &mut object_19, var_18, )?; object_19.finish(); } if let Some(var_20) = &input.schema_definition { let mut object_21 = object.key("schemaDefinition").start_object(); crate::json_ser::serialize_structure_crate_model_schema_union(&mut object_21, var_20)?; object_21.finish(); } Ok(()) } pub fn serialize_structure_crate_input_create_data_view_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::CreateDataViewInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_22) = &input.as_of_timestamp { object.key("asOfTimestamp").number( #[allow(clippy::useless_conversion)] aws_smithy_types::Number::NegInt((*var_22).into()), ); } if input.auto_update { object.key("autoUpdate").boolean(input.auto_update); } if let Some(var_23) = &input.client_token { object.key("clientToken").string(var_23.as_str()); } if let Some(var_24) = &input.destination_type_params { let mut object_25 = object.key("destinationTypeParams").start_object(); crate::json_ser::serialize_structure_crate_model_data_view_destination_type_params( &mut object_25, var_24, )?; object_25.finish(); } if let Some(var_26) = &input.partition_columns { let mut array_27 = object.key("partitionColumns").start_array(); for item_28 in var_26 { { array_27.value().string(item_28.as_str()); } } array_27.finish(); } if let Some(var_29) = &input.sort_columns { let mut array_30 = object.key("sortColumns").start_array(); for item_31 in var_29 { { array_30.value().string(item_31.as_str()); } } array_30.finish(); } Ok(()) } pub fn serialize_structure_crate_input_create_permission_group_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::CreatePermissionGroupInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_32) = &input.application_permissions { let mut array_33 = object.key("applicationPermissions").start_array(); for item_34 in var_32 { { arr
r()); } Ok(()) } pub fn serialize_structure_crate_input_update_user_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::UpdateUserInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_71) = &input.api_access { object.key("apiAccess").string(var_71.as_str()); } if let Some(var_72) = &input.api_access_principal_arn { object.key("apiAccessPrincipalArn").string(var_72.as_str()); } if let Some(var_73) = &input.client_token { object.key("clientToken").string(var_73.as_str()); } if let Some(var_74) = &input.first_name { object.key("firstName").string(var_74.as_str()); } if let Some(var_75) = &input.last_name { object.key("lastName").string(var_75.as_str()); } if let Some(var_76) = &input.r#type { object.key("type").string(var_76.as_str()); } Ok(()) } pub fn serialize_structure_crate_model_dataset_owner_info( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::model::DatasetOwnerInfo, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_77) = &input.name { object.key("name").string(var_77.as_str()); } if let Some(var_78) = &input.phone_number { object.key("phoneNumber").string(var_78.as_str()); } if let Some(var_79) = &input.email { object.key("email").string(var_79.as_str()); } Ok(()) } pub fn serialize_structure_crate_model_permission_group_params( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::model::PermissionGroupParams, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_80) = &input.permission_group_id { object.key("permissionGroupId").string(var_80.as_str()); } if let Some(var_81) = &input.dataset_permissions { let mut array_82 = object.key("datasetPermissions").start_array(); for item_83 in var_81 { { let mut object_84 = array_82.value().start_object(); crate::json_ser::serialize_structure_crate_model_resource_permission( &mut object_84, item_83, )?; object_84.finish(); } } array_82.finish(); } Ok(()) } pub fn serialize_structure_crate_model_schema_union( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::model::SchemaUnion, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_85) = &input.tabular_schema_config { let mut object_86 = object.key("tabularSchemaConfig").start_object(); crate::json_ser::serialize_structure_crate_model_schema_definition(&mut object_86, var_85)?; object_86.finish(); } Ok(()) } pub fn serialize_structure_crate_model_data_view_destination_type_params( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::model::DataViewDestinationTypeParams, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_87) = &input.destination_type { object.key("destinationType").string(var_87.as_str()); } if let Some(var_88) = &input.s3_destination_export_file_format { object .key("s3DestinationExportFileFormat") .string(var_88.as_str()); } if let Some(var_89) = &input.s3_destination_export_file_format_options { let mut object_90 = object .key("s3DestinationExportFileFormatOptions") .start_object(); for (key_91, value_92) in var_89 { { object_90.key(key_91).string(value_92.as_str()); } } object_90.finish(); } Ok(()) } pub fn serialize_structure_crate_model_resource_permission( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::model::ResourcePermission, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_93) = &input.permission { object.key("permission").string(var_93.as_str()); } Ok(()) } pub fn serialize_structure_crate_model_schema_definition( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::model::SchemaDefinition, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_94) = &input.columns { let mut array_95 = object.key("columns").start_array(); for item_96 in var_94 { { let mut object_97 = array_95.value().start_object(); crate::json_ser::serialize_structure_crate_model_column_definition( &mut object_97, item_96, )?; object_97.finish(); } } array_95.finish(); } if let Some(var_98) = &input.primary_key_columns { let mut array_99 = object.key("primaryKeyColumns").start_array(); for item_100 in var_98 { { array_99.value().string(item_100.as_str()); } } array_99.finish(); } Ok(()) } pub fn serialize_structure_crate_model_column_definition( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::model::ColumnDefinition, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_101) = &input.data_type { object.key("dataType").string(var_101.as_str()); } if let Some(var_102) = &input.column_name { object.key("columnName").string(var_102.as_str()); } if let Some(var_103) = &input.column_description { object.key("columnDescription").string(var_103.as_str()); } Ok(()) }
ay_33.value().string(item_34.as_str()); } } array_33.finish(); } if let Some(var_35) = &input.client_token { object.key("clientToken").string(var_35.as_str()); } if let Some(var_36) = &input.description { object.key("description").string(var_36.as_str()); } if let Some(var_37) = &input.name { object.key("name").string(var_37.as_str()); } Ok(()) } pub fn serialize_structure_crate_input_create_user_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::CreateUserInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_38) = &input.api_access { object.key("ApiAccess").string(var_38.as_str()); } if let Some(var_39) = &input.api_access_principal_arn { object.key("apiAccessPrincipalArn").string(var_39.as_str()); } if let Some(var_40) = &input.client_token { object.key("clientToken").string(var_40.as_str()); } if let Some(var_41) = &input.email_address { object.key("emailAddress").string(var_41.as_str()); } if let Some(var_42) = &input.first_name { object.key("firstName").string(var_42.as_str()); } if let Some(var_43) = &input.last_name { object.key("lastName").string(var_43.as_str()); } if let Some(var_44) = &input.r#type { object.key("type").string(var_44.as_str()); } Ok(()) } pub fn serialize_structure_crate_input_disable_user_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::DisableUserInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_45) = &input.client_token { object.key("clientToken").string(var_45.as_str()); } Ok(()) } pub fn serialize_structure_crate_input_enable_user_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::EnableUserInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_46) = &input.client_token { object.key("clientToken").string(var_46.as_str()); } Ok(()) } pub fn serialize_structure_crate_input_get_working_location_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::GetWorkingLocationInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_47) = &input.location_type { object.key("locationType").string(var_47.as_str()); } Ok(()) } pub fn serialize_structure_crate_input_reset_user_password_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::ResetUserPasswordInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_48) = &input.client_token { object.key("clientToken").string(var_48.as_str()); } Ok(()) } pub fn serialize_structure_crate_input_update_changeset_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::UpdateChangesetInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_49) = &input.client_token { object.key("clientToken").string(var_49.as_str()); } if let Some(var_50) = &input.format_params { let mut object_51 = object.key("formatParams").start_object(); for (key_52, value_53) in var_50 { { object_51.key(key_52).string(value_53.as_str()); } } object_51.finish(); } if let Some(var_54) = &input.source_params { let mut object_55 = object.key("sourceParams").start_object(); for (key_56, value_57) in var_54 { { object_55.key(key_56).string(value_57.as_str()); } } object_55.finish(); } Ok(()) } pub fn serialize_structure_crate_input_update_dataset_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::UpdateDatasetInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_58) = &input.alias { object.key("alias").string(var_58.as_str()); } if let Some(var_59) = &input.client_token { object.key("clientToken").string(var_59.as_str()); } if let Some(var_60) = &input.dataset_description { object.key("datasetDescription").string(var_60.as_str()); } if let Some(var_61) = &input.dataset_title { object.key("datasetTitle").string(var_61.as_str()); } if let Some(var_62) = &input.kind { object.key("kind").string(var_62.as_str()); } if let Some(var_63) = &input.schema_definition { let mut object_64 = object.key("schemaDefinition").start_object(); crate::json_ser::serialize_structure_crate_model_schema_union(&mut object_64, var_63)?; object_64.finish(); } Ok(()) } pub fn serialize_structure_crate_input_update_permission_group_input( object: &mut aws_smithy_json::serialize::JsonObjectWriter, input: &crate::input::UpdatePermissionGroupInput, ) -> Result<(), aws_smithy_http::operation::SerializationError> { if let Some(var_65) = &input.application_permissions { let mut array_66 = object.key("applicationPermissions").start_array(); for item_67 in var_65 { { array_66.value().string(item_67.as_str()); } } array_66.finish(); } if let Some(var_68) = &input.client_token { object.key("clientToken").string(var_68.as_str()); } if let Some(var_69) = &input.description { object.key("description").string(var_69.as_str()); } if let Some(var_70) = &input.name { object.key("name").string(var_70.as_st
random
[]
Rust
src/view/rikai.rs
Netdex/niinii
d2fa91f3c16b1bdc20d7799a79e1d354247cd55e
use std::cell::RefCell; use std::collections::{HashMap, HashSet}; use ichiran::romanize::*; use imgui::*; use super::deepl::DeepLView; use super::mixins::*; use super::settings::{DisplayRubyText, SettingsView}; use crate::backend::renderer::Env; use crate::gloss::Gloss; use crate::translation::Translation; use crate::view::{raw::RawView, term::TermView}; #[derive(Debug)] pub struct RikaiView { gloss: Option<Gloss>, translation: Option<Translation>, show_term_window: RefCell<HashSet<Romanized>>, selected_clause: RefCell<HashMap<Segment, i32>>, } impl RikaiView { pub fn new() -> Self { Self { gloss: None, translation: None, show_term_window: RefCell::new(HashSet::new()), selected_clause: RefCell::new(HashMap::new()), } } pub fn set_gloss(&mut self, gloss: Option<Gloss>) { self.gloss = gloss; } pub fn gloss(&self) -> Option<&Gloss> { self.gloss.as_ref() } pub fn set_translation(&mut self, translation: Option<Translation>) { self.translation = translation; } pub fn translation(&self) -> Option<&Translation> { self.translation.as_ref() } fn term_window( &self, env: &mut Env, ui: &Ui, settings: &SettingsView, romanized: &Romanized, ) -> bool { let mut opened = true; Window::new(&romanized.term().text().to_string()) .size_constraints([300.0, 100.0], [1000.0, 1000.0]) .save_settings(false) .focus_on_appearing(true) .opened(&mut opened) .build(ui, || { if let Some(gloss) = &self.gloss { TermView::new(&gloss.jmdict_data, &gloss.kanji_info, romanized, 0.0) .ui(env, ui, settings); } }); opened } fn term_tooltip(&self, env: &mut Env, ui: &Ui, settings: &SettingsView, romanized: &Romanized) { ui.tooltip(|| { if let Some(gloss) = &self.gloss { TermView::new(&gloss.jmdict_data, &gloss.kanji_info, romanized, 30.0) .ui(env, ui, settings) } }); } fn add_skipped(&self, env: &mut Env, ui: &Ui, settings: &SettingsView, skipped: &str) { draw_kanji_text( ui, env, skipped, false, true, UnderlineMode::None, if settings.display_ruby_text() == DisplayRubyText::None { RubyTextMode::None } else { RubyTextMode::Pad }, ); } fn add_romanized( &self, env: &mut Env, ui: &Ui, settings: &SettingsView, romanized: &Romanized, ruby_text: DisplayRubyText, underline: UnderlineMode, ) -> bool { let term = romanized.term(); let fg_text = match ruby_text { DisplayRubyText::None => RubyTextMode::None, DisplayRubyText::Furigana if term.text() != term.kana() => { RubyTextMode::Text(term.kana()) } DisplayRubyText::Romaji => RubyTextMode::Text(romanized.romaji()), _ => RubyTextMode::Pad, }; let ul_hover = draw_kanji_text( ui, env, term.text(), true, settings.stroke_text, underline, fg_text, ); if ui.is_item_hovered() { ui.set_mouse_cursor(Some(MouseCursor::Hand)); self.term_tooltip(env, ui, settings, romanized); } let mut show_term_window = self.show_term_window.borrow_mut(); if ui.is_item_clicked() { show_term_window.insert(romanized.clone()); } ul_hover } fn add_segment(&self, env: &mut Env, ui: &Ui, settings: &SettingsView, segment: &Segment) { match segment { Segment::Skipped(skipped) => { self.add_skipped(env, ui, settings, skipped); } Segment::Clauses(clauses) => { let mut selected_clause = self.selected_clause.borrow_mut(); let mut clause_idx = selected_clause.get(segment).cloned().unwrap_or(0); let clause = clauses.get(clause_idx as usize); if let Some(clause) = clause { let romanized = clause.romanized(); for (idx, rz) in romanized.iter().enumerate() { let ul_hover = self.add_romanized( env, ui, settings, rz, settings.display_ruby_text(), if idx == romanized.len() - 1 { UnderlineMode::Normal } else { UnderlineMode::Pad }, ); if ul_hover { let scroll = ui.io().mouse_wheel as i32; clause_idx -= scroll; clause_idx = clause_idx.clamp(0, clauses.len() as i32 - 1); if scroll != 0 { selected_clause.insert(segment.clone(), clause_idx); } ui.tooltip(|| { ui.text(format!( "Alternate #{}/{} score={} (scroll to cycle)", clause_idx + 1, clauses.len(), clause.score() )); ui.separator(); let _wrap_token = ui.push_text_wrap_pos_with_pos(ui.current_font_size() * 20.0); let romaji = clause .romanized() .iter() .map(|x| x.romaji()) .collect::<Vec<&str>>() .join(" "); ui.text_wrapped(romaji); _wrap_token.pop(ui); }); } } } } } } fn add_root(&self, env: &mut Env, ui: &Ui, settings: &SettingsView, root: &Root) { for segment in root.segments() { self.add_segment(env, ui, settings, segment); } } pub fn ui(&mut self, env: &mut Env, ui: &Ui, settings: &SettingsView, show_raw: &mut bool) { if let Some(gloss) = &self.gloss { ui.text(""); self.add_root(env, ui, settings, &gloss.root); if *show_raw { Window::new("Raw") .size([300., 110.], Condition::FirstUseEver) .opened(show_raw) .build(ui, || { RawView::new(&gloss.root).ui(env, ui); }); } } if let Some(translation) = &self.translation { ui.separator(); DeepLView::new(translation).ui(ui); ui.separator(); } self.show_term_window .borrow_mut() .retain(|romanized| self.term_window(env, ui, settings, romanized)); } }
use std::cell::RefCell; use std::collections::{HashMap, HashSet}; use ichiran::romanize::*; use imgui::*; use super::deepl::DeepLView; use super::mixins::*; use super::settings::{DisplayRubyText, SettingsView}; use crate::backend::renderer::Env; use crate::gloss::Gloss; use crate::translation::Translation; use crate::view::{raw::RawView, term::TermView}; #[derive(Debug)] pub struct RikaiView { gloss: Option<Gloss>, translation: Option<Translation>, show_term_window: RefCell<HashSet<Romanized>>, selected_clause: RefCell<HashMap<Segment, i32>>, } impl RikaiView { pub fn new() -> Self { Self { gloss: None, translation: None, show_term_window: RefCell::new(HashSet::new()), selected_clause: RefCell::new(HashMap::new()), } } pub fn set_gloss(&mut self, gloss: Option<Gloss>) { self.gloss = gloss; } pub fn gloss(&self) -> Option<&Gloss> { self.gloss.as_ref() } pub fn set_translation(&mut self, translation: Option<Translation>) { self.translation = translation; } pub fn translation(&self) -> Option<&Translation> {
text(""); self.add_root(env, ui, settings, &gloss.root); if *show_raw { Window::new("Raw") .size([300., 110.], Condition::FirstUseEver) .opened(show_raw) .build(ui, || { RawView::new(&gloss.root).ui(env, ui); }); } } if let Some(translation) = &self.translation { ui.separator(); DeepLView::new(translation).ui(ui); ui.separator(); } self.show_term_window .borrow_mut() .retain(|romanized| self.term_window(env, ui, settings, romanized)); } }
self.translation.as_ref() } fn term_window( &self, env: &mut Env, ui: &Ui, settings: &SettingsView, romanized: &Romanized, ) -> bool { let mut opened = true; Window::new(&romanized.term().text().to_string()) .size_constraints([300.0, 100.0], [1000.0, 1000.0]) .save_settings(false) .focus_on_appearing(true) .opened(&mut opened) .build(ui, || { if let Some(gloss) = &self.gloss { TermView::new(&gloss.jmdict_data, &gloss.kanji_info, romanized, 0.0) .ui(env, ui, settings); } }); opened } fn term_tooltip(&self, env: &mut Env, ui: &Ui, settings: &SettingsView, romanized: &Romanized) { ui.tooltip(|| { if let Some(gloss) = &self.gloss { TermView::new(&gloss.jmdict_data, &gloss.kanji_info, romanized, 30.0) .ui(env, ui, settings) } }); } fn add_skipped(&self, env: &mut Env, ui: &Ui, settings: &SettingsView, skipped: &str) { draw_kanji_text( ui, env, skipped, false, true, UnderlineMode::None, if settings.display_ruby_text() == DisplayRubyText::None { RubyTextMode::None } else { RubyTextMode::Pad }, ); } fn add_romanized( &self, env: &mut Env, ui: &Ui, settings: &SettingsView, romanized: &Romanized, ruby_text: DisplayRubyText, underline: UnderlineMode, ) -> bool { let term = romanized.term(); let fg_text = match ruby_text { DisplayRubyText::None => RubyTextMode::None, DisplayRubyText::Furigana if term.text() != term.kana() => { RubyTextMode::Text(term.kana()) } DisplayRubyText::Romaji => RubyTextMode::Text(romanized.romaji()), _ => RubyTextMode::Pad, }; let ul_hover = draw_kanji_text( ui, env, term.text(), true, settings.stroke_text, underline, fg_text, ); if ui.is_item_hovered() { ui.set_mouse_cursor(Some(MouseCursor::Hand)); self.term_tooltip(env, ui, settings, romanized); } let mut show_term_window = self.show_term_window.borrow_mut(); if ui.is_item_clicked() { show_term_window.insert(romanized.clone()); } ul_hover } fn add_segment(&self, env: &mut Env, ui: &Ui, settings: &SettingsView, segment: &Segment) { match segment { Segment::Skipped(skipped) => { self.add_skipped(env, ui, settings, skipped); } Segment::Clauses(clauses) => { let mut selected_clause = self.selected_clause.borrow_mut(); let mut clause_idx = selected_clause.get(segment).cloned().unwrap_or(0); let clause = clauses.get(clause_idx as usize); if let Some(clause) = clause { let romanized = clause.romanized(); for (idx, rz) in romanized.iter().enumerate() { let ul_hover = self.add_romanized( env, ui, settings, rz, settings.display_ruby_text(), if idx == romanized.len() - 1 { UnderlineMode::Normal } else { UnderlineMode::Pad }, ); if ul_hover { let scroll = ui.io().mouse_wheel as i32; clause_idx -= scroll; clause_idx = clause_idx.clamp(0, clauses.len() as i32 - 1); if scroll != 0 { selected_clause.insert(segment.clone(), clause_idx); } ui.tooltip(|| { ui.text(format!( "Alternate #{}/{} score={} (scroll to cycle)", clause_idx + 1, clauses.len(), clause.score() )); ui.separator(); let _wrap_token = ui.push_text_wrap_pos_with_pos(ui.current_font_size() * 20.0); let romaji = clause .romanized() .iter() .map(|x| x.romaji()) .collect::<Vec<&str>>() .join(" "); ui.text_wrapped(romaji); _wrap_token.pop(ui); }); } } } } } } fn add_root(&self, env: &mut Env, ui: &Ui, settings: &SettingsView, root: &Root) { for segment in root.segments() { self.add_segment(env, ui, settings, segment); } } pub fn ui(&mut self, env: &mut Env, ui: &Ui, settings: &SettingsView, show_raw: &mut bool) { if let Some(gloss) = &self.gloss { ui.
random
[ { "content": "fn add_gloss(ui: &Ui, gloss: &Gloss) {\n\n TreeNode::new(&format!(\"Gloss ({} {})\", gloss.pos(), gloss.gloss()))\n\n .default_open(false)\n\n .build(ui, || {\n\n wrap_bullet(ui, &format!(\"pos: {}\", gloss.pos()));\n\n wrap_bullet(ui, &format!(\"gloss: {}\", gloss.gloss()));\n\n if let Some(info) = gloss.info() {\n\n wrap_bullet(ui, &format!(\"info: {}\", info));\n\n }\n\n });\n\n}\n", "file_path": "src/view/raw.rs", "rank": 0, "score": 95140.17271370013 }, { "content": "pub fn translate(deepl_api_key: &str, text: &str) -> Result<Translation, deepl_api::Error> {\n\n let text = filter_text(text);\n\n let deepl = DeepL::new(deepl_api_key.to_string());\n\n let deepl_text = deepl\n\n .translate(\n\n None,\n\n TranslatableTextList {\n\n source_language: Some(\"JA\".into()),\n\n target_language: \"EN-US\".into(),\n\n texts: vec![text.to_string()],\n\n },\n\n )?\n\n .first()\n\n .unwrap()\n\n .text\n\n .clone();\n\n let deepl_usage = deepl.usage_information()?;\n\n Ok(Translation::DeepL {\n\n deepl_text,\n\n deepl_usage,\n\n })\n\n}\n", "file_path": "src/translation.rs", "rank": 1, "score": 89752.36829543547 }, { "content": "struct Shared {\n\n ichiran: Ichiran,\n\n _pg_daemon: Option<PostgresDaemon>,\n\n}\n\nimpl Glossator {\n\n pub fn new(settings: &SettingsView) -> Self {\n\n let ichiran = Ichiran::new(settings.ichiran_path.clone());\n\n let pg_daemon = match ichiran.conn_params() {\n\n Ok(conn_params) => {\n\n let pg_daemon = PostgresDaemon::new(\n\n &settings.postgres_path,\n\n &settings.db_path,\n\n conn_params,\n\n false,\n\n );\n\n Some(pg_daemon)\n\n }\n\n Err(_) => {\n\n log::warn!(\"could not get db conn params from ichiran\");\n\n None\n", "file_path": "src/gloss.rs", "rank": 2, "score": 85041.5906173569 }, { "content": "pub fn draw_kanji_text(\n\n ui: &Ui,\n\n env: &Env,\n\n text: &str,\n\n highlight: bool,\n\n stroke: bool,\n\n underline: UnderlineMode,\n\n ruby_text: RubyTextMode,\n\n) -> bool {\n\n let ruby_sz = match ruby_text {\n\n RubyTextMode::Text(text) => ui.calc_text_size(text),\n\n RubyTextMode::Pad => [0.0, ui.text_line_height()],\n\n RubyTextMode::None => [0.0, 0.0],\n\n };\n\n\n\n let _kanji_font_token = ui.push_font(env.get_font(TextStyle::Kanji));\n\n let kanji_sz = ui.calc_text_size(text);\n\n drop(_kanji_font_token);\n\n\n\n let vpad = match ruby_text {\n", "file_path": "src/view/mixins.rs", "rank": 3, "score": 84281.00024073922 }, { "content": "pub fn init() -> Option<ClipboardSupport> {\n\n ClipboardContext::new().ok().map(ClipboardSupport)\n\n}\n\n\n\nimpl ClipboardBackend for ClipboardSupport {\n\n fn get(&mut self) -> Option<String> {\n\n self.0.get_contents().ok()\n\n }\n\n fn set(&mut self, text: &str) {\n\n let _ = self.0.set_contents(text.to_owned());\n\n }\n\n}\n", "file_path": "src/clipboard.rs", "rank": 4, "score": 78390.10122770324 }, { "content": "pub fn setup() -> Option<JobObject> {\n\n unsafe { imp::setup() }\n\n}\n\n\n\n#[cfg(unix)]\n\nmod imp {\n\n use libc;\n\n use std::env;\n\n\n\n pub type Setup = ();\n\n\n\n pub unsafe fn setup() -> Option<()> {\n\n // There's a test case for the behavior of\n\n // when-cargo-is-killed-subprocesses-are-also-killed, but that requires\n\n // one cargo spawned to become its own session leader, so we do that\n\n // here.\n\n if env::var(\"__CARGO_TEST_SETSID_PLEASE_DONT_USE_ELSEWHERE\").is_ok() {\n\n libc::setsid();\n\n }\n\n Some(())\n", "file_path": "third-party/ichiran/src/job.rs", "rank": 5, "score": 72713.4840223174 }, { "content": "pub fn is_katakana(c: &char) -> bool {\n\n *c >= '\\u{30a0}' && *c <= '\\u{30ff}'\n\n}\n\n\n", "file_path": "third-party/ichiran/src/charset.rs", "rank": 6, "score": 72216.97541154946 }, { "content": "pub fn is_hiragana(c: &char) -> bool {\n\n *c >= '\\u{3041}' && *c <= '\\u{3096}'\n\n}\n\n\n", "file_path": "third-party/ichiran/src/charset.rs", "rank": 7, "score": 72216.97541154946 }, { "content": "pub fn is_japanese(c: &char) -> bool {\n\n is_hiragana(c)\n\n || is_katakana(c)\n\n || is_kanji(c)\n\n || is_radical(c)\n\n || is_half_katakana(c)\n\n || is_jp_symbol(c)\n\n || is_jp_misc(c)\n\n || is_full_alphanum(c)\n\n}\n", "file_path": "third-party/ichiran/src/charset.rs", "rank": 8, "score": 72216.97541154946 }, { "content": "pub fn is_radical(c: &char) -> bool {\n\n *c >= '\\u{2e80}' && *c <= '\\u{2fd5}'\n\n}\n\n\n", "file_path": "third-party/ichiran/src/charset.rs", "rank": 9, "score": 72216.97541154946 }, { "content": "pub fn is_kanji(c: &char) -> bool {\n\n (*c >= '\\u{3400}' && *c <= '\\u{4db5}')\n\n || (*c >= '\\u{4e00}' && *c <= '\\u{9fcb}')\n\n || (*c >= '\\u{f900}' && *c <= '\\u{fa6a}')\n\n}\n\n\n", "file_path": "third-party/ichiran/src/charset.rs", "rank": 10, "score": 72216.97541154946 }, { "content": "pub fn ichiran() -> (Ichiran, PostgresDaemon) {\n\n let ichiran_path =\n\n PathBuf::from(\"../../compat/ichiran-cli\").with_extension(std::env::consts::EXE_EXTENSION);\n\n let ichiran = Ichiran::new(ichiran_path);\n\n let conn_params = ichiran.conn_params().unwrap();\n\n let pgdaemon = PostgresDaemon::new(\n\n \"../../compat/pgsql/bin\",\n\n \"../../compat/pgsql/data\",\n\n conn_params,\n\n true,\n\n );\n\n (ichiran, pgdaemon)\n\n}\n", "file_path": "third-party/ichiran/src/fixture/mod.rs", "rank": 11, "score": 71095.72317284715 }, { "content": "pub fn is_jp_misc(c: &char) -> bool {\n\n (*c >= '\\u{31f0}' && *c <= '\\u{31ff}')\n\n || (*c >= '\\u{3220}' && *c <= '\\u{3243}')\n\n || (*c >= '\\u{3280}' && *c <= '\\u{337f}')\n\n}\n\n\n", "file_path": "third-party/ichiran/src/charset.rs", "rank": 12, "score": 70472.72156544967 }, { "content": "pub fn is_full_alphanum(c: &char) -> bool {\n\n *c >= '\\u{ff01}' && *c <= '\\u{ff5e}'\n\n}\n\n\n", "file_path": "third-party/ichiran/src/charset.rs", "rank": 13, "score": 70472.72156544967 }, { "content": "pub fn is_half_katakana(c: &char) -> bool {\n\n *c >= '\\u{ff5f}' && *c <= '\\u{ff9f}'\n\n}\n\n\n", "file_path": "third-party/ichiran/src/charset.rs", "rank": 14, "score": 70472.72156544967 }, { "content": "pub fn is_jp_symbol(c: &char) -> bool {\n\n *c >= '\\u{3000}' && *c <= '\\u{303f}'\n\n}\n\n\n", "file_path": "third-party/ichiran/src/charset.rs", "rank": 15, "score": 70472.72156544967 }, { "content": "pub fn help_marker(ui: &Ui, text: &str) {\n\n ui.text_colored(ui.style_color(StyleColor::TextDisabled), \"[?]\");\n\n if ui.is_item_hovered() {\n\n ui.tooltip_text(text);\n\n }\n\n}\n\n\n\npub enum UnderlineMode {\n\n Normal,\n\n Pad,\n\n None,\n\n}\n\n\n\npub enum RubyTextMode<'a> {\n\n Text(&'a str),\n\n Pad,\n\n None,\n\n}\n\n\n", "file_path": "src/view/mixins.rs", "rank": 16, "score": 68562.95803909507 }, { "content": "fn main() {\n\n let out_dir = PathBuf::from(env::var(\"OUT_DIR\").unwrap());\n\n\n\n let bindings = bindgen::Builder::default()\n\n .header(\"src/wrapper.h\")\n\n .header(\"imgui/backends/imgui_impl_win32.h\")\n\n .clang_args([\"-I\", \"imgui\"])\n\n .clang_args([\"-x\", \"c++\"])\n\n .whitelist_function(\"ImGui_ImplWin32_.*\")\n\n .blacklist_type(\"LPARAM\") // ugh\n\n .blacklist_type(\"LRESULT\")\n\n .blacklist_type(\"UINT\")\n\n .blacklist_type(\"WPARAM\")\n\n .blacklist_type(\"HWND\")\n\n .parse_callbacks(Box::new(bindgen::CargoCallbacks))\n\n .generate()\n\n .expect(\"unable to generate bindings\");\n\n bindings\n\n .write_to_file(out_dir.join(\"bindings.rs\"))\n\n .expect(\"could not write bindings\");\n\n\n\n cc::Build::new()\n\n .include(\"imgui\")\n\n .file(\"imgui/backends/imgui_impl_win32.cpp\")\n\n .compile(\"imgui-win32-sys\");\n\n}\n", "file_path": "third-party/imgui-win32-sys/build.rs", "rank": 17, "score": 67319.27084741686 }, { "content": "pub fn wrap_line(ui: &Ui, expected_width: f32) -> bool {\n\n let visible_x = ui.window_pos()[0] + ui.window_content_region_max()[0];\n\n let last_x = ui.item_rect_max()[0];\n\n let style = ui.clone_style();\n\n let next_x = last_x + style.item_spacing[0] + expected_width;\n\n if next_x < visible_x {\n\n ui.same_line();\n\n false\n\n } else {\n\n true\n\n }\n\n}\n", "file_path": "src/view/mixins.rs", "rank": 18, "score": 63793.58935183465 }, { "content": "fn filter_text(text: &str) -> &str {\n\n let pattern: &[_] = &['\"', '[', ']', '«', '»', ' '];\n\n text.trim_matches(pattern)\n\n}\n\n\n", "file_path": "src/translation.rs", "rank": 19, "score": 63001.124504016494 }, { "content": "pub fn lisp_escape_string<T: AsRef<str>>(text: T) -> String {\n\n let text = text.as_ref();\n\n let mut output = String::with_capacity(text.len());\n\n for c in text.chars() {\n\n match c {\n\n '\"' => {\n\n output += r#\"\\\"\"#;\n\n }\n\n '\\\\' => {\n\n output += r#\"\\\\\"#;\n\n }\n\n x => output.push(x),\n\n }\n\n }\n\n output\n\n}\n", "file_path": "third-party/ichiran/src/lisp.rs", "rank": 20, "score": 58416.60703854774 }, { "content": "pub fn lisp_interpret<T>(expr: &str) -> Result<T, IchiranError>\n\nwhere\n\n T: FromValue,\n\n{\n\n let interp = ketos::Interpreter::new();\n\n let result = interp.run_single_expr(expr, None)?;\n\n Ok(T::from_value(result).map_err(ketos::Error::ExecError)?)\n\n}\n\n\n", "file_path": "third-party/ichiran/src/lisp.rs", "rank": 21, "score": 58416.60703854774 }, { "content": "struct Inner {\n\n window: winit::window::Window,\n\n platform: WinitPlatform,\n\n imgui: imgui::Context,\n\n env: Env,\n\n renderer: imgui_dx11_renderer::Renderer,\n\n context: ComPtr<ID3D11DeviceContext>,\n\n main_rtv: ComPtr<ID3D11RenderTargetView>,\n\n swapchain: ComPtr<IDXGISwapChain>,\n\n device: ComPtr<ID3D11Device>,\n\n mousellhook: Option<HHOOK>,\n\n}\n\nimpl D3D11Renderer {\n\n pub fn new(settings: &SettingsView) -> Self {\n\n let event_loop = EventLoop::new();\n\n let window = Self::create_window_builder(settings)\n\n .build(&event_loop)\n\n .unwrap();\n\n\n\n let hwnd = if let RawWindowHandle::Win32(handle) = window.raw_window_handle() {\n", "file_path": "src/backend/d3d11.rs", "rank": 22, "score": 52615.86405146819 }, { "content": "struct Shared {\n\n inner: RefCell<Inner>,\n\n event_loop: Cell<Option<EventLoop<()>>>,\n\n}\n", "file_path": "src/backend/d3d11.rs", "rank": 23, "score": 52615.86405146819 }, { "content": "struct State {\n\n kanji_cache: LruCache<char, Kanji>,\n\n jmdict: Option<JmDictData>,\n\n}\n\n\n\nimpl Ichiran {\n\n pub fn new<P: Into<PathBuf>>(path: P) -> Self {\n\n Self {\n\n shared: Arc::new(Shared {\n\n path: path.into(),\n\n state: Mutex::new(State {\n\n kanji_cache: LruCache::new(512),\n\n jmdict: None,\n\n }),\n\n }),\n\n }\n\n }\n\n\n\n pub fn romanize<T: AsRef<str>>(&self, text: T, limit: u32) -> Result<Root, IchiranError> {\n\n assert!(limit > 0);\n", "file_path": "third-party/ichiran/src/lib.rs", "rank": 24, "score": 50007.723985272736 }, { "content": "struct Shared {\n\n path: PathBuf,\n\n state: Mutex<State>,\n\n}\n", "file_path": "third-party/ichiran/src/lib.rs", "rank": 25, "score": 50007.723985272736 }, { "content": "fn main() {\n\n if cfg!(target_os = \"windows\") {\n\n let mut res = winres::WindowsResource::new();\n\n res.set_icon(\"res/niinii.ico\");\n\n res.compile().unwrap();\n\n }\n\n}\n", "file_path": "build.rs", "rank": 26, "score": 48302.347414979056 }, { "content": "pub trait Renderer {\n\n fn main_loop(&mut self, app: &mut App);\n\n\n\n fn create_window_builder(settings: &SettingsView) -> window::WindowBuilder\n\n where\n\n Self: Sized,\n\n {\n\n let on_top = settings.on_top || settings.overlay_mode;\n\n let maximized = settings.overlay_mode;\n\n let decorations = !settings.overlay_mode;\n\n\n\n window::WindowBuilder::new()\n\n .with_title(\"niinii\")\n\n .with_inner_size(glutin::dpi::LogicalSize::new(768, 768))\n\n .with_transparent(true)\n\n .with_drag_and_drop(false)\n\n .with_maximized(maximized)\n\n .with_decorations(decorations)\n\n .with_always_on_top(on_top)\n\n }\n", "file_path": "src/backend/renderer.rs", "rank": 27, "score": 46784.34716964339 }, { "content": "fn main() {\n\n env_logger::init();\n\n\n\n let settings: SettingsView = File::open(STATE_PATH)\n\n .ok()\n\n .map(BufReader::new)\n\n .and_then(|x| serde_json::from_reader(x).ok())\n\n .unwrap_or_default();\n\n\n\n let mut app = App::new(settings);\n\n let mut renderer: Box<dyn Renderer> = match app.settings().active_renderer() {\n\n SupportedRenderer::Glow => Box::new(GlowRenderer::new(app.settings())),\n\n SupportedRenderer::Direct3D11 => Box::new(D3D11Renderer::new(app.settings())),\n\n };\n\n renderer.main_loop(&mut app);\n\n\n\n let writer = BufWriter::new(File::create(STATE_PATH).unwrap());\n\n serde_json::to_writer(writer, &app.settings()).unwrap();\n\n}\n", "file_path": "src/main.rs", "rank": 28, "score": 46687.91919969416 }, { "content": "extern IMGUI_IMPL_API LRESULT ImGui_ImplWin32_WndProcHandler(HWND hWnd,\n\n UINT msg,\n\n WPARAM wParam,\n", "file_path": "third-party/imgui-win32-sys/src/wrapper.h", "rank": 29, "score": 37278.34786287907 }, { "content": "fn id<T>(x: &T) -> *const c_void {\n\n x as *const _ as *const _\n\n}\n", "file_path": "src/view/mod.rs", "rank": 30, "score": 34421.328353801284 }, { "content": "fn add_word(ui: &Ui, word: &Word) {\n\n let meta = word.meta();\n\n TreeNode::new(&format!(\"Word ({})\", meta.text()))\n\n .default_open(true)\n\n .build(ui, || match word {\n\n Word::Plain(plain) => {\n\n add_plain(ui, plain);\n\n }\n\n Word::Compound(compound) => {\n\n add_compound(ui, compound);\n\n }\n\n });\n\n}\n", "file_path": "src/view/raw.rs", "rank": 31, "score": 34326.93282837708 }, { "content": "fn add_plain(ui: &Ui, plain: &Plain) {\n\n let meta = plain.meta();\n\n TreeNode::new(&format!(\"Plain ({})\", meta.reading()))\n\n .default_open(true)\n\n .build(ui, || {\n\n add_meta(ui, meta);\n\n if let Some(seq) = plain.seq() {\n\n wrap_bullet(ui, &format!(\"seq: {}\", seq));\n\n }\n\n if let Some(suffix) = plain.suffix() {\n\n wrap_bullet(ui, &format!(\"suffix: {}\", suffix));\n\n }\n\n if let Some(counter) = plain.counter() {\n\n add_counter(ui, counter);\n\n }\n\n TreeNode::new(&format!(\"Glosses ({})\", plain.gloss().len()))\n\n .default_open(false)\n\n .build(ui, || {\n\n for gloss in plain.gloss() {\n\n let _id_token = ui.push_id(id(gloss));\n", "file_path": "src/view/raw.rs", "rank": 32, "score": 34326.93282837708 }, { "content": "fn add_clause(ui: &Ui, clause: &Clause) {\n\n TreeNode::new(&format!(\"Clause (score: {})\", clause.score()))\n\n .default_open(true)\n\n .build(ui, || {\n\n for romanized in clause.romanized() {\n\n let _id_token = ui.push_id(id(romanized));\n\n TreeNode::new(&format!(\"Romanized ({})\", romanized.romaji()))\n\n .default_open(true)\n\n .build(ui, || {\n\n add_term(ui, romanized.term());\n\n });\n\n }\n\n });\n\n}\n", "file_path": "src/view/raw.rs", "rank": 33, "score": 34326.93282837708 }, { "content": "fn add_prop(ui: &Ui, prop: &Property) {\n\n wrap_bullet(\n\n ui,\n\n &format!(\n\n \"Property ([{}] {} {} {})\",\n\n prop.pos(),\n\n prop.kind(),\n\n if prop.fml() { \"fml\" } else { \"non-fml\" },\n\n if prop.neg() { \"neg\" } else { \"non-neg\" }\n\n ),\n\n );\n\n}\n", "file_path": "src/view/raw.rs", "rank": 34, "score": 34326.93282837708 }, { "content": "fn add_conj(ui: &Ui, conj: &Conjugation) {\n\n TreeNode::new(&\"Conjugation\".to_string())\n\n .default_open(true)\n\n .build(ui, || {\n\n if let Some(reading) = conj.reading() {\n\n wrap_bullet(ui, &format!(\"reading: {}\", reading));\n\n }\n\n TreeNode::new(&format!(\"Properties ({})\", conj.prop().len()))\n\n .default_open(false)\n\n .build(ui, || {\n\n for prop in conj.prop() {\n\n let _id_token = ui.push_id(id(prop));\n\n add_prop(ui, prop);\n\n }\n\n });\n\n TreeNode::new(&format!(\"Glosses ({})\", conj.gloss().len()))\n\n .default_open(false)\n\n .build(ui, || {\n\n for gloss in conj.gloss() {\n\n let _id_token = ui.push_id(id(gloss));\n", "file_path": "src/view/raw.rs", "rank": 35, "score": 34326.93282837708 }, { "content": "fn add_meta(ui: &Ui, meta: &Meta) {\n\n wrap_bullet(ui, &format!(\"reading: {}\", meta.reading()));\n\n wrap_bullet(ui, &format!(\"text: {}\", meta.text()));\n\n wrap_bullet(ui, &format!(\"kana: {}\", meta.kana()));\n\n wrap_bullet(ui, &format!(\"score: {}\", meta.score()));\n\n}\n", "file_path": "src/view/raw.rs", "rank": 36, "score": 34326.93282837708 }, { "content": "fn add_compound(ui: &Ui, compound: &Compound) {\n\n let meta = compound.meta();\n\n TreeNode::new(&format!(\"Compound ({})\", compound.compound().join(\" + \")))\n\n .default_open(true)\n\n .build(ui, || {\n\n add_meta(ui, meta);\n\n TreeNode::new(&format!(\"Components ({})\", compound.components().len()))\n\n .default_open(false)\n\n .build(ui, || {\n\n for component in compound.components() {\n\n let _id_token = ui.push_id(id(component));\n\n add_term(ui, component);\n\n }\n\n });\n\n });\n\n}\n", "file_path": "src/view/raw.rs", "rank": 37, "score": 34326.93282837708 }, { "content": "fn add_segment(ui: &Ui, segment: &Segment) {\n\n match segment {\n\n Segment::Skipped(_) => wrap_bullet(ui, &format!(\"{:?}\", segment)),\n\n Segment::Clauses(clauses) => {\n\n TreeNode::new(\"Segment\").default_open(true).build(ui, || {\n\n for clause in clauses {\n\n let _id_token = ui.push_id(id(clause));\n\n add_clause(ui, clause);\n\n }\n\n });\n\n }\n\n }\n\n}\n", "file_path": "src/view/raw.rs", "rank": 38, "score": 34326.93282837708 }, { "content": "fn add_term(ui: &Ui, term: &Term) {\n\n match term {\n\n Term::Word(word) => {\n\n add_word(ui, word);\n\n }\n\n Term::Alternative(alts) => {\n\n TreeNode::new(\"Alternative\")\n\n .default_open(true)\n\n .build(ui, || {\n\n for alt in alts.alts() {\n\n let _id_token = ui.push_id(id(alt));\n\n add_word(ui, alt);\n\n }\n\n });\n\n }\n\n }\n\n}\n", "file_path": "src/view/raw.rs", "rank": 39, "score": 34326.93282837708 }, { "content": "fn glow_context(window: &Window) -> glow::Context {\n\n unsafe { glow::Context::from_loader_function(|s| window.get_proc_address(s).cast()) }\n\n}\n", "file_path": "src/backend/glow.rs", "rank": 40, "score": 34326.93282837708 }, { "content": "fn wrap_bullet(ui: &Ui, text: &str) {\n\n ui.bullet();\n\n ui.text_wrapped(text);\n\n}\n\n\n\npub struct RawView<'a> {\n\n root: &'a Root,\n\n}\n\nimpl<'a> RawView<'a> {\n\n pub fn new(root: &'a Root) -> Self {\n\n Self { root }\n\n }\n\n pub fn ui(&mut self, _env: &mut Env, ui: &Ui) {\n\n let _wrap_token = ui.push_text_wrap_pos();\n\n add_root(ui, self.root);\n\n }\n\n}\n\n\n", "file_path": "src/view/raw.rs", "rank": 41, "score": 34326.93282837708 }, { "content": "fn add_root(ui: &Ui, root: &Root) {\n\n TreeNode::new(\"Root\").default_open(true).build(ui, || {\n\n for segment in root.segments() {\n\n let _id_token = ui.push_id(id(segment));\n\n add_segment(ui, segment);\n\n }\n\n });\n\n}\n", "file_path": "src/view/raw.rs", "rank": 42, "score": 34326.93282837708 }, { "content": "fn add_counter(ui: &Ui, counter: &Counter) {\n\n wrap_bullet(\n\n ui,\n\n &format!(\n\n \"Counter ({} {})\",\n\n counter.value(),\n\n if counter.ordinal() {\n\n \"ordinal\"\n\n } else {\n\n \"non-ordinal\"\n\n }\n\n ),\n\n );\n\n}\n", "file_path": "src/view/raw.rs", "rank": 43, "score": 34326.93282837708 }, { "content": "use deepl_api::{DeepL, TranslatableTextList};\n\n\n\n#[derive(Debug)]\n\npub enum Translation {\n\n DeepL {\n\n deepl_text: String,\n\n deepl_usage: deepl_api::UsageInformation,\n\n },\n\n}\n\n\n", "file_path": "src/translation.rs", "rank": 44, "score": 32677.995208941302 }, { "content": "fn decompress_gzip_font(font_data: &[u8]) -> Vec<u8> {\n\n let mut decoder = GzDecoder::new(font_data);\n\n let mut font_buf = vec![];\n\n decoder.read_to_end(&mut font_buf).unwrap();\n\n font_buf\n\n}\n\n\n", "file_path": "src/backend/renderer.rs", "rank": 45, "score": 32662.005923496108 }, { "content": " }\n\n };\n\n Self {\n\n shared: Arc::new(Shared {\n\n ichiran,\n\n _pg_daemon: pg_daemon,\n\n }),\n\n }\n\n }\n\n pub fn gloss(&self, text: &str, variants: u32) -> Result<Gloss, GlossError> {\n\n if text.len() > MAX_TEXT_LENGTH {\n\n return Err(GlossError::TextTooLong { length: text.len() });\n\n }\n\n let ichiran = &self.shared.ichiran;\n\n\n\n let mut root = None;\n\n let mut kanji_info = None;\n\n let mut jmdict_data = None;\n\n\n\n let start = Instant::now();\n", "file_path": "src/gloss.rs", "rank": 46, "score": 32598.821240378584 }, { "content": "use std::{\n\n collections::HashMap,\n\n sync::Arc,\n\n time::{Duration, Instant},\n\n};\n\n\n\nuse ichiran::{\n\n kanji::Kanji, pgdaemon::PostgresDaemon, romanize::Root, Ichiran, IchiranError, JmDictData,\n\n};\n\nuse thiserror::Error;\n\n\n\nuse crate::view::settings::SettingsView;\n\n\n\nconst MAX_TEXT_LENGTH: usize = 512;\n\n\n\n#[derive(Error, Debug)]\n\npub enum GlossError {\n\n #[error(transparent)]\n\n Ichiran(#[from] IchiranError),\n\n #[error(\"Text too long ({length}/{MAX_TEXT_LENGTH} chars)\")]\n", "file_path": "src/gloss.rs", "rank": 47, "score": 32593.182549540048 }, { "content": " TextTooLong { length: usize },\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Gloss {\n\n pub elapsed: Duration,\n\n\n\n pub root: Root,\n\n pub kanji_info: HashMap<char, Kanji>,\n\n pub jmdict_data: JmDictData,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Glossator {\n\n shared: Arc<Shared>,\n\n}\n", "file_path": "src/gloss.rs", "rank": 48, "score": 32592.43616581896 }, { "content": " rayon::scope(|s| {\n\n s.spawn(|_| root = Some(ichiran.romanize(&text, variants)));\n\n s.spawn(|_| kanji_info = Some(ichiran.kanji_from_str(&text)));\n\n s.spawn(|_| jmdict_data = Some(ichiran.jmdict_data()));\n\n });\n\n let elapsed = start.elapsed();\n\n\n\n Ok(Gloss {\n\n elapsed,\n\n root: root.unwrap()?,\n\n kanji_info: kanji_info.unwrap()?,\n\n jmdict_data: jmdict_data.unwrap()?,\n\n })\n\n }\n\n}\n", "file_path": "src/gloss.rs", "rank": 49, "score": 32588.373266174785 }, { "content": "fn create_window(window: winit::window::WindowBuilder) -> (EventLoop<()>, Window) {\n\n let event_loop = EventLoop::new();\n\n let window = glutin::ContextBuilder::new()\n\n .with_vsync(true)\n\n .build_windowed(window, &event_loop)\n\n .expect(\"could not create window\");\n\n let window = unsafe {\n\n window\n\n .make_current()\n\n .expect(\"could not make window context current\")\n\n };\n\n (event_loop, window)\n\n}\n\n\n", "file_path": "src/backend/glow.rs", "rank": 50, "score": 29658.383034442206 }, { "content": "use std::{env, path::PathBuf};\n\n\n", "file_path": "third-party/imgui-win32-sys/build.rs", "rank": 51, "score": 26862.17753156947 }, { "content": "#![allow(non_upper_case_globals)]\n\n#![allow(non_camel_case_types)]\n\n#![allow(non_snake_case)]\n\n\n\nuse winapi::shared::{\n\n minwindef::{LPARAM, LRESULT, UINT, WPARAM},\n\n windef::HWND,\n\n};\n\n\n\ninclude!(concat!(env!(\"OUT_DIR\"), \"/bindings.rs\"));\n", "file_path": "third-party/imgui-win32-sys/src/lib.rs", "rank": 52, "score": 25722.067080725068 }, { "content": "use imgui::*;\n\n\n\nuse crate::translation::Translation;\n\n\n\npub struct DeepLView<'a>(&'a Translation);\n\n\n\nimpl<'a> DeepLView<'a> {\n\n pub fn new(translation: &'a Translation) -> Self {\n\n DeepLView(translation)\n\n }\n\n pub fn ui(&self, ui: &Ui) {\n\n let _wrap_token = ui.push_text_wrap_pos_with_pos(0.0);\n\n let Translation::DeepL { deepl_text, .. } = self.0;\n\n ui.text(deepl_text);\n\n }\n\n}\n", "file_path": "src/view/deepl.rs", "rank": 54, "score": 18.46779480880167 }, { "content": " settings: SettingsView,\n\n state: State,\n\n glossator: Glossator,\n\n rikai: RikaiView,\n\n}\n\n\n\nimpl App {\n\n pub fn new(settings: SettingsView) -> Self {\n\n let (channel_tx, channel_rx) = mpsc::channel();\n\n let glossator = Glossator::new(&settings);\n\n App {\n\n channel_tx,\n\n channel_rx,\n\n input_text: \"\".into(),\n\n last_clipboard: \"\".into(),\n\n request_gloss_text: None,\n\n show_imgui_demo: false,\n\n show_settings: false,\n\n show_raw: false,\n\n show_metrics_window: false,\n", "file_path": "src/app.rs", "rank": 56, "score": 13.737676773528094 }, { "content": " shared: Rc::downgrade(&self.shared),\n\n }\n\n }\n\n}\n\nimpl WeakD3D11Renderer {\n\n pub fn new() -> Self {\n\n WeakD3D11Renderer {\n\n shared: Weak::new(),\n\n }\n\n }\n\n pub fn upgrade(&self) -> Option<D3D11Renderer> {\n\n let shared = self.shared.upgrade()?;\n\n Some(D3D11Renderer { shared })\n\n }\n\n}\n\nimpl Default for WeakD3D11Renderer {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n", "file_path": "src/backend/d3d11.rs", "rank": 57, "score": 12.60750565153447 }, { "content": "use ichiran::kanji::Kanji;\n\nuse imgui::*;\n\n\n\nuse crate::backend::renderer::{Env, TextStyle};\n\n\n\npub struct KanjiView<'a> {\n\n kanji: &'a Kanji,\n\n wrap_x: f32,\n\n}\n\nimpl<'a> KanjiView<'a> {\n\n pub fn new(kanji: &'a Kanji, wrap_x: f32) -> Self {\n\n KanjiView { kanji, wrap_x }\n\n }\n\n\n\n fn add_kanji(&mut self, env: &mut Env, ui: &Ui, kanji: &Kanji) {\n\n {\n\n let _kanji_font_token = ui.push_font(env.get_font(TextStyle::Kanji));\n\n ui.text(kanji.text());\n\n }\n\n ui.same_line();\n", "file_path": "src/view/kanji.rs", "rank": 58, "score": 12.442562023889877 }, { "content": "use std::sync::mpsc;\n\n\n\nuse imgui::*;\n\n\n\nuse crate::{\n\n backend::renderer::Env,\n\n gloss::{Gloss, GlossError, Glossator},\n\n translation::{self, Translation},\n\n view::{mixins::help_marker, rikai::RikaiView, settings::SettingsView},\n\n};\n\n\n\nconst ERROR_MODAL_TITLE: &str = \"Error\";\n\n\n\n#[derive(thiserror::Error, Debug)]\n", "file_path": "src/app.rs", "rank": 59, "score": 12.43237079335756 }, { "content": "\n\n#[macro_use]\n\npub mod util;\n\n\n\npub mod app;\n\npub mod backend;\n\npub mod clipboard;\n\npub mod view;\n\npub mod gloss;\n\npub mod translation;\n", "file_path": "src/lib.rs", "rank": 60, "score": 12.137312496203847 }, { "content": " env: Env,\n\n renderer: AutoRenderer,\n\n}\n\nimpl GlowRenderer {\n\n pub fn new(settings: &SettingsView) -> Self {\n\n let (event_loop, window) = create_window(Self::create_window_builder(settings));\n\n let mut imgui = Self::create_imgui(settings);\n\n let platform = Self::create_platform(&mut imgui, window.window());\n\n let mut env = Env::default();\n\n Self::create_fonts(&mut imgui, &mut env, &platform);\n\n\n\n let gl = glow_context(&window);\n\n\n\n let renderer = imgui_glow_renderer::AutoRenderer::initialize(gl, &mut imgui)\n\n .expect(\"failed to create renderer\");\n\n\n\n Self {\n\n event_loop,\n\n window,\n\n platform,\n", "file_path": "src/backend/glow.rs", "rank": 61, "score": 12.069431716548642 }, { "content": "use super::IchiranError;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{collections::HashMap, path::Path};\n\n\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\npub struct KwPos {\n\n pub id: u32,\n\n pub kw: String,\n\n pub descr: String,\n\n pub ents: String,\n\n}\n\n\n\n#[derive(Debug, Clone, Default, Deserialize, Serialize)]\n\npub struct JmDictData {\n\n pub kwpos_by_kw: HashMap<String, KwPos>,\n\n}\n\nimpl JmDictData {\n\n pub fn new(jmdict_path: &Path) -> Result<Self, IchiranError> {\n\n let mut kwpos_by_kw: HashMap<String, KwPos> = HashMap::new();\n\n\n", "file_path": "third-party/ichiran/src/jmdict_data.rs", "rank": 62, "score": 11.88908266489989 }, { "content": "use std::collections::HashMap;\n\n\n\nuse ichiran::{kanji::Kanji, romanize::*, JmDictData};\n\nuse imgui::*;\n\n\n\nuse crate::backend::renderer::Env;\n\n\n\nuse super::kanji::KanjiView;\n\nuse super::mixins::*;\n\nuse super::settings::SettingsView;\n\n\n\npub struct TermView<'a> {\n\n jmdict_data: &'a JmDictData,\n\n kanji_info: &'a HashMap<char, Kanji>,\n\n romaji: &'a Romanized,\n\n wrap_x: f32,\n\n}\n\nimpl<'a> TermView<'a> {\n\n pub fn new(\n\n jmdict_data: &'a JmDictData,\n", "file_path": "src/view/term.rs", "rank": 63, "score": 11.59136251127097 }, { "content": " pub auto_translate: bool,\n\n pub deepl_api_key: String,\n\n\n\n pub watch_clipboard: bool,\n\n pub show_manual_input: bool,\n\n pub style: Option<Vec<u8>>,\n\n}\n\nimpl Default for SettingsView {\n\n fn default() -> Self {\n\n Self {\n\n ichiran_path: Default::default(),\n\n postgres_path: Default::default(),\n\n db_path: Default::default(),\n\n\n\n renderer_type_idx: Default::default(),\n\n transparent: Default::default(),\n\n on_top: false,\n\n overlay_mode: false,\n\n\n\n ruby_text_type_idx: DisplayRubyText::None as usize,\n", "file_path": "src/view/settings.rs", "rank": 64, "score": 11.154043113467376 }, { "content": " let should_translate = !gloss.root.is_flat();\n\n if self.settings.auto_translate && should_translate {\n\n self.request_translation(&gloss.root.text_flatten());\n\n } else {\n\n self.transition(ui, State::None);\n\n self.rikai.set_translation(None);\n\n }\n\n self.rikai.set_gloss(Some(gloss));\n\n }\n\n Ok(Message::Translation(Ok(translation))) => {\n\n self.rikai.set_translation(Some(translation));\n\n self.transition(ui, State::None)\n\n }\n\n Ok(Message::Gloss(Err(err))) => {\n\n self.transition(ui, State::Error(err.into()));\n\n }\n\n Ok(Message::Translation(Err(err))) => {\n\n self.transition(ui, State::Error(err.into()));\n\n }\n\n Err(mpsc::TryRecvError::Empty) => {}\n", "file_path": "src/app.rs", "rank": 65, "score": 10.908710564121638 }, { "content": "use imgui::*;\n\nuse num_derive::FromPrimitive;\n\nuse num_traits::FromPrimitive;\n\nuse serde::{Deserialize, Serialize};\n\nuse strum::VariantNames;\n\nuse strum_macros::{EnumString, EnumVariantNames};\n\n\n\nuse super::mixins;\n\n\n\n#[derive(FromPrimitive, EnumString, EnumVariantNames)]\n\npub enum SupportedRenderer {\n\n Glow = 0,\n\n #[cfg(windows)]\n\n Direct3D11 = 1,\n\n}\n\n\n\n#[derive(Copy, Clone, PartialEq, Eq, FromPrimitive, EnumString, EnumVariantNames)]\n\npub enum DisplayRubyText {\n\n None = 0,\n\n Furigana = 1,\n", "file_path": "src/view/settings.rs", "rank": 68, "score": 9.817746101314263 }, { "content": " more_variants: true,\n\n stroke_text: true,\n\n\n\n auto_translate: false,\n\n deepl_api_key: Default::default(),\n\n\n\n watch_clipboard: true,\n\n show_manual_input: true,\n\n style: None,\n\n }\n\n }\n\n}\n\nimpl SettingsView {\n\n pub fn ui(&mut self, ui: &mut Ui) {\n\n if CollapsingHeader::new(\"Ichiran\")\n\n .default_open(true)\n\n .build(ui)\n\n {\n\n ui.input_text(\"ichiran-cli*\", &mut self.ichiran_path)\n\n .build();\n", "file_path": "src/view/settings.rs", "rank": 69, "score": 9.733126029852176 }, { "content": " show_style_editor: false,\n\n settings,\n\n state: State::None,\n\n glossator,\n\n rikai: RikaiView::new(),\n\n }\n\n }\n\n\n\n fn request_gloss(&self, text: &str) {\n\n let channel_tx = self.channel_tx.clone();\n\n let glossator = &self.glossator;\n\n let text = text.to_owned();\n\n let variants = if self.settings.more_variants { 5 } else { 1 };\n\n rayon::spawn(enclose! { (glossator) move || {\n\n let gloss = glossator.gloss(&text, variants);\n\n let _ = channel_tx.send(Message::Gloss(gloss));\n\n }});\n\n }\n\n\n\n fn request_translation(&self, text: &str) {\n", "file_path": "src/app.rs", "rank": 70, "score": 9.671139001510156 }, { "content": " handle.hwnd\n\n } else {\n\n unreachable!()\n\n };\n\n\n\n let (swapchain, device, context) = unsafe { create_device(hwnd.cast()) }.unwrap();\n\n let main_rtv = unsafe { create_render_target(&swapchain, &device) };\n\n\n\n let mut imgui = Self::create_imgui(settings);\n\n let platform = Self::create_platform(&mut imgui, &window);\n\n let mut env = Env::default();\n\n Self::create_fonts(&mut imgui, &mut env, &platform);\n\n\n\n let renderer =\n\n unsafe { imgui_dx11_renderer::Renderer::new(&mut imgui, device.clone()).unwrap() };\n\n\n\n let mut mousellhook: Option<HHOOK> = None;\n\n if settings.overlay_mode {\n\n unsafe {\n\n let style = winuser::GetWindowLongA(hwnd as *mut _, winuser::GWL_EXSTYLE);\n", "file_path": "src/backend/d3d11.rs", "rank": 71, "score": 9.665798214137736 }, { "content": " }\n\n unsafe impl Send for JobObject {}\n\n unsafe impl Sync for JobObject {}\n\n\n\n pub struct Handle {\n\n inner: HANDLE,\n\n }\n\n\n\n fn last_err() -> io::Error {\n\n io::Error::last_os_error()\n\n }\n\n\n\n pub unsafe fn setup() -> Option<JobObject> {\n\n // Creates a new job object for us to use and then adds ourselves to it.\n\n // Note that all errors are basically ignored in this function,\n\n // intentionally. Job objects are \"relatively new\" in Windows,\n\n // particularly the ability to support nested job objects. Older\n\n // Windows installs don't support this ability. We probably don't want\n\n // to force Cargo to abort in this situation or force others to *not*\n\n // use job objects, so we instead just ignore errors and assume that\n", "file_path": "third-party/ichiran/src/job.rs", "rank": 72, "score": 9.58972554878635 }, { "content": " gloss: Vec<Gloss>,\n\n #[serde(default)]\n\n conj: Vec<Conjugation>,\n\n\n\n counter: Option<Counter>,\n\n suffix: Option<String>,\n\n}\n\nimpl Plain {\n\n // Get the meta of this word.\n\n pub fn meta(&self) -> &Meta {\n\n &self.meta\n\n }\n\n /// Get the sequence number of this word.\n\n pub fn seq(&self) -> Option<u32> {\n\n self.seq\n\n }\n\n /// Get a list of glosses.\n\n pub fn gloss(&self) -> &[Gloss] {\n\n &self.gloss\n\n }\n", "file_path": "third-party/ichiran/src/romanize.rs", "rank": 73, "score": 9.555777836596468 }, { "content": "use clipboard::{ClipboardContext, ClipboardProvider};\n\nuse imgui::ClipboardBackend;\n\n\n\npub struct ClipboardSupport(ClipboardContext);\n\n\n", "file_path": "src/clipboard.rs", "rank": 74, "score": 9.448663296291183 }, { "content": "use std::time::Instant;\n\n\n\nuse glow::HasContext;\n\nuse glutin::{\n\n event_loop::{ControlFlow, EventLoop},\n\n platform::run_return::EventLoopExtRunReturn,\n\n};\n\nuse imgui_glow_renderer::AutoRenderer;\n\nuse imgui_winit_support::WinitPlatform;\n\n\n\nuse super::renderer::{Env, Renderer};\n\nuse crate::{app::App, view::settings::SettingsView};\n\n\n\npub type Window = glutin::WindowedContext<glutin::PossiblyCurrent>;\n\n\n\npub struct GlowRenderer {\n\n event_loop: EventLoop<()>,\n\n window: Window,\n\n platform: WinitPlatform,\n\n imgui: imgui::Context,\n", "file_path": "src/backend/glow.rs", "rank": 75, "score": 9.373927310597335 }, { "content": "use std::{collections::HashMap, io::Read, path::PathBuf};\n\n\n\nuse flate2::read::GzDecoder;\n\nuse glutin::{platform::windows::WindowBuilderExtWindows, window};\n\nuse imgui::{FontConfig, FontGlyphRanges, FontId, FontSource};\n\nuse imgui_winit_support::WinitPlatform;\n\n\n\nuse crate::clipboard;\n\nuse crate::{app::App, view::settings::SettingsView};\n\n\n\nstatic SARASA_MONO_J_REGULAR: &[u8] = include_bytes!(\"../../res/sarasa-mono-j-regular.ttf.gz\");\n\n\n\n#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]\n\npub enum TextStyle {\n\n Kanji,\n\n Body,\n\n}\n\n\n\n#[derive(Default)]\n\npub struct Env {\n\n fonts: HashMap<TextStyle, FontId>,\n\n}\n\nimpl Env {\n\n pub fn get_font(&self, style: TextStyle) -> FontId {\n\n *self.fonts.get(&style).unwrap()\n\n }\n\n}\n\n\n", "file_path": "src/backend/renderer.rs", "rank": 76, "score": 9.155218066282684 }, { "content": " winuser::SetWindowLongA(\n\n hwnd as *mut _,\n\n winuser::GWL_EXSTYLE,\n\n (style as u32 | winuser::WS_EX_LAYERED) as i32,\n\n );\n\n mousellhook.replace(SetWindowsHookExA(\n\n WH_MOUSE_LL,\n\n Some(low_level_mouse_proc),\n\n ptr::null_mut(),\n\n 0,\n\n ));\n\n }\n\n }\n\n\n\n let d3d11_renderer = Self {\n\n shared: Rc::new(Shared {\n\n inner: RefCell::new(Inner {\n\n window,\n\n platform,\n\n imgui,\n", "file_path": "src/backend/d3d11.rs", "rank": 77, "score": 8.960855801882852 }, { "content": " }\n\n /// Get the score of this term.\n\n pub fn score(&self) -> u32 {\n\n self.score\n\n }\n\n}\n\n\n\n/// Gloss for a word.\n\n#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\n#[serde(deny_unknown_fields)]\n\npub struct Gloss {\n\n pos: String,\n\n gloss: String,\n\n info: Option<String>,\n\n}\n\nimpl Gloss {\n\n /// Get part-of-speech info.\n\n pub fn pos(&self) -> &str {\n\n self.pos.as_str()\n\n }\n", "file_path": "third-party/ichiran/src/romanize.rs", "rank": 78, "score": 8.873157516289059 }, { "content": "## Why not use...\n\nThis is a tool created to service a personal need, and may not be useful to you.\n\nBelow, I laid out my personal justification for investing time into creating\n\nthis tool. If you agree, then this tool may be useful for you.\n\n\n\n**Why not use MeCab, JParser, ChaSen, Jisho etc.?**: In my experience ichiran is\n\nmuch better at segmentation, provides more metadata, and makes fewer mistakes.\n\n\n\n**Why not use rikai(kun|chan), JGlossator?**: They don't do segmentation.\n\n\n\n**Why not use DeepL, Google Translate, etc.?**: I want a gloss, not a\n\ntranslation tool. If I ever integrate translation features, I'd like to do so in\n\na way that supplements the gloss rather than dumping text.\n\n\n\n**Why not use the web frontend [ichi.moe](https://ichi.moe)?**: \n\nThere are some features I'd like to experiment with to improve the glossing\n\nexperience.\n\n\n\n## Build\n\nTODO\n\n\n\n## Known Issues\n\n- High CPU usage when out of focus\n\n- Missing characters when rendering (displays as <?>)\n\n\n\n## Troubleshooting\n\nTODO\n\n\n\n## Third-party\n", "file_path": "README.md", "rank": 79, "score": 8.728538610754999 }, { "content": "use std::{\n\n path::{Path, PathBuf},\n\n process::{Command, Stdio},\n\n};\n\n\n\nuse crate::{\n\n job::{self, JobObject},\n\n ConnParams,\n\n};\n\n\n\npub struct PostgresDaemon {\n\n pg_bin_dir: PathBuf,\n\n data_path: PathBuf,\n\n pg_proc: Result<std::process::Child, std::io::Error>,\n\n _job_obj: Option<JobObject>,\n\n silent: bool,\n\n}\n\nimpl PostgresDaemon {\n\n pub fn new<N: Into<PathBuf>, M: Into<PathBuf>>(\n\n pg_bin_dir: N,\n", "file_path": "third-party/ichiran/src/pgdaemon.rs", "rank": 80, "score": 8.619721939228839 }, { "content": " )\n\n }\n\n .to_vec(),\n\n );\n\n } else {\n\n self.style = None;\n\n }\n\n }\n\n pub fn style(&self) -> Option<imgui::Style> {\n\n self.style\n\n .as_ref()\n\n .map(|style| unsafe { std::ptr::read(style.as_ptr() as *const _) })\n\n }\n\n}\n", "file_path": "src/view/settings.rs", "rank": 81, "score": 8.375430674885298 }, { "content": " use super::*;\n\n use crate::fixture;\n\n\n\n #[test]\n\n fn test_pos_split() {\n\n let gloss = Gloss {\n\n pos: \"[n,n-adv,prt]\".to_owned(),\n\n gloss: \"\".to_owned(),\n\n info: None,\n\n };\n\n assert_eq!(gloss.pos_split(), vec![\"n\", \"n-adv\", \"prt\"]);\n\n }\n\n\n\n #[test]\n\n fn test_match() {\n\n let (ichiran, _pg) = fixture::ichiran();\n\n let nikaime = ichiran.romanize(\"2回目\", 1).unwrap();\n\n let nikaime_gold = Root(vec![Segment::Clauses(vec![Clause(\n\n vec![Romanized(\n\n \"nikaime\".into(),\n", "file_path": "third-party/ichiran/src/romanize.rs", "rank": 82, "score": 8.374994718168809 }, { "content": "pub enum Segment {\n\n Skipped(String),\n\n Clauses(Vec<Clause>),\n\n}\n\n\n\n/// A clause, representing a segmented romanization.\n\n#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\n#[serde(deny_unknown_fields)]\n\npub struct Clause(Vec<Romanized>, i32);\n\nimpl Clause {\n\n /// Get all romanized blocks in this clause.\n\n pub fn romanized(&self) -> &[Romanized] {\n\n &self.0\n\n }\n\n /// Get the cumulative score of this clause.\n\n pub fn score(&self) -> i32 {\n\n self.1\n\n }\n\n}\n\n\n", "file_path": "third-party/ichiran/src/romanize.rs", "rank": 83, "score": 8.31790778608575 }, { "content": " /// Get individual part-of-speech info.\n\n pub fn pos_split(&self) -> Vec<&str> {\n\n lazy_static! {\n\n static ref RE: Regex = Regex::new(r\"[\\w-]+\").unwrap();\n\n }\n\n let captures = RE.captures_iter(self.pos.as_str());\n\n captures\n\n .filter_map(|cap| cap.get(0))\n\n .map(|m| m.as_str())\n\n .collect::<Vec<&str>>()\n\n }\n\n /// Get the gloss explanation.\n\n pub fn gloss(&self) -> &str {\n\n self.gloss.as_str()\n\n }\n\n /// Get additional information.\n\n pub fn info(&self) -> Option<&str> {\n\n self.info.as_deref()\n\n }\n\n}\n", "file_path": "third-party/ichiran/src/romanize.rs", "rank": 84, "score": 8.112943899852368 }, { "content": " }\n\n if ui.button_with_size(\"Gloss\", [120.0, 0.0]) {\n\n self.request_gloss_text = Some(self.input_text.clone());\n\n }\n\n ui.same_line();\n\n\n\n let should_translate = self\n\n .rikai\n\n .gloss()\n\n .map_or_else(|| false, |x| !x.root.is_flat());\n\n {\n\n let mut _disable_tl =\n\n ui.begin_disabled(!should_translate || self.rikai.translation().is_some());\n\n if ui.button_with_size(\"Translate\", [120.0, 0.0]) {\n\n self.transition(ui, State::Processing);\n\n if let Some(gloss) = self.rikai.gloss() {\n\n self.request_translation(&gloss.root.text_flatten());\n\n }\n\n }\n\n }\n", "file_path": "src/app.rs", "rank": 85, "score": 7.95736970628202 }, { "content": "};\n\n\n\nuse lru::LruCache;\n\n\n\nuse charset::is_kanji;\n\npub use error::IchiranError;\n\npub use jmdict_data::JmDictData;\n\nuse kanji::Kanji;\n\nuse lisp::*;\n\nuse romanize::Root;\n\n\n\n#[derive(Debug)]\n\npub struct ConnParams {\n\n pub database: String,\n\n pub user: String,\n\n pub password: String,\n\n pub hostname: String,\n\n pub port: u16,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Ichiran {\n\n shared: Arc<Shared>,\n\n}\n", "file_path": "third-party/ichiran/src/lib.rs", "rank": 86, "score": 7.924084971679597 }, { "content": " irr_perc: String,\n\n /// Readings for this kanji\n\n readings: Vec<Reading>,\n\n /// Meanings of this kanji\n\n meanings: Vec<String>,\n\n /// Frequency out of 2501 most-used characters\n\n freq: Option<u32>,\n\n /// \"Grade\" of this kanji\n\n grade: Option<u32>,\n\n}\n\n\n\nimpl Kanji {\n\n pub fn text(&self) -> &str {\n\n self.text.as_str()\n\n }\n\n pub fn radical_code(&self) -> u32 {\n\n self.rc\n\n }\n\n pub fn stroke_count(&self) -> u32 {\n\n self.strokes\n", "file_path": "third-party/ichiran/src/kanji.rs", "rank": 87, "score": 7.478462129024885 }, { "content": "use ichiran::romanize::*;\n\nuse imgui::*;\n\n\n\nuse crate::backend::renderer::Env;\n\n\n\nuse super::id;\n\n\n", "file_path": "src/view/raw.rs", "rank": 88, "score": 7.33735282175704 }, { "content": "use lazy_static::lazy_static;\n\nuse regex::Regex;\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::coerce::*;\n\n\n\n// Reverse-engineered from the JSON output of ichiran-cli since I can't read lisp.\n\n// Disclaimer: Might be wrong in several ways. I pulled names for some of the\n\n// grammatical structures out of my ass.\n\n\n\n/// The root of a parse tree.\n\n#[derive(Debug, Default, Clone, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\n#[serde(deny_unknown_fields)]\n\npub struct Root(Vec<Segment>);\n\nimpl Root {\n\n /// Get all segments under the parse tree.\n\n pub fn segments(&self) -> &[Segment] {\n\n &self.0\n\n }\n\n\n", "file_path": "third-party/ichiran/src/romanize.rs", "rank": 89, "score": 7.259898705363181 }, { "content": " imgui,\n\n env,\n\n renderer,\n\n }\n\n }\n\n}\n\nimpl Renderer for GlowRenderer {\n\n fn main_loop(&mut self, app: &mut App) {\n\n let GlowRenderer {\n\n event_loop,\n\n window,\n\n platform,\n\n imgui,\n\n env,\n\n renderer,\n\n } = self;\n\n let mut last_frame = Instant::now();\n\n\n\n event_loop.run_return(|event, _, control_flow| match event {\n\n glutin::event::Event::NewEvents(_) => {\n", "file_path": "src/backend/glow.rs", "rank": 90, "score": 7.240363406859837 }, { "content": "use serde::{\n\n de::{self, Visitor},\n\n Deserializer,\n\n};\n\nuse std::fmt;\n\n\n\n/// Special deserializer which removes zero-width non-joiner characters.\n\npub(crate) fn deserialize<'de, D>(deserializer: D) -> Result<String, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n struct StringVisitor;\n\n impl<'de> Visitor<'de> for StringVisitor {\n\n type Value = String;\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"str\")\n\n }\n\n fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n Ok(v.replace('\\u{200C}', \"\"))\n\n }\n\n }\n\n deserializer.deserialize_str(StringVisitor)\n\n}\n", "file_path": "third-party/ichiran/src/coerce/no_zwnj.rs", "rank": 91, "score": 7.236694838422527 }, { "content": " imgui\n\n }\n\n\n\n fn create_platform(imgui: &mut imgui::Context, window: &winit::window::Window) -> WinitPlatform\n\n where\n\n Self: Sized,\n\n {\n\n let mut platform = WinitPlatform::init(imgui);\n\n platform.attach_window(\n\n imgui.io_mut(),\n\n window,\n\n imgui_winit_support::HiDpiMode::Default,\n\n );\n\n platform\n\n }\n\n\n\n fn create_fonts(imgui: &mut imgui::Context, env: &mut Env, platform: &WinitPlatform)\n\n where\n\n Self: Sized,\n\n {\n", "file_path": "src/backend/renderer.rs", "rank": 92, "score": 7.041660251683373 }, { "content": " }\n\n /// Get the reading for this conjugation.\n\n pub fn reading(&self) -> Option<&str> {\n\n self.reading.as_deref()\n\n }\n\n /// Get a list of glosses.\n\n pub fn gloss(&self) -> &[Gloss] {\n\n &self.gloss\n\n }\n\n /// Get the source of the conjugation.\n\n pub fn vias(&self) -> Vec<&Conjugation> {\n\n self.via.iter().map(Box::as_ref).collect()\n\n }\n\n /// TODO no idea what this is\n\n pub fn readok(&self) -> bool {\n\n self.readok\n\n }\n\n /// Convert the via tree into a list of reverse root-to-leaf sequences,\n\n /// representing all possible via paths to reach this conjugation.\n\n /// e.g.\n", "file_path": "third-party/ichiran/src/romanize.rs", "rank": 93, "score": 6.978409116038896 }, { "content": "use serde::{\n\n de::{self, IgnoredAny, SeqAccess, Visitor},\n\n Deserializer,\n\n};\n\nuse std::fmt;\n\n\n\n/// Special deserializer to handle `Counter::ordinal` which is either `[]` (false) or `bool`.\n\npub(crate) fn deserialize<'de, D>(deserializer: D) -> Result<bool, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n struct BoolVisitor;\n\n impl<'de> Visitor<'de> for BoolVisitor {\n\n type Value = bool;\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"[] or bool\")\n\n }\n\n fn visit_seq<A: SeqAccess<'de>>(self, mut seq: A) -> Result<Self::Value, A::Error> {\n\n match seq.next_element()? {\n\n Some(IgnoredAny) => Err(de::Error::invalid_length(0, &self)),\n", "file_path": "third-party/ichiran/src/coerce/bool_seq.rs", "rank": 94, "score": 6.9742383645160855 }, { "content": "use imgui::{StyleColor, Ui};\n\n\n\nuse crate::backend::renderer::{Env, TextStyle};\n\n\n", "file_path": "src/view/mixins.rs", "rank": 95, "score": 6.913827637287719 }, { "content": " x => {\n\n log::error!(\"unhandled message: {:?}\", x);\n\n }\n\n }\n\n\n\n match &self.state {\n\n State::Error(_) | State::None => {\n\n if let Some(request_gloss_text) = self.request_gloss_text.clone() {\n\n self.request_gloss_text = None;\n\n self.transition(ui, State::Processing);\n\n self.request_gloss(&request_gloss_text);\n\n }\n\n }\n\n _ => (),\n\n };\n\n\n\n if self.settings.watch_clipboard {\n\n if let Some(clipboard) = ui.clipboard_text() {\n\n if clipboard != self.last_clipboard {\n\n self.input_text = clipboard.clone();\n", "file_path": "src/app.rs", "rank": 96, "score": 6.8308028197733215 }, { "content": " Term::Word(Word::Plain(Plain {\n\n meta: Meta {\n\n reading: \"2回目 【にかいめ】\".into(),\n\n text: \"2回目\".into(),\n\n kana: \"にかいめ\".into(),\n\n score: 696,\n\n },\n\n seq: Some(1199330),\n\n gloss: vec![Gloss {\n\n pos: \"[ctr]\".into(),\n\n gloss: \"counter for occurrences\".into(),\n\n info: None,\n\n }],\n\n conj: vec![],\n\n counter: Some(Counter {\n\n value: \"Value: 2nd\".into(),\n\n ordinal: true,\n\n }),\n\n suffix: None,\n\n })),\n", "file_path": "third-party/ichiran/src/romanize.rs", "rank": 97, "score": 6.821026512584934 }, { "content": "use std::{io, process::ExitStatus};\n\n\n\nuse thiserror::Error;\n\n\n\n#[derive(Error, Debug)]\n\npub enum IchiranError {\n\n #[error(\"I/O Error: {0}\")]\n\n Io(#[from] io::Error),\n\n #[error(\"Serde Error: {0}\")]\n\n Serde(#[from] serde_json::Error),\n\n #[error(\"ichiran-cli exited w/ {status}\\n{stderr}\")]\n\n Failure { status: ExitStatus, stderr: String },\n\n #[error(\"Parse Error:\\n{0}\")]\n\n Parsing(String),\n\n #[error(\"CSV Error: {0}\")]\n\n CsvError(#[from] csv::Error),\n\n #[error(\"Lisp Error:\\n{0}\")]\n\n KetosError(String),\n\n}\n\n\n\n// ketos::Error is non-Send so we need to serialize it.\n\nimpl From<ketos::Error> for IchiranError {\n\n fn from(err: ketos::Error) -> Self {\n\n IchiranError::KetosError(format!(\"{:#?}\", err))\n\n }\n\n}\n", "file_path": "third-party/ichiran/src/error.rs", "rank": 98, "score": 6.777783585909837 }, { "content": " if !should_translate\n\n && ui.is_item_hovered_with_flags(ItemHoveredFlags::ALLOW_WHEN_DISABLED)\n\n {\n\n ui.tooltip(|| ui.text(\"Text does not require translation\"));\n\n }\n\n }\n\n self.show_deepl_usage(ui);\n\n {\n\n let _disable_ready = ui.begin_disabled(!matches!(self.state, State::None));\n\n self.rikai.ui(env, ui, &self.settings, &mut self.show_raw);\n\n if let State::Processing = &self.state {\n\n ui.set_mouse_cursor(Some(MouseCursor::NotAllowed));\n\n }\n\n }\n\n self.show_error_modal(env, ui);\n\n self.poll(ui);\n\n });\n\n\n\n if self.show_imgui_demo {\n\n ui.show_demo_window(&mut self.show_imgui_demo);\n", "file_path": "src/app.rs", "rank": 99, "score": 6.750832744704585 } ]
Rust
src/pixel_task.rs
SharpCoder/hexagon2
0a565a263e5a3513c15e7e10ef75a9b6cd64efdb
use teensycore::*; use teensycore::clock::*; use teensycore::debug::blink_accumulate; use teensycore::math::rand; use teensycore::system::str::Str; use teensycore::system::str::StringOps; use teensycore::system::vector::Array; use teensycore::system::vector::Vector; use crate::date_time::DateTime; use crate::get_shader_configs; use crate::get_tranasition_delay; use crate::shaders::*; use crate::effects::*; use crate::pixel_engine::color::*; use crate::pixel_engine::math::interpolate; use crate::pixel_engine::shader::*; use crate::pixel_engine::effect::*; use crate::pixel_engine::context::*; use crate::drivers::ws2812::*; const LEDS_PER_UNIT: usize = 3; const LEDS: usize = crate::HEX_UNITS * LEDS_PER_UNIT; const TRANSITION_TIME: uNano = 1000 * crate::WORLD_MUTIPLIER; enum PixelState { Loading, Transitioning, MainSequence, } pub struct PixelTask { state: PixelState, shader: Option<Shader>, next_shader: Option<Shader>, shaders: Vector<Shader>, contexts: [Context; crate::HEX_UNITS], effect: Option<Effect>, effects: Vector<Effect>, driver: WS2812Driver<LEDS>, target: uNano, day_target: uNano, day_processed: uNano, cycles: u64, randomize_target: uNano, ready: bool, color_buffer: [Color; crate::HEX_UNITS], transition_start: uNano, transition_offset: uNano, cycle_offset: uNano, } impl PixelTask { pub fn new() -> Self { return PixelTask { state: PixelState::Loading, target: 0, day_target: 0, randomize_target: 0, day_processed: 0, transition_start: 0, transition_offset: 0, cycle_offset: 0, cycles: 0, ready: false, shader: None, effect: None, next_shader: None, shaders: initialize_shaders(), effects: initialize_effects(), driver: WS2812Driver::<LEDS>::new( 18, ), color_buffer: [Color::blank(); crate::HEX_UNITS], contexts: [Context::empty(); crate::HEX_UNITS], }; } #[allow(dead_code)] fn find_effect(&self, name: &'static [u8]) -> Option<Effect> { for effect in self.effects.into_iter() { if effect.name == name { return Some(effect); } } return None; } fn find_shader(&self, name: &Str) -> Option<Shader> { for shader in self.shaders.into_iter() { let mut shader_name = Str::new(); shader_name.append(shader.name); if name.contains(&shader_name) { shader_name.drop(); return Some(shader); } shader_name.drop(); } return None; } fn cycle_next_shader(&mut self) { let idx = (self.cycles / 3) as usize % self.shaders.size(); let shader = self.shaders.get(idx).unwrap(); self.transition_to(shader); } fn get_next_shader(&self) -> Shader { if crate::USE_WIFI { let appropriate_shader = get_shader_configs().get_shader(crate::get_world_time()); match self.find_shader(&appropriate_shader) { None => return self.shaders.get(0).unwrap(), Some(shader) => { if shader.disabled { return self.get_next_shader(); } return shader; } } } else { let idx = rand() % self.shaders.size() as u64; let next_shader = self.shaders.get(idx as usize).unwrap(); if next_shader.wifi_only || next_shader.disabled { return self.get_next_shader(); } else { return next_shader; } } } fn get_next_effect(&self, shader: &Shader) -> Effect { let idx = rand() % self.effects.size() as u64; let next_effect = self.effects.get(idx as usize).unwrap(); if next_effect.disabled || next_effect.max_color_segments.unwrap_or(usize::MAX) < shader.total_segments || next_effect.min_hex_units.unwrap_or(0) > crate::HEX_UNITS { return self.get_next_effect(shader); } else { return next_effect; } } pub fn init(&mut self) { self.driver.init(); for node_id in 0 .. crate::HEX_UNITS { self.contexts[node_id].node_id = node_id as uNano; self.contexts[node_id].total_nodes = crate::HEX_UNITS as uNano; self.contexts[node_id].initialized = false; } self.day_target = nanos() + (S_TO_NANO * 60 * 30); self.shader = self.find_shader(&str!(b"Medbay")); self.effect = Some(self.find_effect(b"Randomized").unwrap()); } pub fn transition_to(&mut self, next_shader: Shader) { self.next_shader = Some(next_shader); for node_id in 0 .. crate::HEX_UNITS { self.contexts[node_id].initialized = false; self.contexts[node_id].node_id = node_id as uNano; } self.effect = Some(self.get_next_effect(&next_shader)); self.transition_start = nanos(); self.transition_offset = 0; self.state = PixelState::Transitioning; } pub fn randomize(&mut self) { self.randomize_target = nanos() + get_tranasition_delay(); self.transition_to(self.get_next_shader()); } /* This method will reset variables if the world uptime counter overflows. */ pub fn overflow_watch(&mut self) { let now = nanos(); if self.transition_offset > 0 && self.transition_start > 0 && self.randomize_target > 0 && ( now < self.transition_offset || now < self.transition_start ) { self.randomize(); } } pub fn system_loop(&mut self) { let time = nanos() - self.transition_offset; let cycle_time = (time - self.cycle_offset) / teensycore::MS_TO_NANO; let elapsed_ms = time / teensycore::MS_TO_NANO; let mut should_cycle = false; if time > self.target { let shader = self.shader.as_mut().unwrap(); let effect = self.effect.as_mut().unwrap(); match self.state { PixelState::Transitioning => { if time > (self.transition_start + TRANSITION_TIME * MS_TO_NANO) { self.state = PixelState::MainSequence; self.shader = self.next_shader; self.transition_offset = self.transition_start; } else { for node_id in 0 .. crate::HEX_UNITS { let mut ctx = self.contexts[node_id]; let next_shader = self.next_shader.as_mut().unwrap(); let transition_time_elapsed = (time - self.transition_start) / MS_TO_NANO; let (effect_time, next_context) = effect.process(&mut ctx, transition_time_elapsed); let time_t = ((effect_time as f64 / 100.0) * next_shader.total_time as f64) as uNano; let next_color = next_shader.get_color(time_t); self.contexts[node_id] = next_context; let color = rgb( interpolate(self.color_buffer[node_id].r as u32, next_color.r as u32, transition_time_elapsed, TRANSITION_TIME) as u8, interpolate(self.color_buffer[node_id].g as u32, next_color.g as u32, transition_time_elapsed, TRANSITION_TIME) as u8, interpolate(self.color_buffer[node_id].b as u32, next_color.b as u32, transition_time_elapsed, TRANSITION_TIME) as u8, ).as_hex(); for pixel_id in 0 .. LEDS_PER_UNIT { self.driver.set_color(node_id * LEDS_PER_UNIT + pixel_id, color); } } } }, PixelState::MainSequence | PixelState::Loading => { if cycle_time > effect.total_time { self.cycles += 1; self.cycle_offset = nanos() - self.transition_offset; if crate::CYCLE_MODE && self.cycles % 3 == 0 { should_cycle = true; } } for node_id in 0 .. crate::HEX_UNITS { let mut ctx = self.contexts[node_id]; let (effect_time, next_context) = effect.process(&mut ctx, elapsed_ms); let time_t = (( effect_time as f64 / 100.0) * shader.total_time as f64) as uNano; self.color_buffer[node_id] = shader.get_color(time_t); let color = self.color_buffer[node_id].as_hex(); self.contexts[node_id] = next_context; for pixel_id in 0 .. LEDS_PER_UNIT { self.driver.set_color(node_id * LEDS_PER_UNIT + pixel_id, color); } } }, } match self.state { PixelState::MainSequence => { if crate::USE_WIFI && nanos() > self.day_target { let datetime = DateTime::now(); if self.day_processed != datetime.days && datetime.hour >= 6 { self.day_processed = datetime.days; self.randomize(); } self.day_target = nanos() + S_TO_NANO; } else if nanos() > self.randomize_target { self.randomize(); } }, _ => {}, } self.driver.flush(); } if should_cycle { self.cycle_next_shader(); } self.overflow_watch(); } pub fn ready(&mut self) { if !self.ready { self.ready = true; self.randomize(); } } }
use teensycore::*; use teensycore::clock::*; use teensycore::debug::blink_accumulate; use teensycore::math::rand; use teensycore::system::str::Str; use teensycore::system::str::StringOps; use teensycore::system::vector::Array; use teensycore::system::vector::Vector; use crate::date_time::DateTime; use crate::get_shader_configs; use crate::get_tranasition_delay; use crate::shaders::*; use crate::effects::*; use crate::pixel_engine::color::*; use crate::pixel_engine::math::interpolate; use crate::pixel_engine::shader::*; use crate::pixel_engine::effect::*; use crate::pixel_engine::context::*; use crate::drivers::ws2812::*; const LEDS_PER_UNIT: usize = 3; const LEDS: usize = crate::HEX_UNITS * LEDS_PER_UNIT; const TRANSITION_TIME: uNano = 1000 * crate::WORLD_MUTIPLIER; enum PixelState { Loading, Transitioning, MainSequence, } pub struct PixelTask { state: PixelState, shader: Option<Shader>, next_shader: Option<Shader>, shaders: Vector<Shader>, contexts: [Context; crate::HEX_UNITS], effect: Option<Effect>, effects: Vector<Effect>, driver: WS2812Driver<LEDS>, target: uNano, day_target: uNano, day_processed: uNano, cycles: u64, randomize_target: uNano, ready: bool, color_buffer: [Color; crate::HEX_UNITS], transition_start: uNano, transition_offset: uNano, cycle_offset: uNano, } impl PixelTask { pub fn new() -> Self { return PixelTask { state: PixelState::Loading, target: 0, day_target: 0, randomize_target: 0, day_processed: 0, transition_start: 0, transition_offset: 0, cycle_offset: 0, cycles: 0, ready: false, shader: None, effect: None, next_shader: None, shaders: initialize_shaders(), effects: initialize_effects(), driver: WS2812Driver::<LEDS>::new( 18, ), color_buffer: [Color::blank(); crate::HEX_UNITS], contexts: [Context::empty(); crate::HEX_UNITS], }; } #[allow(dead_code)] fn find_effect(&self, name: &'static [u8]) -> Option<Effect> { for effect in self.effects.into_iter() { if effect.name == name { return Some(effect); } } return None; } fn find_shader(&self, name: &Str) -> Option<Shader> { for shader in self.shaders.into_iter() { let mut shader_name = Str::new(); shader_name.append(shader.name); if name.contains(&shader_name) { shader_name.drop(); return Some(shader); } shader_name.drop(); } return None; } fn cycle_next_shader(&mut self) { let idx = (self.cycles / 3) as usize % self.shaders.size(); let shader = self.shaders.get(idx).unwrap(); self.transition_to(shader); } fn get_next_shader(&self) -> Shader {
fn get_next_effect(&self, shader: &Shader) -> Effect { let idx = rand() % self.effects.size() as u64; let next_effect = self.effects.get(idx as usize).unwrap(); if next_effect.disabled || next_effect.max_color_segments.unwrap_or(usize::MAX) < shader.total_segments || next_effect.min_hex_units.unwrap_or(0) > crate::HEX_UNITS { return self.get_next_effect(shader); } else { return next_effect; } } pub fn init(&mut self) { self.driver.init(); for node_id in 0 .. crate::HEX_UNITS { self.contexts[node_id].node_id = node_id as uNano; self.contexts[node_id].total_nodes = crate::HEX_UNITS as uNano; self.contexts[node_id].initialized = false; } self.day_target = nanos() + (S_TO_NANO * 60 * 30); self.shader = self.find_shader(&str!(b"Medbay")); self.effect = Some(self.find_effect(b"Randomized").unwrap()); } pub fn transition_to(&mut self, next_shader: Shader) { self.next_shader = Some(next_shader); for node_id in 0 .. crate::HEX_UNITS { self.contexts[node_id].initialized = false; self.contexts[node_id].node_id = node_id as uNano; } self.effect = Some(self.get_next_effect(&next_shader)); self.transition_start = nanos(); self.transition_offset = 0; self.state = PixelState::Transitioning; } pub fn randomize(&mut self) { self.randomize_target = nanos() + get_tranasition_delay(); self.transition_to(self.get_next_shader()); } /* This method will reset variables if the world uptime counter overflows. */ pub fn overflow_watch(&mut self) { let now = nanos(); if self.transition_offset > 0 && self.transition_start > 0 && self.randomize_target > 0 && ( now < self.transition_offset || now < self.transition_start ) { self.randomize(); } } pub fn system_loop(&mut self) { let time = nanos() - self.transition_offset; let cycle_time = (time - self.cycle_offset) / teensycore::MS_TO_NANO; let elapsed_ms = time / teensycore::MS_TO_NANO; let mut should_cycle = false; if time > self.target { let shader = self.shader.as_mut().unwrap(); let effect = self.effect.as_mut().unwrap(); match self.state { PixelState::Transitioning => { if time > (self.transition_start + TRANSITION_TIME * MS_TO_NANO) { self.state = PixelState::MainSequence; self.shader = self.next_shader; self.transition_offset = self.transition_start; } else { for node_id in 0 .. crate::HEX_UNITS { let mut ctx = self.contexts[node_id]; let next_shader = self.next_shader.as_mut().unwrap(); let transition_time_elapsed = (time - self.transition_start) / MS_TO_NANO; let (effect_time, next_context) = effect.process(&mut ctx, transition_time_elapsed); let time_t = ((effect_time as f64 / 100.0) * next_shader.total_time as f64) as uNano; let next_color = next_shader.get_color(time_t); self.contexts[node_id] = next_context; let color = rgb( interpolate(self.color_buffer[node_id].r as u32, next_color.r as u32, transition_time_elapsed, TRANSITION_TIME) as u8, interpolate(self.color_buffer[node_id].g as u32, next_color.g as u32, transition_time_elapsed, TRANSITION_TIME) as u8, interpolate(self.color_buffer[node_id].b as u32, next_color.b as u32, transition_time_elapsed, TRANSITION_TIME) as u8, ).as_hex(); for pixel_id in 0 .. LEDS_PER_UNIT { self.driver.set_color(node_id * LEDS_PER_UNIT + pixel_id, color); } } } }, PixelState::MainSequence | PixelState::Loading => { if cycle_time > effect.total_time { self.cycles += 1; self.cycle_offset = nanos() - self.transition_offset; if crate::CYCLE_MODE && self.cycles % 3 == 0 { should_cycle = true; } } for node_id in 0 .. crate::HEX_UNITS { let mut ctx = self.contexts[node_id]; let (effect_time, next_context) = effect.process(&mut ctx, elapsed_ms); let time_t = (( effect_time as f64 / 100.0) * shader.total_time as f64) as uNano; self.color_buffer[node_id] = shader.get_color(time_t); let color = self.color_buffer[node_id].as_hex(); self.contexts[node_id] = next_context; for pixel_id in 0 .. LEDS_PER_UNIT { self.driver.set_color(node_id * LEDS_PER_UNIT + pixel_id, color); } } }, } match self.state { PixelState::MainSequence => { if crate::USE_WIFI && nanos() > self.day_target { let datetime = DateTime::now(); if self.day_processed != datetime.days && datetime.hour >= 6 { self.day_processed = datetime.days; self.randomize(); } self.day_target = nanos() + S_TO_NANO; } else if nanos() > self.randomize_target { self.randomize(); } }, _ => {}, } self.driver.flush(); } if should_cycle { self.cycle_next_shader(); } self.overflow_watch(); } pub fn ready(&mut self) { if !self.ready { self.ready = true; self.randomize(); } } }
if crate::USE_WIFI { let appropriate_shader = get_shader_configs().get_shader(crate::get_world_time()); match self.find_shader(&appropriate_shader) { None => return self.shaders.get(0).unwrap(), Some(shader) => { if shader.disabled { return self.get_next_shader(); } return shader; } } } else { let idx = rand() % self.shaders.size() as u64; let next_shader = self.shaders.get(idx as usize).unwrap(); if next_shader.wifi_only || next_shader.disabled { return self.get_next_shader(); } else { return next_shader; } } }
function_block-function_prefix_line
[ { "content": "pub fn parse_http_request(rx_buffer: &Str, header: &mut Str, content: &mut Str) -> bool {\n\n // Ensure buffers are setup\n\n header.clear();\n\n content.clear();\n\n\n\n debug_str(b\"HELLO YES THIS IS DOG\");\n\n \n\n let mut content_length_cmp = str!(b\"Content-Length: \");\n\n let mut ipd = str!(b\"+IPD,\");\n\n let mut colon = str!(b\":\");\n\n let mut crnl = str!(b\"\\r\\n\");\n\n \n\n\n\n let mut content_length: Option<u64> = None;\n\n let mut state = ParserState::LookForStart;\n\n let mut parsed_packet = parse_response_payload(&rx_buffer);\n\n\n\n // Debug each line of parsed data.\n\n // Search for the content length\n\n // Then search for the content start\n", "file_path": "src/http/parser.rs", "rank": 0, "score": 177336.26559761917 }, { "content": "fn arr_contains(arr: Vector<u8>, target: &Str) -> bool {\n\n if arr.size() < target.len() {\n\n return false;\n\n }\n\n\n\n for idx in 0 .. target.len() {\n\n let c = arr.get(idx);\n\n if c.is_some() && c.unwrap() != target.char_at(idx).unwrap() {\n\n return false;\n\n }\n\n }\n\n\n\n return true;\n\n}", "file_path": "src/http/parser.rs", "rank": 1, "score": 174267.66661054525 }, { "content": "pub fn rgb(r: u8, g: u8, b: u8) -> Color {\n\n return Color {\n\n r: r,\n\n g: g,\n\n b: b,\n\n };\n\n}\n\n\n", "file_path": "src/pixel_engine/color.rs", "rank": 2, "score": 165378.14482201554 }, { "content": "fn ready(gate: &mut Gate) -> bool { return rx_contains(gate, unsafe { &READY }, true); }\n", "file_path": "src/wifi_task.rs", "rank": 3, "score": 143772.40364581833 }, { "content": "pub fn get_shader_configs() -> &'static ShaderConfigList {\n\n return unsafe { &SHADER_CONFIGS };\n\n}", "file_path": "src/lib.rs", "rank": 4, "score": 140574.92519320745 }, { "content": "/// Write content over TCP/UDP\n\npub fn esp8266_write(device: SerioDevice, content: &Str, id: Option<u8>) {\n\n serial_write(device, b\"AT+CIPSEND=\");\n\n\n\n match id {\n\n None => {\n\n\n\n },\n\n Some(con_id) => {\n\n serial_write_str(device, &itoa(con_id as u64));\n\n serial_write(device, b\",\");\n\n }\n\n }\n\n\n\n serial_write_str(device, &itoa(content.len() as u64));\n\n serial_write(device, b\"\\r\\n\");\n\n // TODO: would be great to not block at all...\n\n esp8266_block_until(device, b\"OK\", S_TO_NANO);\n\n serial_write_str(device, &content);\n\n serial_write(device, b\"\\r\\n\");\n\n}\n\n\n", "file_path": "src/drivers/esp8266.rs", "rank": 5, "score": 138249.1412639714 }, { "content": "fn rx_contains(gate: &mut Gate, cond: &Option<Str>, clear: bool) -> bool {\n\n match unsafe { &ERROR } {\n\n // I know you think this logic is wrong, but it's not\n\n None => {\n\n return false;\n\n },\n\n Some(target) => {\n\n if serial_read(DEVICE).contains(&target) {\n\n gate.reset();\n\n serial_read(DEVICE).clear();\n\n return false;\n\n }\n\n }\n\n }\n\n\n\n match unsafe { &FAIL } {\n\n // I know you think this logic is wrong, but it's not\n\n None => {\n\n return false;\n\n },\n", "file_path": "src/wifi_task.rs", "rank": 6, "score": 136188.1951213416 }, { "content": "pub fn rgb_to_hsv(r: u8, g: u8, b: u8) -> (f32, f32, f32) {\n\n let rgb_prime = (r as f32 / 255.0, g as f32 / 255.0, b as f32 / 255.0);\n\n let c_max = max(max(rgb_prime.0, rgb_prime.1), rgb_prime.2);\n\n let c_min = min(min(rgb_prime.0, rgb_prime.1), rgb_prime.2);\n\n let delta = c_max - c_min;\n\n\n\n let v = c_max;\n\n let mut h;\n\n let s = match c_max == 0.0 || delta == 0.0{\n\n true => 0.0,\n\n false => delta / c_max,\n\n };\n\n\n\n // assert_eq!(c_min, 12.0);\n\n\n\n if delta == 0.0 {\n\n h = 0.0;\n\n } else if c_max == rgb_prime.0 {\n\n // R is dominant\n\n h = 60.0 * (((rgb_prime.1 - rgb_prime.2) / delta) % 6.0);\n", "file_path": "src/pixel_engine/color.rs", "rank": 7, "score": 130600.82076578609 }, { "content": "pub fn initialize_effects<'a>() -> Vector<Effect> {\n\n return vector!(\n\n\n\n Effect::new(b\"Distributed\")\n\n .with_initializer(|_, ctx| {\n\n let mut next_ctx = ctx.clone();\n\n let step = TIME / ctx.total_nodes;\n\n next_ctx.offset = (ctx.node_id * step) as uNano;\n\n return next_ctx;\n\n })\n\n .transition_to(100, TIME)\n\n .build(),\n\n\n\n Effect::new(b\"Explosion\")\n\n .with_min_hex_units(10)\n\n .with_initializer(|_, ctx| {\n\n let mut next_ctx = ctx.clone();\n\n let midpoint = ctx.total_nodes / 2;\n\n let id = match ctx.node_id > midpoint {\n\n true => (ctx.total_nodes - ctx.node_id) + midpoint,\n", "file_path": "src/effects.rs", "rank": 8, "score": 128405.85138216015 }, { "content": "pub fn initialize_shaders<'a>() -> Vector<Shader> {\n\n return vector!(\n\n // Shader::new(b\"Lunar\")\n\n // .with_color(rgb(199, 7, 2))\n\n // .transition_to(rgb(255, 7, 2), TIME)\n\n // .transition_to(rgb(255, 84, 2), TIME)\n\n // .transition_to(rgb(199, 7, 2), TIME)\n\n // .build(),\n\n\n\n // Shader::new(b\"Independence\")\n\n // .as_wifi_only()\n\n // .with_color(rgb(255, 0, 0))\n\n // .transition_to(rgb(255,255,255), TIME)\n\n // .transition_to(rgb(0, 0, 255), TIME)\n\n // .transition_to(rgb(255, 0, 0), TIME)\n\n // .build(),\n\n\n\n Shader::new(b\"Medbay\")\n\n .with_color(rgb(0, 255, 0))\n\n .transition_to(rgb(0,0,255), TIME)\n", "file_path": "src/shaders.rs", "rank": 9, "score": 127799.56384546921 }, { "content": "/// Esetablish a TCP connection\n\npub fn esp8266_open_tcp(device: SerioDevice, domain: &Str, port: u32, id: Option<u8>) {\n\n match id {\n\n None => {\n\n serial_write(device, b\"AT+CIPSTART=\\\"TCP\\\",\\\"\");\n\n },\n\n Some(con_id) => {\n\n serial_write(device, b\"AT+CIPSTART=\");\n\n serial_write_str(device, &itoa(con_id as u64));\n\n serial_write(device, b\",\\\"\");\n\n }\n\n }\n\n serial_write_str(device, &domain);\n\n serial_write(device, b\"\\\",\");\n\n serial_write_str(device, &itoa(port as u64));\n\n serial_write(device, b\"\\r\\n\");\n\n}\n\n\n", "file_path": "src/drivers/esp8266.rs", "rank": 10, "score": 125203.51360326397 }, { "content": "pub fn esp8266_block_until(device: SerioDevice, command: &[u8], timeout: uNano) {\n\n let threshold = nanos() + timeout;\n\n let cmd = str!(command);\n\n\n\n loop {\n\n let buf = serial_read(device);\n\n if buf.contains(&cmd) {\n\n buf.clear();\n\n wait_ns(MS_TO_NANO * 14);\n\n return;\n\n } else if nanos() > threshold {\n\n return;\n\n }\n\n wait_ns(MS_TO_NANO * 7);\n\n }\n\n}\n\n\n", "file_path": "src/drivers/esp8266.rs", "rank": 11, "score": 124908.31158602188 }, { "content": "/// Connect to a wifi access point.\n\npub fn esp8266_connect_to_wifi(device: SerioDevice, ssid: &Str, pwd: &Str) {\n\n serial_write(device, b\"AT+CWJAP=\\\"\");\n\n serial_write_str(device, &ssid);\n\n serial_write(device, b\"\\\",\\\"\");\n\n serial_write_str(device, &pwd);\n\n serial_write(device, b\"\\\"\\r\\n\");\n\n}\n\n\n", "file_path": "src/drivers/esp8266.rs", "rank": 12, "score": 123649.16461469256 }, { "content": "/// This method takes a serial blob of data and returns\n\n/// All content after IPD+,[content_length]: and is smart\n\n/// enough to handle chunking.\n\npub fn parse_response_payload(buf: &Str) -> Str {\n\n let mut result = Str::new();\n\n let mut ipd = str!(b\"+IPD,\");\n\n let mut colon = str!(b\":\");\n\n\n\n\n\n let mut state: ParserState = ParserState::LookForStart;\n\n let mut temp = Str::new();\n\n let mut ipd_buf = Vector::<u8>::new();\n\n let mut packet_length: Option<usize> = None;\n\n\n\n // Scan until ipd is found. At which point,\n\n // read until packet_length is known at which point\n\n // aggregate packet details. Repeat until end of stream.\n\n for char in buf.into_iter() {\n\n ipd_buf.enqueue(char);\n\n if ipd_buf.size() > 5 {\n\n ipd_buf.dequeue();\n\n }\n\n\n", "file_path": "src/http/parser.rs", "rank": 13, "score": 122042.86766293789 }, { "content": "/// Send a raw command to the esp8266\n\npub fn esp8266_raw(device: SerioDevice, command: &[u8]) {\n\n serial_write(device, command);\n\n serial_write(device, b\"\\r\\n\");\n\n}", "file_path": "src/drivers/esp8266.rs", "rank": 14, "score": 121253.82876183513 }, { "content": "/// Given a domain, this command will return the ip address\n\npub fn esp8266_dns_lookup(device: SerioDevice, domain: &Str) {\n\n serial_write(device, b\"AT+CIPDOMAIN=\\\"\");\n\n serial_write_str(device, &domain);\n\n serial_write(device, b\"\\\"\\r\\n\");\n\n}\n\n\n", "file_path": "src/drivers/esp8266.rs", "rank": 15, "score": 118184.23706502357 }, { "content": "/// Configures the ESP8266 to either send back\n\n/// received commands, or not.\n\npub fn esp8266_configure_echo(device: SerioDevice, enabled: bool) {\n\n match enabled {\n\n true => {\n\n esp8266_raw(device, b\"ATE1\");\n\n },\n\n false => {\n\n esp8266_raw(device, b\"ATE0\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/drivers/esp8266.rs", "rank": 16, "score": 118181.06421198574 }, { "content": "/// This muxes the device to either allow or disallow multiple connections.\n\n/// If multiple connections are allowed, you'll need to be cognizant\n\n/// of that when interfacing with some of the other commands.\n\npub fn esp8266_multiple_connections(device: SerioDevice, allow: bool) {\n\n match allow {\n\n true => {\n\n esp8266_raw(device, b\"AT+CIPMUX=1\");\n\n },\n\n false => {\n\n // This may require a reboot to work as intended\n\n esp8266_raw(device, b\"AT+CIPMUX=0\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/drivers/esp8266.rs", "rank": 17, "score": 118181.06421198574 }, { "content": "pub fn esp8266_set_ip(device: SerioDevice, ip: Str) {\n\n serial_write(device, b\"AT+CIPAP=\\\"\");\n\n serial_write_str(device, &ip);\n\n serial_write(device, b\"\\\"\\r\\n\");\n\n}\n\n\n", "file_path": "src/drivers/esp8266.rs", "rank": 18, "score": 118181.06421198577 }, { "content": "fn ok(gate: &mut Gate) -> bool { return rx_contains(gate, unsafe { &OK }, true); }\n", "file_path": "src/wifi_task.rs", "rank": 19, "score": 116655.36601562475 }, { "content": "/// Set whether the device will automatically attempt to reconnect\n\n/// to the AP on boot.\n\npub fn esp8266_auto_connect(device: SerioDevice, auto_connect: bool) {\n\n match auto_connect {\n\n true => {\n\n esp8266_raw(device, b\"AT+CWAUTOCONN=1\");\n\n },\n\n false => {\n\n esp8266_raw(device, b\"AT+CWAUTOCONN=0\");\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/drivers/esp8266.rs", "rank": 20, "score": 115502.90779387829 }, { "content": "pub fn set_transition_delay(nanos: uNano) {\n\n unsafe {\n\n TRANSITION_DELAY_NANOS = nanos;\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 21, "score": 115323.3132030065 }, { "content": "/// Close active TCP connection\n\npub fn esp8266_close_tcp(device: SerioDevice, id: Option<u8>) {\n\n serial_write(device, b\"AT+CIPCLOSE\");\n\n match id {\n\n None => {\n\n serial_write(device, b\"\\r\\n\");\n\n },\n\n Some(con_id) => {\n\n serial_write(device, b\"=\");\n\n serial_write_str(device, &itoa(con_id as u64));\n\n serial_write(device, b\"\\r\\n\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/drivers/esp8266.rs", "rank": 22, "score": 113119.13051834042 }, { "content": "fn send_ok(gate: &mut Gate) -> bool { return rx_contains(gate, unsafe { &SEND_OK }, false); }\n", "file_path": "src/wifi_task.rs", "rank": 23, "score": 112309.60417915319 }, { "content": "pub fn hsv(h: f32, s: f32, v: f32) -> Color {\n\n let theta = h % 360.0; \n\n let c = v * s;\n\n let x = c * (1.0 - abs(((theta / 60.0) % 2.0) - 1.0));\n\n let m = v - c;\n\n \n\n let rgb_prime;\n\n\n\n if theta >= 0.0 && theta < 60.0 {\n\n rgb_prime = (c, x, 0.0);\n\n } else if theta >= 60.0 && theta < 120.0 {\n\n rgb_prime = (x, c, 0.0);\n\n } else if theta >= 120.0 && theta < 180.0 {\n\n rgb_prime = (0.0, c, x);\n\n } else if theta >= 180.0 && theta < 240.0 {\n\n rgb_prime = (0.0, x, c);\n\n } else if theta >= 240.0 && theta < 300.0 {\n\n rgb_prime = (x, 0.0, c);\n\n } else {\n\n rgb_prime = (c, 0.0, x);\n", "file_path": "src/pixel_engine/color.rs", "rank": 24, "score": 105978.07849593295 }, { "content": "pub fn esp8266_dhcp_mode(device: SerioDevice, mode: WifiMode, dhcp_enabled: bool) {\n\n // serial_write(device, b\"AT+CWDHCP=1\");\n\n serial_write_str(device, &itoa((mode as u64) - 1));\n\n serial_write(device, b\",\");\n\n match dhcp_enabled {\n\n true => {\n\n serial_write(device, b\"0\\r\\n\");\n\n },\n\n false => {\n\n serial_write(device, b\"1\\r\\n\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/drivers/esp8266.rs", "rank": 25, "score": 103641.74945762752 }, { "content": "/// Returns the current unix epoch relative to seconds\n\npub fn get_world_time() -> uNano {\n\n return unsafe {\n\n WORLD_TIME_S + (nanos() / S_TO_NANO) - UPTIME_WORLDTIME_OFFSET_S\n\n };\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 26, "score": 97677.36786311744 }, { "content": "pub fn get_utc_offset() -> uNano {\n\n return unsafe {\n\n UTC_OFFSET\n\n };\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 27, "score": 97674.11433225009 }, { "content": "pub fn get_tranasition_delay() -> uNano {\n\n return unsafe {\n\n TRANSITION_DELAY_NANOS\n\n };\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 28, "score": 97674.11433225009 }, { "content": "pub fn set_shader_configs(config_list: ShaderConfigList) {\n\n unsafe {\n\n SHADER_CONFIGS = ShaderConfigList {\n\n configs: config_list.configs.clone(),\n\n }\n\n };\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 29, "score": 97293.28805654119 }, { "content": "/// The AT command checks whether the system is\n\n/// in a healthy state.\n\npub fn esp8266_at(device: SerioDevice) {\n\n esp8266_raw(device, b\"AT\");\n\n}\n\n\n", "file_path": "src/drivers/esp8266.rs", "rank": 30, "score": 95692.91268924772 }, { "content": "/// Sends the AT+RST reset command, causing\n\n/// the system to do a software-level\n\n/// reboot.\n\npub fn esp8266_reset(device: SerioDevice) {\n\n esp8266_raw(device, b\"AT+RST\");\n\n}\n\n\n", "file_path": "src/drivers/esp8266.rs", "rank": 31, "score": 93000.56786351737 }, { "content": "pub fn esp8266_version(device: SerioDevice) {\n\n esp8266_raw(device, b\"AT+GMR\");\n\n}\n\n\n", "file_path": "src/drivers/esp8266.rs", "rank": 32, "score": 93000.56786351737 }, { "content": "fn parse_config(serial_content: &Str) -> ShaderConfigList {\n\n // Parse the http headers\n\n let mut time_cmd = str!(b\"time\");\n\n let mut rule_cmd = str!(b\"rule\");\n\n let mut delay_cmd = str!(b\"delay\");\n\n\n\n let mut header = Str::new();\n\n let mut content = Str::new();\n\n let mut configs = Vector::new();\n\n\n\n if parse_http_request(serial_content, &mut header, &mut content) {\n\n let mut lines = content.split(b'\\n');\n\n for line in lines.into_iter() {\n\n let mut paths = line.split(b';');\n\n match paths.get(0) {\n\n None => {},\n\n Some(command) => {\n\n\n\n if command.contains(&time_cmd) && paths.size() > 1 {\n\n // Parse world time\n", "file_path": "src/wifi_task.rs", "rank": 33, "score": 92645.60280868586 }, { "content": "pub fn set_utc_offset(offset: uNano) {\n\n unsafe {\n\n UTC_OFFSET = offset;\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 34, "score": 91724.64423130784 }, { "content": "/// Sets the current unix epoch in seconds\n\npub fn set_world_time(time_s: uNano) {\n\n unsafe {\n\n UPTIME_WORLDTIME_OFFSET_S = nanos() / S_TO_NANO;\n\n WORLD_TIME_S = time_s;\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 35, "score": 91724.64423130784 }, { "content": "pub fn esp8266_list_wifi(device: SerioDevice) {\n\n esp8266_raw(device, b\"AT+CWLAP\");\n\n}\n\n\n", "file_path": "src/drivers/esp8266.rs", "rank": 36, "score": 90517.49913257705 }, { "content": "/// Get the devices ip address\n\npub fn esp8266_read_ip(device: SerioDevice) {\n\n esp8266_raw(device, b\"AT+CIFSR\");\n\n}\n\n\n", "file_path": "src/drivers/esp8266.rs", "rank": 37, "score": 90517.49913257705 }, { "content": "/// Disconnect from any currently active access point\n\npub fn esp8266_disconnect_from_wifi(device: SerioDevice) {\n\n esp8266_raw(device, b\"AT+CWQAP\");\n\n}\n\n\n", "file_path": "src/drivers/esp8266.rs", "rank": 38, "score": 90517.49913257705 }, { "content": "fn nano_rand() -> uNano {\n\n return rand() as uNano;\n\n}\n\n\n", "file_path": "src/effects.rs", "rank": 39, "score": 89765.15342492536 }, { "content": "#[derive(Copy, Clone)]\n\nstruct ShaderStep {\n\n time: uNano,\n\n color: Color,\n\n next: Option<*mut ShaderStep>,\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Shader {\n\n pub name: &'static [u8],\n\n sealed: bool,\n\n color: Color,\n\n root: Option<*mut ShaderStep>,\n\n pub total_segments: usize,\n\n pub total_time: uNano,\n\n pub wifi_only: bool,\n\n pub disabled: bool,\n\n}\n\n\n\nimpl Shader {\n\n\n", "file_path": "src/pixel_engine/shader.rs", "rank": 40, "score": 84165.35242898209 }, { "content": "pub fn esp8266_create_server(device: SerioDevice, port: u32) {\n\n serial_write(device, b\"AT+CIPSERVER=1,\");\n\n serial_write_str(device, &itoa(port as u64));\n\n serial_write(device, b\"\\r\\n\");\n\n}\n\n\n", "file_path": "src/drivers/esp8266.rs", "rank": 41, "score": 82238.72693228025 }, { "content": "/// Configure the ESP8266 to either be a client,\n\n/// a host, or both.\n\npub fn esp8266_wifi_mode(device: SerioDevice, mode: WifiMode) {\n\n serial_write(device, b\"AT+CWMODE=\");\n\n serial_write(device, &[int_to_hex(mode as u8)]);\n\n serial_write(device, b\"\\r\\n\");\n\n}\n\n\n", "file_path": "src/drivers/esp8266.rs", "rank": 42, "score": 80249.40282389756 }, { "content": "/// Set the server timeout in seconds\n\npub fn esp8266_set_server_timeout(device: SerioDevice, timeout: u32) {\n\n serial_write(device, b\"AT+CIPSTO=\");\n\n serial_write_str(device, &itoa(timeout as u64));\n\n serial_write(device, b\"\\r\\n\");\n\n}\n\n\n", "file_path": "src/drivers/esp8266.rs", "rank": 43, "score": 80249.40282389756 }, { "content": "#[derive(Clone, Copy)]\n\nstruct Node {\n\n pub green: u8,\n\n pub red: u8,\n\n pub blue: u8,\n\n}\n\n\n\nimpl Node {\n\n pub const fn new(red: u8, green: u8, blue: u8) -> Node {\n\n return Node {\n\n red: red,\n\n green: green,\n\n blue: blue,\n\n };\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct WS2812Driver<const SIZE: usize> {\n\n nodes: [Node; SIZE],\n\n pin: usize,\n", "file_path": "src/drivers/ws2812.rs", "rank": 44, "score": 80194.72551488769 }, { "content": "enum ParserState {\n\n LookForStart = 0x0,\n\n LookForLength = 0x1,\n\n LookForContent = 0x2,\n\n ReadContent = 0x3,\n\n Done = 0x4,\n\n}\n\n\n\n\n", "file_path": "src/http/parser.rs", "rank": 45, "score": 78848.71303423076 }, { "content": "pub fn interpolate(from: u32, to: u32, current_time: uNano, duration: uNano) -> u32 {\n\n let x0 = 0f64;\n\n let y0 = min(from, to) as f64;\n\n let x1 = duration as f64;\n\n let y1 = max(to, from) as f64;\n\n let x = current_time as f64;\n\n let delta = (x * ((y1 - y0)/(x1 - x0))) as u32;\n\n\n\n if from > to {\n\n return from - delta;\n\n } else {\n\n return from + delta;\n\n }\n\n}", "file_path": "src/pixel_engine/math.rs", "rank": 47, "score": 72549.69317805926 }, { "content": "fn abs(val: f32) -> f32 {\n\n if val < 0.0 {\n\n return val * -1.0;\n\n } else {\n\n return val;\n\n }\n\n}\n\n\n", "file_path": "src/pixel_engine/color.rs", "rank": 48, "score": 58968.51430401384 }, { "content": " false => midpoint + ctx.node_id,\n\n };\n\n next_ctx.offset = id * (TIME / midpoint / 4);\n\n return next_ctx;\n\n })\n\n .transition_to(100, TIME / 2)\n\n .transition_to(100, TIME / 2)\n\n .build(),\n\n\n\n Effect::new(b\"Randomized\")\n\n .with_max_color_segments(3)\n\n .with_initializer(|_, ctx| {\n\n let mut next_ctx = ctx.clone();\n\n next_ctx.offset = nano_rand() % TIME;\n\n return next_ctx;\n\n })\n\n .transition_to(100, TIME)\n\n .build(),\n\n\n\n // Effect::new(b\"Alternate\")\n", "file_path": "src/effects.rs", "rank": 49, "score": 30961.218967266195 }, { "content": " next_ctx.offset = fx.regs[1] as uNano % TIME;\n\n return next_ctx;\n\n })\n\n .transition_to(100, TIME)\n\n .transition_to(0, TIME)\n\n .transition_to(100, TIME)\n\n .build(),\n\n \n\n Effect::new(b\"Surprise\")\n\n .with_max_color_segments(3)\n\n .with_initializer(|_, ctx| {\n\n let mut next_ctx = ctx.clone();\n\n next_ctx.offset = nano_rand() % TIME / 2;\n\n return next_ctx;\n\n })\n\n .transition_to(50, TIME / 2)\n\n .transition_to(75, TIME / 3)\n\n .transition_to(100, TIME / 4)\n\n .build()\n\n );\n\n}", "file_path": "src/effects.rs", "rank": 50, "score": 30960.44393742952 }, { "content": "use crate::pixel_engine::effect::Effect;\n\nuse teensycore::clock::uNano;\n\nuse teensycore::{system::vector::*, vector, math::rand};\n\n\n\nconst TIME: uNano = 4200 * crate::WORLD_MUTIPLIER;\n\n\n", "file_path": "src/effects.rs", "rank": 51, "score": 30960.15983266789 }, { "content": " // .with_initializer(|_, ctx| {\n\n // let mut next_ctx = ctx.clone();\n\n // if ctx.node_id % 2 == 0 {\n\n // next_ctx.offset = TIME / 2;\n\n // } else {\n\n // next_ctx.offset = 0;\n\n // }\n\n\n\n // return next_ctx;\n\n // })\n\n // .transition_to(100, TIME)\n\n // .build(),\n\n\n\n Effect::new(b\"Grouped\")\n\n .with_max_color_segments(3)\n\n .with_initializer(|fx, ctx| {\n\n let mut next_ctx = ctx.clone();\n\n if ctx.node_id == 0 || rand() % 2 == 0 {\n\n fx.regs[1] = nano_rand() as i32;\n\n }\n", "file_path": "src/effects.rs", "rank": 52, "score": 30959.10932600154 }, { "content": " // .build(),\n\n\n\n // Shader::new(b\"Xmas\")\n\n // .as_wifi_only()\n\n // .with_color(rgb(255, 0, 0))\n\n // .transition_to(rgb(0,255,0), TIME)\n\n // .transition_to(rgb(255,0,0), TIME)\n\n // .build(),\n\n\n\n // Shader::new(b\"Mars\")\n\n // .as_disabled()\n\n // .with_color(rgb(255, 0, 0))\n\n // .transition_to(rgb(232,64,0), TIME)\n\n // .transition_to(rgb(255,0,0), TIME)\n\n // .build(),\n\n\n\n // Shader::new(b\"Diwali\")\n\n // .as_wifi_only()\n\n // .with_color(rgb(255,0,0))\n\n // .transition_to(rgb(0, 161, 94), TIME)\n", "file_path": "src/shaders.rs", "rank": 53, "score": 30463.0752253886 }, { "content": " .build(),\n\n\n\n // Shader::new(b\"R2D2\")\n\n // .with_color(rgb(0, 0, 255))\n\n // // .transition_to(rgb(128, 128, 128), TIME/10)\n\n // // .transition_to(rgb(0, 0, 255), TIME)\n\n // .transition_to(rgb(255, 0, 0), TIME/5)\n\n // .transition_to(rgb(0, 0, 255), TIME)\n\n // .build(),\n\n\n\n Shader::new(b\"Valentines\")\n\n .with_color(rgb(255, 0, 0))\n\n .transition_to(rgb(50, 0, 255), TIME)\n\n .transition_to(rgb(255, 0, 0), TIME)\n\n .build(),\n\n\n\n\n\n Shader::new(b\"Shire\")\n\n .as_wifi_only()\n\n .with_color(rgb(0, 255, 0))\n", "file_path": "src/shaders.rs", "rank": 54, "score": 30463.048217641495 }, { "content": "\n\n Shader::new(b\"Jupiter\")\n\n .as_disabled()\n\n .with_color(rgb(49, 0, 51))\n\n .transition_to(rgb(225,35,0), TIME)\n\n .transition_to(rgb(255,0,0), TIME)\n\n .transition_to(rgb(49, 0, 51), TIME)\n\n .build(),\n\n\n\n \n\n Shader::new(b\"Neptune\")\n\n .with_color(rgb(0,0,188))\n\n .transition_to(rgb(0, 35, 194), TIME)\n\n .transition_to(rgb(0, 255, 183), TIME)\n\n .transition_to(rgb(60, 0, 255), TIME)\n\n .transition_to(rgb(0,0,188), TIME)\n\n .build(),\n\n\n\n Shader::new(b\"R2D2\")\n\n .with_color(rgb(0, 0, 255)) // Blue\n", "file_path": "src/shaders.rs", "rank": 55, "score": 30462.910128639407 }, { "content": "\n\n // Shader::new(b\"Pokemon\")\n\n // .as_wifi_only()\n\n // .with_color(rgb(255, 255, 0))\n\n // .transition_to(rgb(0, 176, 252), TIME)\n\n // .transition_to(rgb(255, 255, 0), TIME)\n\n // .build()\n\n\n\n\n\n // Shader::new(b\"DoctorWho\")\n\n // .as_wifi_only()\n\n // .with_color(rgb(0, 0, 255))\n\n // .transition_to(rgb(0, 0, 255), TIME)\n\n // .transition_to(rgb(255, 255, 255), TIME)\n\n // .transition_to(rgb(0, 0, 255), TIME)\n\n // .transition_to(rgb(0, 0, 255), TIME)\n\n // .build(),\n\n\n\n // Shader::new(b\"Pirate\")\n\n // .as_wifi_only()\n", "file_path": "src/shaders.rs", "rank": 56, "score": 30462.772861839854 }, { "content": "\n\n // Shader::new(b\"Birthday\")\n\n // .as_wifi_only()\n\n // .with_color(rgb(255, 0, 70))\n\n // // .transition_to(rgb(124, 142, 208), TIME)\n\n // .transition_to(rgb(255, 213, 0), TIME)\n\n // .transition_to(rgb(77, 255, 0), TIME)\n\n // .transition_to(rgb(255, 0, 70), TIME)\n\n // .build(),\n\n\n\n\n\n // Shader::new(b\"Pride\")\n\n // .with_color(rgb(255, 0, 0))\n\n // .transition_to(rgb(255, 60, 0), TIME)\n\n // .transition_to(rgb(255, 200, 0), TIME)\n\n // .transition_to(rgb(0, 255, 0), TIME)\n\n // .transition_to(rgb(0, 0, 255), TIME)\n\n // .transition_to(rgb(60, 0, 255), TIME)\n\n // .transition_to(rgb(255, 0, 0), TIME)\n\n // .build(),\n", "file_path": "src/shaders.rs", "rank": 57, "score": 30461.5864369416 }, { "content": " .transition_to(rgb(77, 255, 106), TIME)\n\n .transition_to(rgb(0, 255, 0), TIME)\n\n .build(),\n\n\n\n\n\n Shader::new(b\"Honeycomb\")\n\n .with_color(rgb(255, 180, 0)) // Orange\n\n .transition_to(rgb(255, 0, 0), TIME) // Red\n\n .transition_to(rgb(255, 255, 0), TIME)\n\n .transition_to(rgb(255, 180, 0), TIME) // Orange\n\n .build(),\n\n\n\n \n\n // Shader::new(b\"Halloween\")\n\n // .as_wifi_only() \n\n // .with_color(rgb(255, 64, 0))\n\n // .transition_to(rgb(255,0,0), TIME)\n\n // .transition_to(rgb(0,0,0), TIME)\n\n // .transition_to(rgb(0, 0, 0), TIME)\n\n // .transition_to(rgb(255, 64, 0), TIME)\n", "file_path": "src/shaders.rs", "rank": 58, "score": 30461.514311307168 }, { "content": "use crate::pixel_engine::color::*;\n\nuse crate::pixel_engine::shader::*;\n\nuse teensycore::{system::vector::*, vector};\n\nuse teensycore::clock::uNano;\n\n\n\nconst TIME: uNano = 1000 * crate::WORLD_MUTIPLIER;\n\n\n", "file_path": "src/shaders.rs", "rank": 59, "score": 30461.29635009593 }, { "content": " .transition_to(rgb(255, 0, 0), TIME/2) // Red\n\n // .transition_to(r gb(89, 100, 255), TIME) // Blue-white\n\n .transition_to(rgb(0, 0, 255), TIME) // Blue\n\n .build(),\n\n\n\n Shader::new(b\"RetroFuturistic\")\n\n .with_color(rgb(255, 0, 173))\n\n .transition_to(rgb(255, 0, 0), TIME)\n\n .transition_to(rgb(115, 6, 121), TIME)\n\n .transition_to(rgb(4, 110, 106), TIME)\n\n .transition_to(rgb(255, 255, 0), TIME)\n\n .transition_to(rgb(255, 0, 173), TIME)\n\n .build()\n\n\n\n // Shader::new(b\"Thanksgiving\")\n\n // .as_wifi_only()\n\n // .with_color(rgb(255, 98, 0))\n\n // .transition_to(rgb(255, 0, 0), TIME)\n\n // .transition_to(rgb(255, 98, 0), TIME)\n\n // .build(),\n", "file_path": "src/shaders.rs", "rank": 60, "score": 30461.112642653112 }, { "content": " // .with_color(rgb(255, 0, 0))\n\n // .transition_to(rgb(255, 255, 255), TIME)\n\n // .transition_to(rgb(255, 0, 0), TIME)\n\n // .build(),\n\n\n\n // Shader::new(b\"BattlestarGalactica\")\n\n // .as_wifi_only()\n\n // .with_color(rgb(255, 255, 255))\n\n // .transition_to(rgb(255, 0, 0), TIME)\n\n // .transition_to(rgb(200, 200, 200), TIME)\n\n // .transition_to(rgb(255, 0, 0), TIME)\n\n // .transition_to(rgb(255, 255, 255), TIME)\n\n // .build(),\n\n \n\n );\n\n}", "file_path": "src/shaders.rs", "rank": 61, "score": 30460.962018038037 }, { "content": " .transition_to(rgb(0, 255, 0), TIME)\n\n .build(),\n\n\n\n\n\n Shader::new(b\"Ghibli\")\n\n .with_color(rgb(0, 255, 0))\n\n .transition_to(rgb(255, 255, 0), TIME) \n\n .transition_to(rgb(0, 255, 0), TIME) \n\n .transition_to(rgb(0, 0, 255), TIME) \n\n .transition_to(rgb(0, 255, 0), TIME) \n\n .build(),\n\n\n\n\n\n Shader::new(b\"80SciFi\")\n\n .as_disabled()\n\n .with_color(rgb(0, 145, 255)) // Tron Light Blue\n\n .transition_to(rgb(106, 0, 255), TIME) // Jazzersize Purple\n\n .transition_to(rgb(255, 0, 204), TIME) // Pink\n\n .transition_to(rgb(255,255,0), TIME) // Yellow\n\n .transition_to(rgb(0, 145, 255), TIME) // Tron Light Blue\n", "file_path": "src/shaders.rs", "rank": 62, "score": 30460.88585114025 }, { "content": " // .transition_to(rgb(252, 210, 0), TIME)\n\n // .transition_to(rgb(255,0,0), TIME)\n\n // .transition_to(rgb(255,0,0), TIME)\n\n // .build(),\n\n \n\n Shader::new(b\"Dinosaur\")\n\n .with_color(rgb(0, 0, 255))\n\n .transition_to(rgb(0, 255, 0), TIME)\n\n .transition_to(rgb(64, 0, 148), TIME)\n\n .transition_to(rgb(0, 0, 255), TIME)\n\n .set_segment_count(2) // Override segment count because colors are dupicated\n\n .build(),\n\n \n\n Shader::new(b\"Rainbow\")\n\n .with_color(rgb(255,0, 0))\n\n .transition_to(rgb(0,0,255), TIME)\n\n .transition_to(rgb(0,255,0), TIME)\n\n .transition_to(rgb(255,0,0), TIME)\n\n .set_segment_count(10) // This looks bad in randomized mode, so make sure it never goes there. \n\n .build(),\n", "file_path": "src/shaders.rs", "rank": 63, "score": 30460.874617907935 }, { "content": "pub mod ws2812;\n\npub mod max31820;\n\npub mod esp8266;", "file_path": "src/drivers.rs", "rank": 64, "score": 29908.920238757626 }, { "content": "use teensycore::*;\n\nuse teensycore::clock::*;\n\nuse teensycore::debug::*;\n\nuse teensycore::phys::pins::*;\n\n\n\nconst fn micros(time: uNano) -> uNano {\n\n return MICRO_TO_NANO * time;\n\n}\n\n\n\npub struct Max31820Driver {\n\n pin: usize,\n\n}\n\n\n\nimpl Max31820Driver {\n\n pub fn new(data_pin: usize) -> Self {\n\n pin_mux_config(data_pin, Alt::Alt5);\n\n return Max31820Driver {\n\n pin: data_pin,\n\n };\n\n }\n", "file_path": "src/drivers/max31820.rs", "rank": 65, "score": 28358.19316869953 }, { "content": "//! This module is a driver for the ESP8266 WiFi\n\n//! peripheral. It includes wrappers for most\n\n//! of the AT+ instruction set.\n\n\n\nuse teensycore::*;\n\nuse teensycore::serio::*;\n\nuse teensycore::clock::*;\n\nuse teensycore::math::*;\n\nuse teensycore::system::str::*;\n\n\n\npub enum WifiMode {\n\n Client = 0x1,\n\n Host = 0x2,\n\n All = 0x3,\n\n}\n\n\n\n/// The AT command checks whether the system is\n\n/// in a healthy state.\n", "file_path": "src/drivers/esp8266.rs", "rank": 66, "score": 28353.49270387701 }, { "content": " iteration: usize,\n\n}\n\n\n\nimpl<const SIZE: usize> WS2812Driver<SIZE> {\n\n pub const fn new(pin: usize) -> WS2812Driver::<SIZE> {\n\n return WS2812Driver::<SIZE> {\n\n nodes: [Node::new(0, 0, 0); SIZE],\n\n pin: pin,\n\n iteration: 0,\n\n }\n\n }\n\n\n\n pub fn init(&self) {\n\n // Configure the pin\n\n pin_mode(self.pin, Mode::Output);\n\n pin_pad_config(self.pin, PadConfig {\n\n hysterisis: false, // HYS\n\n resistance: PullUpDown::PullDown100k, // PUS\n\n pull_keep: PullKeep::Pull, // PUE\n\n pull_keep_en: false, // PKE\n", "file_path": "src/drivers/ws2812.rs", "rank": 67, "score": 28350.159662141836 }, { "content": " let mut crc = 0u64;\n\n for bit in 0 .. 8 {\n\n crc |= (self.read_bit() as u64) << bit;\n\n }\n\n\n\n debug_u64(crc, b\"crc\");\n\n return Some(rom_code);\n\n }\n\n\n\n return None;\n\n }\n\n\n\n #[allow(dead_code)]\n\n fn cmd_match_rom(&self, rom: u64) {\n\n // Tell the bus we're about to address a specific node\n\n self.send_command(0x55);\n\n let mut bit_index: usize = 0;\n\n while bit_index < 64 {\n\n let bit = rom & (0x1 << bit_index);\n\n if bit > 0 {\n", "file_path": "src/drivers/max31820.rs", "rank": 68, "score": 28348.172494806637 }, { "content": " open_drain: false, // ODE\n\n speed: PinSpeed::Max200MHz, // SPEED\n\n drive_strength: DriveStrength::MaxDiv7, // DSE\n\n fast_slew_rate: true, // SRE\n\n });\n\n\n\n pin_out(self.pin, Power::Low);\n\n }\n\n\n\n pub fn set_color(&mut self, index: usize, rgb: u32) {\n\n // Don't process requests out of bounds\n\n if index >= SIZE {\n\n return;\n\n }\n\n\n\n self.nodes[index].red = ((rgb & 0xFF0000) >> 16) as u8;\n\n self.nodes[index].green = ((rgb & 0x00FF00) >> 8) as u8;\n\n self.nodes[index].blue = ((rgb & 0x0000FF) >> 0) as u8;\n\n }\n\n\n", "file_path": "src/drivers/ws2812.rs", "rank": 69, "score": 28347.17971255179 }, { "content": "\n\n pub fn iterate(&mut self) {\n\n self.iteration += 1;\n\n }\n\n\n\n pub fn flush(&self) {\n\n disable_interrupts();\n\n let mut node_index = 0;\n\n let mut bit_index: i32;\n\n \n\n for _ in 0 ..= 1 {\n\n while node_index < SIZE {\n\n let node = self.nodes[node_index];\n\n let color: u32 = \n\n ((node.green as u32) << 16) |\n\n ((node.red as u32) << 8) |\n\n (node.blue as u32); \n\n\n\n\n\n // Now we need to process each bit\n", "file_path": "src/drivers/ws2812.rs", "rank": 70, "score": 28346.562017983415 }, { "content": " // Rest period\n\n wait_ns(micros(1));\n\n\n\n // Read 64 bits \n\n let mut family_code = 0;\n\n for bit in 0 .. 8 {\n\n let bit_val = self.read_bit();\n\n debug_u64(bit_val as u64, b\"bit\");\n\n family_code |= (bit_val as u64) << bit;\n\n }\n\n\n\n debug_u64(family_code, b\"family code\");\n\n\n\n let mut rom_code = 0; \n\n for bit in 0 .. 48 {\n\n rom_code |= (self.read_bit() as u64) << bit;\n\n }\n\n\n\n debug_u64(rom_code, b\"rom code\");\n\n\n", "file_path": "src/drivers/max31820.rs", "rank": 71, "score": 28345.257942857537 }, { "content": " let byte_msb = self.read_byte() as u16; \n\n let result: u16 = (byte_msb << 8) | byte_lsb;\n\n\n\n // Take the first 10 bits\n\n return Some(result);\n\n }\n\n\n\n fn cmd_skip_rom(&self) -> Option<bool> {\n\n if self.initialize() {\n\n self.send_command(0xCC);\n\n return Some(true);\n\n }\n\n\n\n return None;\n\n }\n\n\n\n fn cmd_read_rom(&self) -> Option<u64> {\n\n if self.initialize() {\n\n self.send_command(0x33);\n\n \n", "file_path": "src/drivers/max31820.rs", "rank": 72, "score": 28344.98484329109 }, { "content": "use teensycore::phys::irq::{disable_interrupts, enable_interrupts};\n\nuse teensycore::phys::pins::*;\n\nuse teensycore::{wait_ns, wait_exact_ns, MICRO_TO_NANO, clock::uNano};\n\n\n\n// 800MHz\n\nconst T0_H: uNano = 110; // ns\n\nconst T0_L: uNano = 600;\n\nconst T1_H: uNano = 600; // ns\n\nconst T1_L: uNano = 600;\n\n\n\n// const T0_H: uNano = 400; // ns\n\n// const T0_L: uNano = 850;\n\n// const T1_H: uNano = 800; // ns\n\n// const T1_L: uNano = 450;\n\n\n\n#[derive(Clone, Copy)]\n", "file_path": "src/drivers/ws2812.rs", "rank": 73, "score": 28344.725183329756 }, { "content": " self.write_1();\n\n } else {\n\n self.write_0();\n\n }\n\n bit_index += 1;\n\n }\n\n }\n\n\n\n pub fn read_rom(&self) -> Option<u64> {\n\n return self.cmd_read_rom();\n\n }\n\n\n\n pub fn read_temperature(&self) -> Option<u16> {\n\n // Get ROM\n\n if self.initialize() {\n\n self.cmd_skip_rom();\n\n self.cmd_convert_t();\n\n self.cmd_skip_rom();\n\n return self.cmd_read_scratchpad();\n\n } else {\n", "file_path": "src/drivers/max31820.rs", "rank": 74, "score": 28344.054060083967 }, { "content": " self.pull_low();\n\n wait_ns(micros(500));\n\n\n\n // Allow float\n\n self.as_input();\n\n wait_ns(micros(70));\n\n\n\n // Wait a while then sample\n\n let target = nanos() + micros(240 - 70);\n\n let mut result = 1;\n\n while nanos() < target {\n\n if pin_read(self.pin) == 0 {\n\n result = 0;\n\n }\n\n }\n\n \n\n // Wait 410 micros\n\n wait_ns(micros(240));\n\n\n\n // If result is 0, that's an alive pulse.\n", "file_path": "src/drivers/max31820.rs", "rank": 75, "score": 28344.033394569447 }, { "content": "\n\n // Determine the result based on which bucket\n\n // got more hits.\n\n if sig_high > sig_low {\n\n return 1;\n\n } else {\n\n return 0;\n\n }\n\n } \n\n\n\n fn read_byte(&self) -> u8 {\n\n let mut result = 0;\n\n for bit in 0 .. 8 {\n\n result |= self.read_bit() << bit;\n\n }\n\n return result;\n\n }\n\n}", "file_path": "src/drivers/max31820.rs", "rank": 76, "score": 28343.965286214756 }, { "content": " }\n\n\n\n fn pull_low(&self) {\n\n self.as_output();\n\n pin_out(self.pin, Power::Low);\n\n }\n\n\n\n\n\n fn initialize(&self) -> bool {\n\n for _ in 0 .. 125 {\n\n if self.reset() {\n\n return true;\n\n }\n\n }\n\n\n\n return false;\n\n }\n\n\n\n fn reset(&self) -> bool{\n\n // Write low\n", "file_path": "src/drivers/max31820.rs", "rank": 77, "score": 28342.865101334744 }, { "content": "\n\n let mut sig_high = 0;\n\n let mut sig_low = 0;\n\n let duration = nanos() + micros(1);\n\n\n\n loop {\n\n let signal = pin_read(self.pin);\n\n if signal == 0 {\n\n sig_low += 1;\n\n } else {\n\n sig_high += 1;\n\n }\n\n\n\n if nanos() > duration {\n\n break;\n\n }\n\n }\n\n \n\n // Wait the remainder of the time slot\n\n wait_ns(micros(53));\n", "file_path": "src/drivers/max31820.rs", "rank": 78, "score": 28342.15577246034 }, { "content": " return result == 0;\n\n }\n\n\n\n fn send_command(&self, command: u8) {\n\n for bit in 0 .. 8 {\n\n let signal = command & (0x1 << bit);\n\n if signal > 0 {\n\n self.write_1();\n\n } else {\n\n self.write_0();\n\n }\n\n }\n\n\n\n self.as_input();\n\n }\n\n\n\n fn write_1(&self) {\n\n self.pull_low();\n\n wait_ns(micros(10));\n\n // Release\n", "file_path": "src/drivers/max31820.rs", "rank": 79, "score": 28341.787418548334 }, { "content": " bit_index = 23;\n\n while bit_index >= 0 {\n\n let bit = color & (0x1 << bit_index);\n\n if bit > 0 {\n\n self.on_bit();\n\n } else {\n\n self.off_bit();\n\n }\n\n bit_index -= 1;\n\n }\n\n\n\n node_index += 1;\n\n }\n\n }\n\n\n\n self.rest();\n\n enable_interrupts();\n\n \n\n }\n\n}", "file_path": "src/drivers/ws2812.rs", "rank": 80, "score": 28341.772992360453 }, { "content": " pin_mode(self.pin, Mode::Input);\n\n wait_ns(micros(65));\n\n } \n\n\n\n fn write_0(&self) {\n\n self.pull_low();\n\n wait_ns(micros( 65));\n\n\n\n // Release\n\n pin_mode(self.pin, Mode::Input);\n\n wait_ns(micros(5));\n\n }\n\n\n\n fn read_bit(&self) -> u8 {\n\n // Initiate a read slot\n\n self.pull_low();\n\n wait_ns( micros(3));\n\n\n\n self.as_input();\n\n wait_ns(micros(12));\n", "file_path": "src/drivers/max31820.rs", "rank": 81, "score": 28341.23404825653 }, { "content": " return None;\n\n }\n\n }\n\n\n\n fn as_input(&self) {\n\n pin_mode(self.pin, Mode::Input);\n\n pin_pad_config(self.pin, PadConfig { \n\n hysterisis: false, \n\n resistance: PullUpDown::PullDown100k, \n\n pull_keep: PullKeep::Pull, \n\n pull_keep_en: false, \n\n open_drain: true, \n\n speed: PinSpeed::Max200MHz, \n\n drive_strength: DriveStrength::Max, \n\n fast_slew_rate: false \n\n });\n\n }\n\n\n\n fn as_output(&self) {\n\n pin_mode(self.pin, Mode::Output);\n", "file_path": "src/drivers/max31820.rs", "rank": 82, "score": 28340.911479656 }, { "content": "\n\n fn cmd_convert_t(&self) {\n\n self.send_command(0x44); // Convert T command\n\n // Read until receiving a 1\n\n // TODO: Add an oh-shit detector\n\n loop {\n\n let bit = self.read_bit();\n\n // Temperature conversino is done\n\n if bit > 0 {\n\n break;\n\n }\n\n wait_ns(micros(1));\n\n }\n\n }\n\n\n\n fn cmd_read_scratchpad(&self) -> Option<u16> {\n\n self.send_command(0xBE);\n\n\n\n // Read bytes\n\n let byte_lsb = self.read_byte() as u16;\n", "file_path": "src/drivers/max31820.rs", "rank": 83, "score": 28339.465625020846 }, { "content": " #[inline] \n\n fn on_bit(&self) {\n\n pin_out(self.pin, Power::High);\n\n wait_exact_ns(T1_H);\n\n pin_out(self.pin, Power::Low);\n\n wait_exact_ns(T1_L);\n\n }\n\n \n\n #[inline]\n\n fn off_bit(&self) { \n\n pin_out(self.pin, Power::High);\n\n wait_exact_ns(T0_H);\n\n pin_out(self.pin, Power::Low);\n\n wait_exact_ns(T0_L);\n\n }\n\n\n\n fn rest(&self) {\n\n pin_out(self.pin, Power::Low);\n\n wait_ns(500 * MICRO_TO_NANO);\n\n }\n", "file_path": "src/drivers/ws2812.rs", "rank": 84, "score": 28339.465625020846 }, { "content": "use teensycore::clock::uNano;\n\n\n\n/// Context represents attributes for a single hexagon node.\n\n/// Each unit in the overall design will have its own corresponding\n\n/// context instance.\n\n#[derive(Copy, Clone)]\n\npub struct Context { \n\n pub initialized: bool,\n\n pub node_id: uNano, // These are time units because they will be integrated with time in math\n\n pub total_nodes: uNano,\n\n pub temperature: i32,\n\n pub registers: [i32; 10],\n\n pub offset: uNano,\n\n}\n\n\n\nimpl Context {\n\n pub fn empty() -> Self {\n\n return Context {\n\n initialized: false,\n\n node_id: 0,\n\n total_nodes: 0,\n\n temperature: 0,\n\n registers: [0; 10],\n\n offset: 0,\n\n };\n\n }\n\n}", "file_path": "src/pixel_engine/context.rs", "rank": 85, "score": 28215.26892488246 }, { "content": "use teensycore::math::{max, min};\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Color {\n\n pub r: u8,\n\n pub g: u8,\n\n pub b: u8,\n\n}\n\n\n", "file_path": "src/pixel_engine/color.rs", "rank": 86, "score": 28038.384631959907 }, { "content": " }\n\n\n\n return rgb(\n\n ((rgb_prime.0 + m) * 255.0) as u8,\n\n ((rgb_prime.1 + m) * 255.0) as u8,\n\n ((rgb_prime.2 + m) * 255.0) as u8,\n\n );\n\n}\n\n\n\nimpl Color {\n\n pub fn as_hex(&self) -> u32 {\n\n return ((self.r as u32) << 16) |\n\n ((self.g as u32) << 8) |\n\n (self.b as u32); \n\n }\n\n\n\n pub fn blank() -> Self {\n\n return Color {\n\n r: 255,\n\n g: 255,\n", "file_path": "src/pixel_engine/color.rs", "rank": 87, "score": 28037.438751370795 }, { "content": " b: 255,\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub mod test_colors {\n\n use crate::pixel_engine::math::interpolate;\n\n use std::println;\n\n use super::*;\n\n\n\n #[test]\n\n fn test_hsv() {\n\n // assert_eq!(hsv(39.0, 1.0, 1.0).g, 165);\n\n\n\n // let (h, s, v) = rgb_to_hsv(0,0,128);\n\n // assert_eq!(h, 240.0);\n\n // assert_eq!(s, 1.0);\n\n // assert_eq!(v, 0.5019608);\n\n // assert_eq!(hsv(h, s, v).b, 128);\n", "file_path": "src/pixel_engine/color.rs", "rank": 88, "score": 28030.07784148533 }, { "content": " // let start = (50, 0, 255);\n\n // let end = (255, 0, 0);\n\n // let r = interpolate(start.0, end.0, time, 100);\n\n // let g = interpolate(start.1, end.1, time, 100);\n\n // let b = interpolate(start.2, end.2, time, 100);\n\n // let (h, s, v) = rgb_to_hsv(r as u8, g as u8, b as u8);\n\n\n\n // println!(\"rgb({}, {}, {}) hsv({}, {}, {})\", r, g, b, h, s, v);\n\n // }\n\n\n\n std::println!(\"hi\");\n\n let (h, s, v) = rgb_to_hsv(148, 0, 133);\n\n // assert_eq!(h, 306.0);\n\n }\n\n}", "file_path": "src/pixel_engine/color.rs", "rank": 89, "score": 28026.889866336427 }, { "content": " // assert_eq!(hsv(0.0).r, 255);\n\n }\n\n\n\n #[test]\n\n fn test_interpolated_hsv() {\n\n\n\n\n\n // // Interpolate\n\n // for time in 0 .. 100 {\n\n // let start = (255, 0, 0);\n\n // let end = (50, 0, 255);\n\n // let r = interpolate(start.0, end.0, time, 100);\n\n // let g = interpolate(start.1, end.1, time, 100);\n\n // let b = interpolate(start.2, end.2, time, 100);\n\n // let (h, s, v) = rgb_to_hsv(r as u8, g as u8, b as u8);\n\n\n\n // println!(\"rgb({}, {}, {}) hsv({}, {}, {})\", r, g, b, h, s, v);\n\n // }\n\n\n\n // for time in 0 .. 100 {\n", "file_path": "src/pixel_engine/color.rs", "rank": 90, "score": 28026.79787794964 }, { "content": " } else if c_max == rgb_prime.1 {\n\n // G is dominant\n\n h = 60.0 * (((rgb_prime.2 - rgb_prime.0) / delta) + 2.0);\n\n } else {\n\n // B is dominant\n\n h = 60.0 * (((rgb_prime.0 - rgb_prime.1) / delta) + 4.0);\n\n }\n\n\n\n \n\n if h < 0.0 {\n\n h = 360.0 + h;\n\n } \n\n \n\n\n\n if h > 360.0 {\n\n h = h - 360.0;\n\n }\n\n\n\n return (h, s, v);\n\n}\n\n\n", "file_path": "src/pixel_engine/color.rs", "rank": 91, "score": 28023.43722201473 }, { "content": "use teensycore::mem::*;\n\nuse teensycore::clock::uNano;\n\nuse crate::pixel_engine::math::*;\n\nuse crate::pixel_engine::context::*;\n\n#[derive(Copy, Clone)]\n\npub struct EffectNode {\n\n pub duration: uNano,\n\n pub target: u32,\n\n hold: bool,\n\n next: Option<*mut EffectNode>,\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub struct Effect {\n\n pub name: &'static [u8],\n\n initializer: Option<fn(effect: &mut Effect, context: &Context) -> Context>,\n\n root: Option<*mut EffectNode>,\n\n pub total_time: uNano,\n\n pub disabled: bool,\n\n pub max_color_segments: Option<usize>,\n", "file_path": "src/pixel_engine/effect.rs", "rank": 92, "score": 27896.393388055225 }, { "content": " pub min_hex_units: Option<usize>,\n\n pub regs: [i32; 6],\n\n}\n\n\n\nimpl Effect {\n\n pub fn new(name: &'static [u8]) -> Self {\n\n return Effect { \n\n name: name,\n\n initializer: None,\n\n root: None,\n\n total_time: 0,\n\n max_color_segments: None,\n\n disabled: false,\n\n min_hex_units: None,\n\n regs: [0; 6],\n\n };\n\n }\n\n\n\n pub fn with_min_hex_units(&mut self, min_hex_units: usize) -> &mut Self {\n\n self.min_hex_units = Some(min_hex_units);\n", "file_path": "src/pixel_engine/effect.rs", "rank": 93, "score": 27895.51228634223 }, { "content": " return self;\n\n }\n\n\n\n pub fn with_max_color_segments(&mut self, max_color_segments: usize) -> &mut Self {\n\n self.max_color_segments = Some(max_color_segments);\n\n return self;\n\n }\n\n\n\n pub fn as_disabled(&mut self) -> &mut Self {\n\n self.disabled = true;\n\n return self;\n\n }\n\n\n\n pub fn with_initializer(&mut self, func: fn(effect: &mut Effect, context: &Context) -> Context) -> &mut Self {\n\n self.initializer = Some(func);\n\n return self;\n\n }\n\n\n\n fn add_node(&mut self, node: EffectNode) {\n\n let ptr = alloc();\n", "file_path": "src/pixel_engine/effect.rs", "rank": 94, "score": 27890.97023737888 }, { "content": " hold: false,\n\n next: None,\n\n });\n\n\n\n self.total_time += duration;\n\n return self;\n\n }\n\n\n\n pub fn transition_to_and_hold(&mut self, target: u32, duration: uNano) -> &mut Self {\n\n self.add_node(EffectNode {\n\n duration: duration,\n\n target: target,\n\n hold: true,\n\n next: None,\n\n });\n\n\n\n self.total_time += duration;\n\n return self;\n\n }\n\n\n", "file_path": "src/pixel_engine/effect.rs", "rank": 95, "score": 27887.051417942108 }, { "content": " unsafe {\n\n (*ptr) = node;\n\n }\n\n\n\n if self.root.is_none() {\n\n self.root = Some(ptr);\n\n } else {\n\n let mut tail_ptr = self.root.unwrap();\n\n while unsafe { tail_ptr.as_mut().unwrap().next.is_some() } {\n\n tail_ptr = unsafe { (*tail_ptr).next.unwrap() };\n\n }\n\n\n\n unsafe { (*tail_ptr).next = Some(ptr) };\n\n }\n\n }\n\n\n\n pub fn transition_to(&mut self, target: u32, duration: uNano) -> &mut Self {\n\n self.add_node(EffectNode {\n\n duration: duration,\n\n target: target,\n", "file_path": "src/pixel_engine/effect.rs", "rank": 96, "score": 27884.510996329154 }, { "content": "\n\n#[cfg(test)]\n\npub mod test_effects {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_effects() {\n\n // let fx = Effect::new(b\"Sample\")\n\n // .randomize(|node_id, total_nodes| {\n\n // return rand() % 100;\n\n // })\n\n // .transition_to_without_delay(100, 500)\n\n // .transition_to(0, 500)\n\n // .build();\n\n\n\n\n\n }\n\n}", "file_path": "src/pixel_engine/effect.rs", "rank": 97, "score": 27883.382202848705 }, { "content": "\n\n pub fn build(&mut self) -> Self {\n\n return self.clone();\n\n }\n\n\n\n pub fn process(&mut self, ctx: &mut Context, current_time: uNano) -> (uNano, Context) {\n\n let mut next_context = ctx.clone();\n\n\n\n if !ctx.initialized {\n\n if self.initializer.is_some() {\n\n next_context = self.initializer.unwrap()(self, &ctx);\n\n }\n\n next_context.initialized = true;\n\n }\n\n \n\n let normalized_time = (next_context.offset + current_time) % self.total_time;\n\n\n\n if self.root.is_none() {\n\n return (0, next_context);\n\n } else {\n", "file_path": "src/pixel_engine/effect.rs", "rank": 98, "score": 27883.199643315253 }, { "content": " let mut ptr = self.root.unwrap();\n\n\n\n let mut start_time = 0;\n\n let mut elapsed = 0;\n\n\n\n loop {\n\n let node = unsafe { *ptr };\n\n let node_duration = match node.hold {\n\n false => node.duration,\n\n true => node.duration + next_context.offset,\n\n };\n\n\n\n \n\n if elapsed + node_duration > normalized_time {\n\n break;\n\n }\n\n \n\n elapsed += node.duration;\n\n start_time = node.target;\n\n if node.next.is_some() {\n", "file_path": "src/pixel_engine/effect.rs", "rank": 99, "score": 27877.135125586054 } ]
Rust
src/lib.rs
tickbh/td_rthreadpool
d7f23f07b56777afdef2e3c89324339d2ee97a32
extern crate libc; use std::panic; use std::panic::AssertUnwindSafe; use std::sync::mpsc::{channel, Sender, Receiver, SyncSender, sync_channel, RecvError}; use std::sync::{Arc, Mutex}; use std::thread::{self, JoinHandle}; mod mutex; pub use mutex::{ReentrantMutex, ReentrantMutexGuard}; trait FnBox { fn call_box(self: Box<Self>); } impl<F: FnOnce()> FnBox for F { fn call_box(self: Box<F>) { (*self)() } } type Thunk<'a> = Box<FnBox + Send + 'a>; enum Message { NewJob(Thunk<'static>), Join, } pub struct ThreadPool { threads: Vec<ThreadData>, job_sender: Sender<Message>, job_receiver: Arc<Mutex<Receiver<Message>>>, active_count: Arc<Mutex<usize>>, max_count: Arc<Mutex<usize>>, name: String, } struct ThreadData { _thread_join_handle: JoinHandle<()>, pool_sync_rx: Receiver<()>, thread_sync_tx: SyncSender<()>, } fn create_thread(job_receiver: Arc<Mutex<Receiver<Message>>>, active_count: Arc<Mutex<usize>>, name: String) -> ThreadData { let job_receiver = job_receiver.clone(); let (pool_sync_tx, pool_sync_rx) = sync_channel::<()>(0); let (thread_sync_tx, thread_sync_rx) = sync_channel::<()>(0); let thread = thread::Builder::new() .name(name) .spawn(move || { loop { let result = panic::catch_unwind(AssertUnwindSafe(|| { let message = { let lock = job_receiver.lock().unwrap(); lock.recv() }; match message { Ok(Message::NewJob(job)) => { *active_count.lock().unwrap() += 1; job.call_box(); *active_count.lock().unwrap() -= 1; } Ok(Message::Join) => { if pool_sync_tx.send(()).is_err() { return; } if thread_sync_rx.recv().is_err() { return; } } Err(..) => { return; } } })); if result.is_err() { println!("thread error is {:?}", result); } } }) .ok() .unwrap(); ThreadData { _thread_join_handle: thread, pool_sync_rx: pool_sync_rx, thread_sync_tx: thread_sync_tx, } } impl ThreadPool { pub fn new(n: usize) -> ThreadPool { Self::new_with_name(n, "unknow".to_string()) } pub fn new_with_name(n: usize, name: String) -> ThreadPool { assert!(n >= 1); let (job_sender, job_receiver) = channel(); let job_receiver = Arc::new(Mutex::new(job_receiver)); let active_count = Arc::new(Mutex::new(0)); let max_count = Arc::new(Mutex::new(n as usize)); let mut threads = Vec::with_capacity(n as usize); for _ in 0..n { let thread = create_thread(job_receiver.clone(), active_count.clone(), name.clone()); threads.push(thread); } ThreadPool { threads: threads, job_sender: job_sender, job_receiver: job_receiver.clone(), active_count: active_count, max_count: max_count, name: name, } } pub fn thread_count(&self) -> usize { self.threads.len() } pub fn execute<F>(&self, job: F) where F: FnOnce() + Send + 'static { self.job_sender.send(Message::NewJob(Box::new(job))).unwrap(); } pub fn join_all(&self) { for _ in 0..self.threads.len() { self.job_sender.send(Message::Join).unwrap(); } let mut worker_panic = false; for thread_data in &self.threads { if let Err(RecvError) = thread_data.pool_sync_rx.recv() { worker_panic = true; } } if worker_panic { panic!("Thread pool worker panicked"); } for thread_data in &self.threads { thread_data.thread_sync_tx.send(()).unwrap(); } } pub fn active_count(&self) -> usize { *self.active_count.lock().unwrap() } pub fn max_count(&self) -> usize { *self.max_count.lock().unwrap() } pub fn set_threads(&mut self, threads: usize) -> i32 { assert!(threads >= 1); if threads <= self.thread_count() { return -1; } for _ in 0..(threads - self.thread_count()) { let thread = create_thread(self.job_receiver.clone(), self.active_count.clone(), self.name.clone()); self.threads.push(thread); } *self.max_count.lock().unwrap() = threads; 0 } }
extern crate libc; use std::panic; use std::panic::AssertUnwindSafe; use std::sync::mpsc::{channel, Sender, Receiver, SyncSender, sync_channel, RecvError}; use std::sync::{Arc, Mutex}; use std::thread::{self, JoinHandle}; mod mutex; pub use mutex::{ReentrantMutex, ReentrantMutexGuard}; trait FnBox { fn call_box(self: Box<Self>); } impl<F: FnOnce()> FnBox for F { fn call_box(self: Box<F>) { (*self)() } } type Thunk<'a> = Box<FnBox + Send + 'a>; enum Message { NewJob(Thunk<'static>), Join, } pub struct ThreadPool { threads: Vec<ThreadData>, job_sender: Sender<Message>, job_receiver: Arc<Mutex<Receiver<Message>>>, active_count: Arc<Mutex<usize>>, max_count: Arc<Mutex<usize>>, name: String, } struct ThreadData { _thread_join_handle: JoinHandle<()>, pool_sync_rx: Receiver<()>, thread_sync_tx: SyncSender<()>, } fn create_thread(job_receiver: Arc<Mutex<Receiver<Message>>>, active_count: Arc<Mutex<usize>>, name: String) -> ThreadData { let job_receiver = job_receiver.clone(); let (pool_sync_tx, pool_sync_rx) = sync_channel::<()>(0); let (thread_sync_tx, thread_sync_rx) = sync_channel::<()>(0); let thread = thread::Builder::new() .name(name)
{ return; } } Err(..) => { return; } } })); if result.is_err() { println!("thread error is {:?}", result); } } }) .ok() .unwrap(); ThreadData { _thread_join_handle: thread, pool_sync_rx: pool_sync_rx, thread_sync_tx: thread_sync_tx, } } impl ThreadPool { pub fn new(n: usize) -> ThreadPool { Self::new_with_name(n, "unknow".to_string()) } pub fn new_with_name(n: usize, name: String) -> ThreadPool { assert!(n >= 1); let (job_sender, job_receiver) = channel(); let job_receiver = Arc::new(Mutex::new(job_receiver)); let active_count = Arc::new(Mutex::new(0)); let max_count = Arc::new(Mutex::new(n as usize)); let mut threads = Vec::with_capacity(n as usize); for _ in 0..n { let thread = create_thread(job_receiver.clone(), active_count.clone(), name.clone()); threads.push(thread); } ThreadPool { threads: threads, job_sender: job_sender, job_receiver: job_receiver.clone(), active_count: active_count, max_count: max_count, name: name, } } pub fn thread_count(&self) -> usize { self.threads.len() } pub fn execute<F>(&self, job: F) where F: FnOnce() + Send + 'static { self.job_sender.send(Message::NewJob(Box::new(job))).unwrap(); } pub fn join_all(&self) { for _ in 0..self.threads.len() { self.job_sender.send(Message::Join).unwrap(); } let mut worker_panic = false; for thread_data in &self.threads { if let Err(RecvError) = thread_data.pool_sync_rx.recv() { worker_panic = true; } } if worker_panic { panic!("Thread pool worker panicked"); } for thread_data in &self.threads { thread_data.thread_sync_tx.send(()).unwrap(); } } pub fn active_count(&self) -> usize { *self.active_count.lock().unwrap() } pub fn max_count(&self) -> usize { *self.max_count.lock().unwrap() } pub fn set_threads(&mut self, threads: usize) -> i32 { assert!(threads >= 1); if threads <= self.thread_count() { return -1; } for _ in 0..(threads - self.thread_count()) { let thread = create_thread(self.job_receiver.clone(), self.active_count.clone(), self.name.clone()); self.threads.push(thread); } *self.max_count.lock().unwrap() = threads; 0 } }
.spawn(move || { loop { let result = panic::catch_unwind(AssertUnwindSafe(|| { let message = { let lock = job_receiver.lock().unwrap(); lock.recv() }; match message { Ok(Message::NewJob(job)) => { *active_count.lock().unwrap() += 1; job.call_box(); *active_count.lock().unwrap() -= 1; } Ok(Message::Join) => { if pool_sync_tx.send(()).is_err() { return; } if thread_sync_rx.recv().is_err()
random
[ { "content": "#[test]\n\nfn join_all_with_thread_panic() {\n\n let mut pool = ThreadPool::new(TEST_TASKS);\n\n for _ in 0..4 {\n\n pool.execute(move || {\n\n panic!();\n\n });\n\n }\n\n\n\n sleep(Duration::from_millis(1000));\n\n\n\n let active_count = pool.active_count();\n\n assert_eq!(active_count, TEST_TASKS);\n\n let initialized_count = pool.max_count();\n\n assert_eq!(initialized_count, TEST_TASKS);\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 1, "score": 62856.267124180704 }, { "content": "pub fn map_result<T, U, F>(result: LockResult<T>, f: F) -> LockResult<U>\n\n where F: FnOnce(T) -> U\n\n{\n\n match result {\n\n Ok(t) => Ok(f(t)),\n\n Err(PoisonError { guard }) => Err(PoisonError::new(f(guard))),\n\n }\n\n}\n", "file_path": "src/mutex/poison.rs", "rank": 5, "score": 57538.011792457335 }, { "content": "#[test]\n\nfn join_all() {\n\n let pool = ThreadPool::new(4);\n\n\n\n let (tx_, rx) = sync::mpsc::channel();\n\n\n\n let tx = tx_.clone();\n\n pool.execute(move || {\n\n sleep(Duration::from_millis(1000));\n\n tx.send(2).unwrap();\n\n });\n\n\n\n let tx = tx_.clone();\n\n pool.execute(move || {\n\n tx.send(1).unwrap();\n\n });\n\n\n\n pool.join_all();\n\n\n\n let tx = tx_.clone();\n\n pool.execute(move || {\n\n tx.send(3).unwrap();\n\n });\n\n\n\n\n\n assert_eq!(rx.iter().take(3).collect::<Vec<_>>(), vec![1, 2, 3]);\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 7, "score": 50834.753732799145 }, { "content": "#[test]\n\nfn test_set_threads_decreasing() {\n\n let new_thread_amount = 2;\n\n let mut pool = ThreadPool::new(TEST_TASKS);\n\n for _ in 0..TEST_TASKS {\n\n pool.execute(move || {\n\n 1 + 1;\n\n });\n\n }\n\n pool.set_threads(new_thread_amount);\n\n for _ in 0..new_thread_amount {\n\n pool.execute(move || {\n\n loop {\n\n sleep(Duration::from_millis(10000));\n\n }\n\n });\n\n }\n\n sleep(Duration::from_millis(1000));\n\n assert_eq!(pool.active_count(), new_thread_amount);\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 8, "score": 43821.94655254303 }, { "content": "#![allow(dead_code)]\n\n#![allow(unused_mut)]\n\n#![allow(unreachable_code)]\n\n\n\n\n\n#[cfg(unix)]\n\n#[path = \"unix.rs\"]mod sys;\n\n#[cfg(windows)]\n\n#[path = \"windows.rs\"]mod sys;\n\n\n\nmod poison;\n\nmod remutex;\n\n\n\npub use self::remutex::{ReentrantMutex, ReentrantMutexGuard};\n", "file_path": "src/mutex/mod.rs", "rank": 9, "score": 34341.525177047035 }, { "content": "#[test]\n\nfn test_active_count() {\n\n let pool = ThreadPool::new(TEST_TASKS);\n\n for _ in 0..TEST_TASKS {\n\n pool.execute(move|| {\n\n loop {\n\n sleep(Duration::from_millis(10000));\n\n }\n\n });\n\n }\n\n sleep(Duration::from_millis(1000));\n\n let active_count = pool.active_count();\n\n assert_eq!(active_count, TEST_TASKS);\n\n let initialized_count = pool.max_count();\n\n assert_eq!(initialized_count, TEST_TASKS);\n\n}\n", "file_path": "tests/test.rs", "rank": 10, "score": 28929.331323307182 }, { "content": " mod os {\n\n use libc;\n\n\n\n #[repr(C)]\n\n pub struct pthread_mutex_t {\n\n value: libc::c_int,\n\n }\n\n pub type pthread_mutexattr_t = libc::c_long;\n\n pub const PTHREAD_MUTEX_RECURSIVE: libc::c_int = 1;\n\n }\n\n}\n", "file_path": "src/mutex/unix.rs", "rank": 11, "score": 14726.148370134033 }, { "content": "\n\n extern \"C\" {\n\n pub fn pthread_mutex_init(lock: *mut pthread_mutex_t,\n\n attr: *const pthread_mutexattr_t)\n\n -> libc::c_int;\n\n pub fn pthread_mutex_destroy(lock: *mut pthread_mutex_t) -> libc::c_int;\n\n pub fn pthread_mutex_lock(lock: *mut pthread_mutex_t) -> libc::c_int;\n\n pub fn pthread_mutex_trylock(lock: *mut pthread_mutex_t) -> libc::c_int;\n\n pub fn pthread_mutex_unlock(lock: *mut pthread_mutex_t) -> libc::c_int;\n\n\n\n pub fn pthread_mutexattr_init(attr: *mut pthread_mutexattr_t) -> libc::c_int;\n\n pub fn pthread_mutexattr_destroy(attr: *mut pthread_mutexattr_t) -> libc::c_int;\n\n pub fn pthread_mutexattr_settype(attr: *mut pthread_mutexattr_t,\n\n _type: libc::c_int)\n\n -> libc::c_int;\n\n }\n\n\n\n #[cfg(any(target_os = \"freebsd\", target_os = \"dragonfly\",\n\n target_os = \"bitrig\", target_os = \"openbsd\"))]\n\n mod os {\n", "file_path": "src/mutex/unix.rs", "rank": 12, "score": 14725.137189423425 }, { "content": " use libc;\n\n\n\n pub type pthread_mutex_t = *mut libc::c_void;\n\n pub type pthread_mutexattr_t = *mut libc::c_void;\n\n pub const PTHREAD_MUTEX_RECURSIVE: libc::c_int = 2;\n\n }\n\n\n\n #[cfg(any(target_os = \"macos\", target_os = \"ios\"))]\n\n mod os {\n\n use libc;\n\n\n\n #[cfg(any(target_arch = \"x86_64\",\n\n target_arch = \"aarch64\"))]\n\n const __PTHREAD_MUTEX_SIZE__: usize = 56;\n\n #[cfg(any(target_arch = \"x86\",\n\n target_arch = \"arm\"))]\n\n const __PTHREAD_MUTEX_SIZE__: usize = 40;\n\n\n\n #[repr(C)]\n\n pub struct pthread_mutex_t {\n", "file_path": "src/mutex/unix.rs", "rank": 13, "score": 14725.125926904859 }, { "content": "\n\n #[inline]\n\n pub unsafe fn try_lock(&self) -> bool {\n\n ffi::pthread_mutex_trylock(self.inner.get()) == 0\n\n }\n\n\n\n pub unsafe fn unlock(&self) {\n\n let result = ffi::pthread_mutex_unlock(self.inner.get());\n\n debug_assert_eq!(result, 0);\n\n }\n\n\n\n pub unsafe fn destroy(&self) {\n\n let result = ffi::pthread_mutex_destroy(self.inner.get());\n\n debug_assert_eq!(result, 0);\n\n }\n\n}\n\n\n\nmod ffi {\n\n use libc;\n\n pub use self::os::{pthread_mutex_t, pthread_mutexattr_t, PTHREAD_MUTEX_RECURSIVE};\n", "file_path": "src/mutex/unix.rs", "rank": 14, "score": 14724.001525239284 }, { "content": " __sig: libc::c_long,\n\n __opaque: [u8; __PTHREAD_MUTEX_SIZE__],\n\n }\n\n #[repr(C)]\n\n pub struct pthread_mutexattr_t {\n\n __sig: libc::c_long,\n\n __opaque: [u8; 16],\n\n }\n\n pub const PTHREAD_MUTEX_RECURSIVE: libc::c_int = 2;\n\n }\n\n\n\n #[cfg(target_os = \"linux\")]\n\n mod os {\n\n use libc;\n\n\n\n #[cfg(target_arch = \"x86_64\")]\n\n const __SIZEOF_PTHREAD_MUTEX_T: usize = 40 - 8;\n\n #[cfg(any(target_arch = \"x86\",\n\n target_arch = \"arm\",\n\n target_arch = \"mips\",\n", "file_path": "src/mutex/unix.rs", "rank": 15, "score": 14723.93471792781 }, { "content": " pub unsafe fn unlock(&self) {\n\n ffi::LeaveCriticalSection(self.inner.get());\n\n }\n\n\n\n #[inline]\n\n pub unsafe fn destroy(&self) {\n\n ffi::DeleteCriticalSection(self.inner.get());\n\n }\n\n}\n\n\n\n#[allow(non_camel_case_types)]\n\nmod ffi {\n\n use libc::{c_long, c_ulong, c_void};\n\n\n\n pub type BOOLEAN = u8;\n\n pub type LONG = c_long;\n\n pub type LPVOID = *mut c_void;\n\n pub type HANDLE = LPVOID;\n\n pub type ULONG_PTR = c_ulong;\n\n\n", "file_path": "src/mutex/windows.rs", "rank": 16, "score": 14723.101910711102 }, { "content": "// }\n\n\n\nimpl<'a, T: ?Sized> Drop for ReentrantMutexGuard<'a, T> {\n\n #[inline]\n\n fn drop(&mut self) {\n\n unsafe {\n\n self.__lock.poison.done(&self.__poison);\n\n self.__lock.inner.unlock();\n\n }\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::ReentrantMutex;\n\n use std::sync::Arc;\n\n use std::thread;\n\n\n\n #[test]\n", "file_path": "src/mutex/remutex.rs", "rank": 17, "score": 14722.487906091581 }, { "content": "/// common source of deadlocks.\n\npub struct ReentrantMutex<T: ?Sized> {\n\n inner: Box<sys::ReentrantMutex>,\n\n poison: poison::Flag,\n\n data: UnsafeCell<T>,\n\n}\n\n\n\nunsafe impl<T: Send + ?Sized> Send for ReentrantMutex<T> {}\n\nunsafe impl<T: Send + ?Sized> Sync for ReentrantMutex<T> {}\n\n\n\n#[must_use]\n\npub struct ReentrantMutexGuard<'a, T: ?Sized + 'a> {\n\n __lock: &'a ReentrantMutex<T>,\n\n __poison: poison::Guard,\n\n __marker: marker::PhantomData<*mut ()>, // !Send\n\n}\n\n\n\nimpl<T> ReentrantMutex<T> {\n\n /// Creates a new reentrant mutex in an unlocked state.\n\n pub fn new(t: T) -> ReentrantMutex<T> {\n", "file_path": "src/mutex/remutex.rs", "rank": 18, "score": 14722.45956881271 }, { "content": "// Copyright 2015 The Rust Project Developers\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse std::mem;\n\nuse std::cell::UnsafeCell;\n\n\n\npub struct ReentrantMutex {\n\n inner: UnsafeCell<ffi::pthread_mutex_t>,\n\n}\n\n\n\nunsafe impl Send for ReentrantMutex {}\n\nunsafe impl Sync for ReentrantMutex {}\n\n\n\nimpl ReentrantMutex {\n\n pub unsafe fn uninitialized() -> ReentrantMutex {\n", "file_path": "src/mutex/unix.rs", "rank": 19, "score": 14721.520324926047 }, { "content": " target_arch = \"mipsel\",\n\n target_arch = \"powerpc\"))]\n\n const __SIZEOF_PTHREAD_MUTEX_T: usize = 24 - 8;\n\n #[cfg(target_arch = \"aarch64\")]\n\n const __SIZEOF_PTHREAD_MUTEX_T: usize = 48 - 8;\n\n\n\n #[repr(C)]\n\n pub struct pthread_mutex_t {\n\n __align: libc::c_longlong,\n\n size: [u8; __SIZEOF_PTHREAD_MUTEX_T],\n\n }\n\n #[repr(C)]\n\n pub struct pthread_mutexattr_t {\n\n __align: libc::c_longlong,\n\n size: [u8; 16],\n\n }\n\n pub const PTHREAD_MUTEX_RECURSIVE: libc::c_int = 1;\n\n }\n\n\n\n #[cfg(target_os = \"android\")]\n", "file_path": "src/mutex/unix.rs", "rank": 20, "score": 14721.414407433469 }, { "content": "// Copyright 2015 The Rust Project Developers\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse std::mem;\n\nuse std::cell::UnsafeCell;\n\n\n\npub struct ReentrantMutex {\n\n inner: UnsafeCell<ffi::CRITICAL_SECTION>,\n\n}\n\n\n\nunsafe impl Send for ReentrantMutex {}\n\nunsafe impl Sync for ReentrantMutex {}\n\n\n\nimpl ReentrantMutex {\n\n #[inline]\n", "file_path": "src/mutex/windows.rs", "rank": 21, "score": 14720.904261415555 }, { "content": " let m = Arc::new(ReentrantMutex::new(()));\n\n let _lock1 = m.try_lock().unwrap();\n\n let _lock2 = m.try_lock().unwrap();\n\n {\n\n let m = m.clone();\n\n thread::spawn(move || {\n\n let lock = m.try_lock();\n\n assert!(lock.is_err());\n\n })\n\n .join();\n\n }\n\n let _lock3 = m.try_lock().unwrap();\n\n }\n\n\n\n}\n", "file_path": "src/mutex/remutex.rs", "rank": 22, "score": 14720.137396484286 }, { "content": "// Copyright 2015 The Rust Project Developers\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse std::fmt;\n\nuse std::marker;\n\nuse std::ops::{Deref, DerefMut};\n\nuse std::cell::UnsafeCell;\n\n\n\nuse super::sys;\n\nuse super::poison::{self, TryLockError, TryLockResult, LockResult};\n\n\n\n/// A re-entrant mutual exclusion\n\n///\n\n/// This mutex will block *other* threads waiting for the lock to become available. The thread\n\n/// which has already locked the mutex can lock it multiple times without blocking, preventing a\n", "file_path": "src/mutex/remutex.rs", "rank": 23, "score": 14720.091558388305 }, { "content": "// a totally private structure.\n\nunsafe impl Send for Flag {}\n\nunsafe impl Sync for Flag {}\n\n\n\nimpl Flag {\n\n #[inline]\n\n pub fn borrow(&self) -> LockResult<Guard> {\n\n let ret = Guard { panicking: thread::panicking() };\n\n if unsafe { *self.failed.get() } {\n\n Err(PoisonError::new(ret))\n\n } else {\n\n Ok(ret)\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn done(&self, guard: &Guard) {\n\n if !guard.panicking && thread::panicking() {\n\n unsafe {\n\n *self.failed.get() = true;\n", "file_path": "src/mutex/poison.rs", "rank": 24, "score": 14719.93916583671 }, { "content": " unsafe {\n\n let mut mutex = ReentrantMutex {\n\n inner: Box::new(sys::ReentrantMutex::uninitialized()),\n\n poison: poison::Flag::new(),\n\n data: UnsafeCell::new(t),\n\n };\n\n mutex.inner.init();\n\n mutex\n\n }\n\n }\n\n\n\n /// Acquires a mutex, blocking the current thread until it is able to do so.\n\n ///\n\n /// This function will block the caller until it is available to acquire the mutex.\n\n /// Upon returning, the thread is the only thread with the mutex held. When the thread\n\n /// calling this method already holds the lock, the call shall succeed without\n\n /// blocking.\n\n ///\n\n /// # Failure\n\n ///\n", "file_path": "src/mutex/remutex.rs", "rank": 25, "score": 14719.716937448664 }, { "content": "// Copyright 2014 The Rust Project Developers\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse std::cell::UnsafeCell;\n\nuse std::error::Error;\n\nuse std::fmt;\n\nuse std::thread;\n\n\n\nuse std::any::Any;\n\n\n\npub struct Flag {\n\n failed: UnsafeCell<bool>,\n\n}\n\n\n\n// This flag is only ever accessed with a lock previously held. Note that this\n", "file_path": "src/mutex/poison.rs", "rank": 26, "score": 14719.448952993407 }, { "content": " let m = Arc::new(ReentrantMutex::new(0));\n\n let lock = m.lock().unwrap();\n\n {\n\n let mc = m.clone();\n\n let handle = thread::spawn(move || {\n\n let lock = mc.lock().unwrap();\n\n assert_eq!(*lock, 4950);\n\n });\n\n for i in 0..100 {\n\n let mut lock = m.lock().unwrap();\n\n *lock += i;\n\n }\n\n drop(lock);\n\n drop(handle);\n\n }\n\n }\n\n\n\n #[test]\n\n #[allow(unused_must_use)]\n\n fn trylock_works() {\n", "file_path": "src/mutex/remutex.rs", "rank": 27, "score": 14719.219669959695 }, { "content": " #[allow(non_snake_case)]\n\n #[repr(C)]\n\n pub struct CRITICAL_SECTION {\n\n CriticalSectionDebug: LPVOID,\n\n LockCount: LONG,\n\n RecursionCount: LONG,\n\n OwningThread: HANDLE,\n\n LockSemaphore: HANDLE,\n\n SpinCount: ULONG_PTR,\n\n }\n\n\n\n extern \"system\" {\n\n pub fn InitializeCriticalSection(CriticalSection: *mut CRITICAL_SECTION);\n\n pub fn EnterCriticalSection(CriticalSection: *mut CRITICAL_SECTION);\n\n pub fn TryEnterCriticalSection(CriticalSection: *mut CRITICAL_SECTION) -> BOOLEAN;\n\n pub fn LeaveCriticalSection(CriticalSection: *mut CRITICAL_SECTION);\n\n pub fn DeleteCriticalSection(CriticalSection: *mut CRITICAL_SECTION);\n\n }\n\n}\n", "file_path": "src/mutex/windows.rs", "rank": 28, "score": 14719.100914314049 }, { "content": " /// If another user of this mutex panicked while holding the mutex, then\n\n /// this call will return failure if the mutex would otherwise be\n\n /// acquired.\n\n pub fn lock(&self) -> LockResult<ReentrantMutexGuard<T>> {\n\n unsafe { self.inner.lock() }\n\n ReentrantMutexGuard::new(&self)\n\n }\n\n\n\n /// Attempts to acquire this lock.\n\n ///\n\n /// If the lock could not be acquired at this time, then `Err` is returned.\n\n /// Otherwise, an RAII guard is returned.\n\n ///\n\n /// This function does not block.\n\n ///\n\n /// # Failure\n\n ///\n\n /// If another user of this mutex panicked while holding the mutex, then\n\n /// this call will return failure if the mutex would otherwise be\n\n /// acquired.\n", "file_path": "src/mutex/remutex.rs", "rank": 29, "score": 14718.411955714508 }, { "content": " pub unsafe fn uninitialized() -> ReentrantMutex {\n\n mem::uninitialized()\n\n }\n\n\n\n #[inline]\n\n pub unsafe fn init(&mut self) {\n\n ffi::InitializeCriticalSection(self.inner.get());\n\n }\n\n\n\n #[inline]\n\n pub unsafe fn lock(&self) {\n\n ffi::EnterCriticalSection(self.inner.get());\n\n }\n\n\n\n #[inline]\n\n pub unsafe fn try_lock(&self) -> bool {\n\n ffi::TryEnterCriticalSection(self.inner.get()) != 0\n\n }\n\n\n\n #[inline]\n", "file_path": "src/mutex/windows.rs", "rank": 30, "score": 14718.300069468985 }, { "content": " ReentrantMutex { inner: mem::uninitialized() }\n\n }\n\n\n\n pub unsafe fn init(&mut self) {\n\n let mut attr: ffi::pthread_mutexattr_t = mem::uninitialized();\n\n let result = ffi::pthread_mutexattr_init(&mut attr as *mut _);\n\n debug_assert_eq!(result, 0);\n\n let result = ffi::pthread_mutexattr_settype(&mut attr as *mut _,\n\n ffi::PTHREAD_MUTEX_RECURSIVE);\n\n debug_assert_eq!(result, 0);\n\n let result = ffi::pthread_mutex_init(self.inner.get(), &attr as *const _);\n\n debug_assert_eq!(result, 0);\n\n let result = ffi::pthread_mutexattr_destroy(&mut attr as *mut _);\n\n debug_assert_eq!(result, 0);\n\n }\n\n\n\n pub unsafe fn lock(&self) {\n\n let result = ffi::pthread_mutex_lock(self.inner.get());\n\n debug_assert_eq!(result, 0);\n\n }\n", "file_path": "src/mutex/unix.rs", "rank": 31, "score": 14718.048570685563 }, { "content": " pub fn try_lock(&self) -> TryLockResult<ReentrantMutexGuard<T>> {\n\n if unsafe { self.inner.try_lock() } {\n\n Ok(try!(ReentrantMutexGuard::new(&self)))\n\n } else {\n\n Err(TryLockError::WouldBlock)\n\n }\n\n }\n\n}\n\n\n\nimpl<T: ?Sized> Drop for ReentrantMutex<T> {\n\n fn drop(&mut self) {\n\n unsafe { self.inner.destroy() }\n\n }\n\n}\n\n\n\nimpl<T: fmt::Debug + 'static> fmt::Debug for ReentrantMutex<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self.try_lock() {\n\n Ok(guard) => write!(f, \"ReentrantMutex {{ data: {:?} }}\", &*guard),\n\n Err(TryLockError::Poisoned(err)) => {\n", "file_path": "src/mutex/remutex.rs", "rank": 32, "score": 14717.981794305397 }, { "content": " write!(f,\n\n \"ReentrantMutex {{ data: Poisoned({:?}) }}\",\n\n &**err.get_ref())\n\n }\n\n Err(TryLockError::WouldBlock) => write!(f, \"ReentrantMutex {{ <locked> }}\"),\n\n }\n\n }\n\n}\n\n\n\nimpl<'mutex, T> ReentrantMutexGuard<'mutex, T> {\n\n fn new(lock: &'mutex ReentrantMutex<T>) -> LockResult<ReentrantMutexGuard<'mutex, T>> {\n\n poison::map_result(lock.poison.borrow(), |guard| {\n\n ReentrantMutexGuard {\n\n __lock: lock,\n\n __poison: guard,\n\n __marker: marker::PhantomData,\n\n }\n\n })\n\n }\n\n}\n", "file_path": "src/mutex/remutex.rs", "rank": 33, "score": 14717.360180495998 }, { "content": "\n\nimpl<'mutex, T> Deref for ReentrantMutexGuard<'mutex, T> {\n\n type Target = T;\n\n\n\n fn deref(&self) -> &T {\n\n unsafe { &*self.__lock.data.get() }\n\n }\n\n}\n\n\n\nimpl<'mutex, T> DerefMut for ReentrantMutexGuard<'mutex, T> {\n\n // type Target = T;\n\n fn deref_mut(&mut self) -> &mut T {\n\n unsafe { &mut *self.__lock.data.get() }\n\n }\n\n}\n\n\n\n// impl<'mutex, T: ?Sized> DerefMut for MutexGuard<'mutex, T> {\n\n// fn deref_mut(&mut self) -> &mut T {\n\n// unsafe { &mut *self.__data.get() }\n\n// }\n", "file_path": "src/mutex/remutex.rs", "rank": 34, "score": 14717.3046734559 }, { "content": "/// Both Mutexes and RwLocks are poisoned whenever a task fails while the lock\n\n/// is held. The precise semantics for when a lock is poisoned is documented on\n\n/// each lock, but once a lock is poisoned then all future acquisitions will\n\n/// return this error.\n\npub struct PoisonError<T> {\n\n guard: T,\n\n}\n\n\n\n/// An enumeration of possible errors which can occur while calling the\n\n/// `try_lock` method.\n\npub enum TryLockError<T> {\n\n /// The lock could not be acquired because another task failed while holding\n\n /// the lock.\n\n Poisoned(PoisonError<T>),\n\n /// The lock could not be acquired at this time because the operation would\n\n /// otherwise block.\n\n WouldBlock,\n\n}\n\n\n\n/// A type alias for the result of a lock method which can be poisoned.\n", "file_path": "src/mutex/poison.rs", "rank": 35, "score": 14717.303688245245 }, { "content": "\n\nimpl<T> fmt::Display for PoisonError<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n \"poisoned lock: another task failed inside\".fmt(f)\n\n }\n\n}\n\n\n\nimpl<T: Send + Any> Error for PoisonError<T> {\n\n fn description(&self) -> &str {\n\n \"poisoned lock: another task failed inside\"\n\n }\n\n}\n\n\n\nimpl<T> PoisonError<T> {\n\n /// Create a `PoisonError`.\n\n pub fn new(guard: T) -> PoisonError<T> {\n\n PoisonError { guard: guard }\n\n }\n\n\n\n /// Consumes this error indicating that a lock is poisoned, returning the\n", "file_path": "src/mutex/poison.rs", "rank": 36, "score": 14717.060317064626 }, { "content": " }\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn get(&self) -> bool {\n\n unsafe { *self.failed.get() }\n\n }\n\n\n\n pub fn new() -> Flag {\n\n Flag { failed: UnsafeCell::new(false) }\n\n }\n\n}\n\n\n\npub struct Guard {\n\n panicking: bool,\n\n}\n\n\n\n/// A type of error which can be returned whenever a lock is acquired.\n\n///\n", "file_path": "src/mutex/poison.rs", "rank": 37, "score": 14716.956288820296 }, { "content": " fn smoke() {\n\n let m = ReentrantMutex::new(());\n\n {\n\n let a = m.lock().unwrap();\n\n {\n\n let b = m.lock().unwrap();\n\n {\n\n let c = m.lock().unwrap();\n\n assert_eq!(*c, ());\n\n }\n\n assert_eq!(*b, ());\n\n }\n\n assert_eq!(*a, ());\n\n }\n\n }\n\n\n\n #[test]\n\n fn is_mutex() {\n\n\n\n\n", "file_path": "src/mutex/remutex.rs", "rank": 38, "score": 14716.955257465177 }, { "content": " TryLockError::Poisoned(err)\n\n }\n\n}\n\n\n\nimpl<T> fmt::Debug for TryLockError<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match *self {\n\n TryLockError::Poisoned(..) => \"Poisoned(..)\".fmt(f),\n\n TryLockError::WouldBlock => \"WouldBlock\".fmt(f),\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Send + Any> fmt::Display for TryLockError<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n self.description().fmt(f)\n\n }\n\n}\n\n\n\nimpl<T: Send + Any> Error for TryLockError<T> {\n", "file_path": "src/mutex/poison.rs", "rank": 39, "score": 14716.639016247856 }, { "content": " /// underlying guard to allow access regardless.\n\n pub fn into_inner(self) -> T {\n\n self.guard\n\n }\n\n\n\n /// Reaches into this error indicating that a lock is poisoned, returning a\n\n /// reference to the underlying guard to allow access regardless.\n\n pub fn get_ref(&self) -> &T {\n\n &self.guard\n\n }\n\n\n\n /// Reaches into this error indicating that a lock is poisoned, returning a\n\n /// mutable reference to the underlying guard to allow access regardless.\n\n pub fn get_mut(&mut self) -> &mut T {\n\n &mut self.guard\n\n }\n\n}\n\n\n\nimpl<T> From<PoisonError<T>> for TryLockError<T> {\n\n fn from(err: PoisonError<T>) -> TryLockError<T> {\n", "file_path": "src/mutex/poison.rs", "rank": 40, "score": 14716.508513936304 }, { "content": "///\n\n/// The `Ok` variant of this result indicates that the primitive was not\n\n/// poisoned, and the `Guard` is contained within. The `Err` variant indicates\n\n/// that the primitive was poisoned. Note that the `Err` variant *also* carries\n\n/// the associated guard, and it can be acquired through the `into_inner`\n\n/// method.\n\npub type LockResult<Guard> = Result<Guard, PoisonError<Guard>>;\n\n\n\n/// A type alias for the result of a nonblocking locking method.\n\n///\n\n/// For more information, see `LockResult`. A `TryLockResult` doesn't\n\n/// necessarily hold the associated guard in the `Err` type as the lock may not\n\n/// have been acquired for other reasons.\n\npub type TryLockResult<Guard> = Result<Guard, TryLockError<Guard>>;\n\n\n\nimpl<T> fmt::Debug for PoisonError<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n \"PoisonError { inner: .. }\".fmt(f)\n\n }\n\n}\n", "file_path": "src/mutex/poison.rs", "rank": 41, "score": 14716.028414693259 }, { "content": " fn description(&self) -> &str {\n\n match *self {\n\n TryLockError::Poisoned(ref p) => p.description(),\n\n TryLockError::WouldBlock => \"try_lock failed because the operation would block\",\n\n }\n\n }\n\n\n\n fn cause(&self) -> Option<&Error> {\n\n match *self {\n\n TryLockError::Poisoned(ref p) => Some(p),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/mutex/poison.rs", "rank": 42, "score": 14714.6423042918 }, { "content": "extern crate td_rthreadpool;\n\nuse td_rthreadpool::ThreadPool;\n\nuse std::thread::{self, sleep};\n\nuse std::sync;\n\nuse std::time::Duration;\n\n\n\nconst TEST_TASKS: usize = 4;\n\n#[test]\n", "file_path": "tests/test.rs", "rank": 48, "score": 9.308691690527771 }, { "content": "#td_rthreadpool\n\n==========\n\n\n\nA thread pool for running a number of jobs on a fixed set of worker threads. And has the reenter mutex lock.\n\n\n\n[![Build Status](https://travis-ci.org/tickbh/td_rthreadpool.svg?branch=master)](https://travis-ci.org/tickbh/td_rthreadpool)\n\n\n\n## Usage\n\n\n\nAdd this to your `Cargo.toml`:\n\n\n\n```toml\n\n[dependencies]\n\ntd_rthreadpool = \"0.1.0\"\n\n```\n\n\n\nand this to your crate root:\n\n\n\n```rust\n\nextern crate td_rthreadpool;\n\n```\n\n\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n * Apache License, Version 2.0\n\n * MIT license\n\n\n\nat your option.\n", "file_path": "README.md", "rank": 51, "score": 6.502351268046613 } ]
Rust
tests/asm.rs
jonas-schievink/x87
c24fe7ee4fd51ebb8411187a7e200bd80c9ea87c
#![feature(asm, untagged_unions)] #![cfg(any(target_arch = "x86", target_arch = "x86_64"))] #[macro_use] extern crate x87; #[macro_use] extern crate proptest; extern crate env_logger; extern crate ieee754; use x87::{X87State, f80}; use ieee754::Ieee754; union X87StateUnion { raw: [u8; 108], structured: X87State, } #[test] fn meta() { let mut backup = X87StateUnion { raw: [0; 108] }; let mut state = X87StateUnion { raw: [0; 108] }; unsafe { asm!(r" fnsave $0 fld1 fnsave $1 frstor $0 " : "=*m"(&mut backup.raw), "=*m"(&mut state.raw) :: "memory"); } let state2 = run_host_asm!("fld1" :); assert_eq!(unsafe { state.structured.scrub() }, state2); } #[test] fn add_f64_double_round_wrong() { let lhs = f64::from_bits(964674174654497230); let rhs = f64::from_bits(10131472521302454270); let mut result80 = 0.0f64; run_host_asm!(r" fldl $1 fldl $2 faddp fstpl $0 " : "=*m"(&mut result80), "=*m"(&lhs), "=*m"(&rhs)); println!("{}+{}={}", lhs, rhs, result80); let result64 = lhs + rhs; assert_eq!(result64.to_bits(), result80.to_bits() + 1, "host FPU returned wrong result"); let (l80, r80) = (f80::from(lhs), f80::from(rhs)); let f80sum = l80 + r80; let f80bits = f80sum.to_f64().to_bits(); assert_eq!(result80.to_bits(), f80bits, "host FPU != emulation result"); } fn add32(lhs_bits: u32, rhs_bits: u32) { let (lhs, rhs) = (f32::from_bits(lhs_bits), f32::from_bits(rhs_bits)); let mut native_f32_sum = 0.0f32; let mut native_f80_sum = [0u8; 10]; run_host_asm!(r" flds $1 flds $0 faddp fsts $2 fstpt $3 " : "=*m"(&lhs), "=*m"(&rhs), "=*m"(&mut native_f32_sum), "=*m"(&mut native_f80_sum)); let (l80, r80) = (f80::from(lhs), f80::from(rhs)); let f80sum = l80 + r80; let f80_f32bits = f80sum.to_f32().to_bits(); let f80native = f80::from_bytes(native_f80_sum); assert_eq!( f80_f32bits, native_f32_sum.to_bits(), "f32 sum mismatch: x87:{}={:?}={:#010X}={:?}, native:{}={:?}={:#010X}={:?}", f80sum.to_f32(), f80sum.to_f32().classify(), f80_f32bits, f80sum.to_f32().decompose(), native_f32_sum, native_f32_sum.classify(), native_f32_sum.to_bits(), native_f32_sum.decompose(), ); assert_eq!( f80sum.to_bytes(), native_f80_sum, "f80 sum mismatch: x87:{:?}={:?}, native:{:?}={:?}", f80sum, f80sum.classify(), f80native, f80native.classify(), ); } fn sub32(lhs_bits: u32, rhs_bits: u32) { let (lhs, rhs) = (f32::from_bits(lhs_bits), f32::from_bits(rhs_bits)); let mut native_f32_diff = 0.0f32; let mut native_f80_diff = [0u8; 10]; run_host_asm!(r" flds $1 flds $0 fsubp fsts $2 fstpt $3 " : "=*m"(&lhs), "=*m"(&rhs), "=*m"(&mut native_f32_diff), "=*m"(&mut native_f80_diff)); let (l80, r80) = (f80::from(lhs), f80::from(rhs)); let f80diff = l80 - r80; let f80_f32bits = f80diff.to_f32().to_bits(); let f80native = f80::from_bytes(native_f80_diff); assert_eq!( f80_f32bits, native_f32_diff.to_bits(), "f32 sum mismatch: x87:{}={:?}={:#010X}={:?}, native:{}={:?}={:#010X}={:?}", f80diff.to_f32(), f80diff.to_f32().classify(), f80_f32bits, f80diff.to_f32().decompose(), native_f32_diff, native_f32_diff.classify(), native_f32_diff.to_bits(), native_f32_diff.decompose(), ); assert_eq!( f80diff.to_bytes(), native_f80_diff, "f80 sum mismatch: x87:{:?}={:?}, native:{:?}={:?}", f80diff, f80diff.classify(), f80native, f80native.classify(), ); } fn mul32(lhs_bits: u32, rhs_bits: u32) { let (lhs, rhs) = (f32::from_bits(lhs_bits), f32::from_bits(rhs_bits)); let mut native_f32_prod = 0.0f32; let mut native_f80_prod = [0u8; 10]; run_host_asm!(r" flds $1 flds $0 fmulp fsts $2 fstpt $3 " : "=*m"(&lhs), "=*m"(&rhs), "=*m"(&mut native_f32_prod), "=*m"(&mut native_f80_prod)); let (l80, r80) = (f80::from(lhs), f80::from(rhs)); let f80prod = l80 * r80; let f80_f32bits = f80prod.to_f32().to_bits(); let f80native = f80::from_bytes(native_f80_prod); assert_eq!( f80_f32bits, native_f32_prod.to_bits(), "f32 product mismatch: x87:{}={:?}={:#010X}={:?}, native:{}={:?}={:#010X}={:?}", f80prod.to_f32(), f80prod.to_f32().classify(), f80_f32bits, f80prod.to_f32().decompose(), native_f32_prod, native_f32_prod.classify(), native_f32_prod.to_bits(), native_f32_prod.decompose(), ); assert_eq!( f80prod.to_bytes(), native_f80_prod, "f80 product mismatch: x87:{:?}={:?}, native:{:?}={:?}", f80prod, f80prod.classify(), f80native, f80native.classify(), ); } #[test] fn f32_add_nan_payload() { env_logger::try_init().ok(); add32(2139095041, 0); } #[test] fn nan_propagation() { env_logger::try_init().ok(); add32(0xff800002, 0x7f800001); add32(0x7f800002, 0xff800001); add32(0xff800002, 0xff800001); add32(0x7f800002, 0x7f800001); add32(0xff800001, 0x7f800002); add32(0x7f800001, 0xff800002); add32(0xff800001, 0xff800002); add32(0x7f800001, 0x7f800002); } #[test] fn rounding_affects_integer_bits() { env_logger::try_init().ok(); add32(1, 3976200192); } #[test] fn to_f32_postnormalizes() { env_logger::try_init().ok(); add32(3120562177, 1518338048); } #[test] fn addition_doesnt_create_signed_zero() { env_logger::try_init().ok(); add32(54623649, 2202107297); } #[test] fn infinities() { let pinf: f32 = 1.0/0.0; let minf: f32 = -1.0/0.0; add32(pinf.to_bits(), pinf.to_bits()); add32(minf.to_bits(), minf.to_bits()); add32(minf.to_bits(), pinf.to_bits()); add32(pinf.to_bits(), minf.to_bits()); } #[test] fn zero_exponent() { env_logger::try_init().ok(); add32(2147483649, 0); } #[test] fn zero_minus_nan() { env_logger::try_init().ok(); sub32(0, 2139095041); } #[test] fn mul_denormal_zero() { env_logger::try_init().ok(); mul32(0, 1); mul32(1, 0); } #[test] fn mul_f32_denormals() { env_logger::try_init().ok(); mul32(1, 3); } #[test] #[ignore] fn mul_rounding_denormal_result() { env_logger::try_init().ok(); mul32(2496593444, 706412423); } proptest! { #[test] fn add_f32(lhs_bits: u32, rhs_bits: u32) { add32(lhs_bits, rhs_bits); } } proptest! { #[test] fn sub_f32(lhs_bits: u32, rhs_bits: u32) { sub32(lhs_bits, rhs_bits); } } proptest! { #[test] fn mul_f32(lhs_bits: u32, rhs_bits: u32) { mul32(lhs_bits, rhs_bits); } }
#![feature(asm, untagged_unions)] #![cfg(any(target_arch = "x86", target_arch = "x86_64"))] #[macro_use] extern crate x87; #[macro_use] extern crate proptest; extern crate env_logger; extern crate ieee754; use x87::{X87State, f80}; use ieee754::Ieee754; union X87StateUnion { raw: [u8; 108], structured: X87State, } #[test] fn meta() { let mut backup = X87StateUnion { raw: [0; 108] }; let mut state = X87StateUnion { raw: [0; 108] }; unsafe { asm!(r" fnsave $0 fld1 fnsave $1 frstor $0 " : "=*m"(&mut backup.raw), "=*m"(&mut state.raw) :: "memory"); } let state2 = run_host_asm!("fld1" :); assert_eq!(unsafe { state.structured.scrub() }, state2); } #[test] fn add_f64_double_round_wrong() { let lhs = f64::from_bits(964674174654497230); let rhs = f64::from_bits(10131472521302454270); let mut result80 = 0.0f64; run_host_asm!(r" fldl $1 fldl $2 fadd
fn add32(lhs_bits: u32, rhs_bits: u32) { let (lhs, rhs) = (f32::from_bits(lhs_bits), f32::from_bits(rhs_bits)); let mut native_f32_sum = 0.0f32; let mut native_f80_sum = [0u8; 10]; run_host_asm!(r" flds $1 flds $0 faddp fsts $2 fstpt $3 " : "=*m"(&lhs), "=*m"(&rhs), "=*m"(&mut native_f32_sum), "=*m"(&mut native_f80_sum)); let (l80, r80) = (f80::from(lhs), f80::from(rhs)); let f80sum = l80 + r80; let f80_f32bits = f80sum.to_f32().to_bits(); let f80native = f80::from_bytes(native_f80_sum); assert_eq!( f80_f32bits, native_f32_sum.to_bits(), "f32 sum mismatch: x87:{}={:?}={:#010X}={:?}, native:{}={:?}={:#010X}={:?}", f80sum.to_f32(), f80sum.to_f32().classify(), f80_f32bits, f80sum.to_f32().decompose(), native_f32_sum, native_f32_sum.classify(), native_f32_sum.to_bits(), native_f32_sum.decompose(), ); assert_eq!( f80sum.to_bytes(), native_f80_sum, "f80 sum mismatch: x87:{:?}={:?}, native:{:?}={:?}", f80sum, f80sum.classify(), f80native, f80native.classify(), ); } fn sub32(lhs_bits: u32, rhs_bits: u32) { let (lhs, rhs) = (f32::from_bits(lhs_bits), f32::from_bits(rhs_bits)); let mut native_f32_diff = 0.0f32; let mut native_f80_diff = [0u8; 10]; run_host_asm!(r" flds $1 flds $0 fsubp fsts $2 fstpt $3 " : "=*m"(&lhs), "=*m"(&rhs), "=*m"(&mut native_f32_diff), "=*m"(&mut native_f80_diff)); let (l80, r80) = (f80::from(lhs), f80::from(rhs)); let f80diff = l80 - r80; let f80_f32bits = f80diff.to_f32().to_bits(); let f80native = f80::from_bytes(native_f80_diff); assert_eq!( f80_f32bits, native_f32_diff.to_bits(), "f32 sum mismatch: x87:{}={:?}={:#010X}={:?}, native:{}={:?}={:#010X}={:?}", f80diff.to_f32(), f80diff.to_f32().classify(), f80_f32bits, f80diff.to_f32().decompose(), native_f32_diff, native_f32_diff.classify(), native_f32_diff.to_bits(), native_f32_diff.decompose(), ); assert_eq!( f80diff.to_bytes(), native_f80_diff, "f80 sum mismatch: x87:{:?}={:?}, native:{:?}={:?}", f80diff, f80diff.classify(), f80native, f80native.classify(), ); } fn mul32(lhs_bits: u32, rhs_bits: u32) { let (lhs, rhs) = (f32::from_bits(lhs_bits), f32::from_bits(rhs_bits)); let mut native_f32_prod = 0.0f32; let mut native_f80_prod = [0u8; 10]; run_host_asm!(r" flds $1 flds $0 fmulp fsts $2 fstpt $3 " : "=*m"(&lhs), "=*m"(&rhs), "=*m"(&mut native_f32_prod), "=*m"(&mut native_f80_prod)); let (l80, r80) = (f80::from(lhs), f80::from(rhs)); let f80prod = l80 * r80; let f80_f32bits = f80prod.to_f32().to_bits(); let f80native = f80::from_bytes(native_f80_prod); assert_eq!( f80_f32bits, native_f32_prod.to_bits(), "f32 product mismatch: x87:{}={:?}={:#010X}={:?}, native:{}={:?}={:#010X}={:?}", f80prod.to_f32(), f80prod.to_f32().classify(), f80_f32bits, f80prod.to_f32().decompose(), native_f32_prod, native_f32_prod.classify(), native_f32_prod.to_bits(), native_f32_prod.decompose(), ); assert_eq!( f80prod.to_bytes(), native_f80_prod, "f80 product mismatch: x87:{:?}={:?}, native:{:?}={:?}", f80prod, f80prod.classify(), f80native, f80native.classify(), ); } #[test] fn f32_add_nan_payload() { env_logger::try_init().ok(); add32(2139095041, 0); } #[test] fn nan_propagation() { env_logger::try_init().ok(); add32(0xff800002, 0x7f800001); add32(0x7f800002, 0xff800001); add32(0xff800002, 0xff800001); add32(0x7f800002, 0x7f800001); add32(0xff800001, 0x7f800002); add32(0x7f800001, 0xff800002); add32(0xff800001, 0xff800002); add32(0x7f800001, 0x7f800002); } #[test] fn rounding_affects_integer_bits() { env_logger::try_init().ok(); add32(1, 3976200192); } #[test] fn to_f32_postnormalizes() { env_logger::try_init().ok(); add32(3120562177, 1518338048); } #[test] fn addition_doesnt_create_signed_zero() { env_logger::try_init().ok(); add32(54623649, 2202107297); } #[test] fn infinities() { let pinf: f32 = 1.0/0.0; let minf: f32 = -1.0/0.0; add32(pinf.to_bits(), pinf.to_bits()); add32(minf.to_bits(), minf.to_bits()); add32(minf.to_bits(), pinf.to_bits()); add32(pinf.to_bits(), minf.to_bits()); } #[test] fn zero_exponent() { env_logger::try_init().ok(); add32(2147483649, 0); } #[test] fn zero_minus_nan() { env_logger::try_init().ok(); sub32(0, 2139095041); } #[test] fn mul_denormal_zero() { env_logger::try_init().ok(); mul32(0, 1); mul32(1, 0); } #[test] fn mul_f32_denormals() { env_logger::try_init().ok(); mul32(1, 3); } #[test] #[ignore] fn mul_rounding_denormal_result() { env_logger::try_init().ok(); mul32(2496593444, 706412423); } proptest! { #[test] fn add_f32(lhs_bits: u32, rhs_bits: u32) { add32(lhs_bits, rhs_bits); } } proptest! { #[test] fn sub_f32(lhs_bits: u32, rhs_bits: u32) { sub32(lhs_bits, rhs_bits); } } proptest! { #[test] fn mul_f32(lhs_bits: u32, rhs_bits: u32) { mul32(lhs_bits, rhs_bits); } }
p fstpl $0 " : "=*m"(&mut result80), "=*m"(&lhs), "=*m"(&rhs)); println!("{}+{}={}", lhs, rhs, result80); let result64 = lhs + rhs; assert_eq!(result64.to_bits(), result80.to_bits() + 1, "host FPU returned wrong result"); let (l80, r80) = (f80::from(lhs), f80::from(rhs)); let f80sum = l80 + r80; let f80bits = f80sum.to_f64().to_bits(); assert_eq!(result80.to_bits(), f80bits, "host FPU != emulation result"); }
function_block-function_prefixed
[]
Rust
05_recon/src/types.rs
lazear/types-and-programming-languages
0787493713b41639878db206e76d82fa8f29a77a
use std::collections::{HashMap, HashSet, VecDeque}; #[derive(Copy, Clone, Default, PartialEq, PartialOrd, Eq, Hash)] pub struct TypeVar(pub u32, pub u32); #[derive(Copy, Clone, PartialEq, PartialOrd, Eq, Hash)] pub struct Tycon { id: usize, arity: usize, } #[derive(Clone, PartialEq, PartialOrd, Eq, Hash)] pub enum Type { Var(TypeVar), Con(Tycon, Vec<Type>), } #[derive(Debug, Clone)] pub enum Scheme { Mono(Type), Poly(Vec<TypeVar>, Type), } pub trait Substitution { fn ftv(&self) -> HashSet<TypeVar>; fn apply(self, s: &HashMap<TypeVar, Type>) -> Self; } impl Substitution for Type { fn ftv(&self) -> HashSet<TypeVar> { let mut set = HashSet::new(); let mut queue = VecDeque::new(); queue.push_back(self); while let Some(ty) = queue.pop_front() { match ty { Type::Var(x) => { set.insert(*x); } Type::Con(_, tys) => { for ty in tys { queue.push_back(ty); } } } } set } fn apply(self, map: &HashMap<TypeVar, Type>) -> Type { match self { Type::Var(x) => map.get(&x).cloned().unwrap_or(Type::Var(x)), Type::Con(tc, vars) => Type::Con(tc, vars.into_iter().map(|ty| ty.apply(map)).collect()), } } } impl Type { pub fn arrow(a: Type, b: Type) -> Type { Type::Con(T_ARROW, vec![a, b]) } pub fn bool() -> Type { Type::Con(T_BOOL, vec![]) } pub fn occurs(&self, exist: TypeVar) -> bool { match self { Type::Var(x) => *x == exist, Type::Con(_, tys) => tys.iter().any(|ty| ty.occurs(exist)), } } pub fn de_arrow(&self) -> (&Type, &Type) { match self { Type::Con(T_ARROW, v) => (&v[0], &v[1]), _ => panic!("Not arrow type! {:?}", self), } } } pub fn compose(s1: HashMap<TypeVar, Type>, s2: HashMap<TypeVar, Type>) -> HashMap<TypeVar, Type> { let mut s2 = s2 .into_iter() .map(|(k, v)| (k, v.apply(&s1))) .collect::<HashMap<TypeVar, Type>>(); for (k, v) in s1 { if !s2.contains_key(&k) { s2.insert(k, v); } } s2 } impl Substitution for Scheme { fn ftv(&self) -> HashSet<TypeVar> { match self { Scheme::Mono(ty) => ty.ftv(), Scheme::Poly(vars, ty) => ty.ftv(), } } fn apply(self, map: &HashMap<TypeVar, Type>) -> Scheme { match self { Scheme::Mono(ty) => Scheme::Mono(ty.apply(map)), Scheme::Poly(vars, ty) => { let mut map: HashMap<TypeVar, Type> = map.clone(); for v in &vars { map.remove(v); } Scheme::Poly(vars, ty.apply(&map)) } } } } pub const T_ARROW: Tycon = Tycon { id: 0, arity: 2 }; pub const T_INT: Tycon = Tycon { id: 1, arity: 0 }; pub const T_UNIT: Tycon = Tycon { id: 2, arity: 0 }; pub const T_BOOL: Tycon = Tycon { id: 3, arity: 0 }; impl std::fmt::Debug for Tycon { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self.id { 0 => write!(f, "->"), 1 => write!(f, "int"), 2 => write!(f, "unit"), 3 => write!(f, "bool"), _ => write!(f, "??"), } } } fn fresh_name(x: u32) -> String { let last = ((x % 26) as u8 + 'a' as u8) as char; (0..x / 26) .map(|_| 'z') .chain(std::iter::once(last)) .collect::<String>() } impl std::fmt::Debug for TypeVar { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_str(&fresh_name(self.0)) } } impl std::fmt::Debug for Type { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { Type::Var(x) => write!(f, "{:?}", x), Type::Con(T_ARROW, tys) => write!(f, "({:?} -> {:?})", tys[0], tys[1]), Type::Con(tc, _) => write!(f, "{:?}", tc,), } } }
use std::collections::{HashMap, HashSet, VecDeque}; #[derive(Copy, Clone, Default, PartialEq, PartialOrd, Eq, Hash)] pub struct TypeVar(pub u32, pub u32); #[derive(Copy, Clone, PartialEq, PartialOrd, Eq, Hash)] pub struct Tycon { id: usize, arity: usize, } #[derive(Clone, PartialEq, PartialOrd, Eq, Hash)] pub enum Type { Var(TypeVar), Con(Tycon, Vec<Type>), } #[derive(Debug, Clone)] pub enum Scheme { Mono(Type), Poly(Vec<TypeVar>, Type), } pub trait Substitution { fn ftv(&self) -> HashSet<TypeVar>; fn apply(self, s: &HashMap<TypeVar, Type>) -> Self; } impl Substitution for Type { fn ftv(&self) -> HashSet<TypeVar> { let mut set = HashSet::new(); let mut queue = VecDeque::new(); queue.push_back(self); while let Some(ty) = queue.pop_front() { match ty { Type::Var(x) => { set.insert(*x); } Type::Con(_, tys) => { for ty in tys { queue.push_back(ty); } } } } set } fn apply(self, map: &HashMap<TypeVar, Type>) -> Type { match self { Type::Var(x) => map.get(&x).cloned().unwrap_or(Type::Var(x)), Type::Con(tc, vars) => Type::Con(tc, vars.into_iter().map(|ty| ty.apply(map)).collect()), } } } impl Type { pub fn arrow(a: Type, b: Type) -> Type { Type::Con(T_ARROW, vec![a, b]) } pub fn bool() -> Type { Type::Con(T_BOOL, vec![]) } pub fn occurs(&self, exist: TypeVar) -> bool { match self { Type::Var(x) => *x == exist, Type::Con(_, tys) => tys.iter().any(|ty| ty.occurs(exist)), } } pub fn de_arrow(&self) -> (&Type, &Type) { match self { Type::Con(T_ARROW, v) => (&v[0], &v[1]), _ => panic!("Not arrow type! {:?}", self), } } } pub fn compose(s1: HashMap<TypeVar, Type>, s2: HashMap<TypeVar, Type>) -> HashMap<TypeVar, Type> { let mut s2 = s2 .into_iter() .map(|(k, v)| (k, v.apply(&s1))) .collect::<HashMap<TypeVar, Type>>(); for (k, v) in s1 { if !s2.contains_key(&k) { s2.insert(k, v); } } s2 } impl Substitution for Scheme { fn ftv(&self) -> HashSet<TypeVar> { match self { Scheme::Mono(ty) => ty.ftv(), Scheme::Poly(vars, ty) => ty.ftv(), } } fn apply(self, map: &HashMap<TypeVar, Type>) -> Scheme { match self { Scheme::Mono(ty) => Scheme::Mono(ty.apply(map)), Scheme::Poly(vars, ty) => { let mut map: HashMap<TypeVar, Type> = map.clone(); for v in &vars { map.remove(v); } Scheme::Poly(vars, ty.apply(&map)) } } } } pub const T_ARROW: Tycon = Tycon { id: 0, arity: 2 }; pub const T_INT: Tycon = Tycon { id: 1, arity: 0 }; pub const T_UNIT: Tycon = Tycon { id: 2, arity: 0 }; pub const T_BOOL: Tycon = Tycon { id: 3, arity: 0 }; impl std::fmt::Debug for Tycon { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self.id { 0 => write!(f, "->"), 1 => write!(f, "int"), 2 => write!(f, "unit"), 3 => write!(f, "bool"), _ => write!(f, "??"), } } } fn fresh_name(x: u32) -> String { let last = ((x % 26) as u8 + 'a' as u8
TypeVar { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_str(&fresh_name(self.0)) } } impl std::fmt::Debug for Type { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { Type::Var(x) => write!(f, "{:?}", x), Type::Con(T_ARROW, tys) => write!(f, "({:?} -> {:?})", tys[0], tys[1]), Type::Con(tc, _) => write!(f, "{:?}", tc,), } } }
) as char; (0..x / 26) .map(|_| 'z') .chain(std::iter::once(last)) .collect::<String>() } impl std::fmt::Debug for
random
[ { "content": "fn var_bind(var: TypeVar, ty: Type) -> Result<HashMap<TypeVar, Type>, String> {\n\n if ty.occurs(var) {\n\n return Err(format!(\"Fails occurs check! {:?} {:?}\", var, ty));\n\n }\n\n let mut sub = HashMap::new();\n\n match ty {\n\n Type::Var(x) if x == var => {}\n\n _ => {\n\n sub.insert(var, ty);\n\n }\n\n }\n\n Ok(sub)\n\n}\n\n\n", "file_path": "05_recon/src/naive.rs", "rank": 1, "score": 336440.4079559585 }, { "content": "pub fn unify(a: Type, b: Type) -> Result<HashMap<TypeVar, Type>, String> {\n\n // println!(\"{:?} {:?}\", a, b);\n\n match (a, b) {\n\n (Type::Con(a, a_args), Type::Con(b, b_args)) => {\n\n if a_args.len() == b_args.len() && a == b {\n\n solve(a_args.into_iter().zip(b_args.into_iter()))\n\n } else {\n\n Err(format!(\n\n \"Can't unify types: {:?} {:?}\",\n\n Type::Con(a, a_args),\n\n Type::Con(b, b_args)\n\n ))\n\n }\n\n }\n\n (Type::Var(tv), b) => var_bind(tv, b),\n\n (a, Type::Var(tv)) => var_bind(tv, a),\n\n }\n\n}\n\n\n", "file_path": "05_recon/src/naive.rs", "rank": 2, "score": 320881.69585097954 }, { "content": "pub fn occurs_check(v: &TypeVar, ty: &Type) -> bool {\n\n match ty {\n\n Type::Var(x) => {\n\n if let Some(info) = x.data.get() {\n\n occurs_check(v, &info)\n\n } else {\n\n let min_rank = x.data.get_rank().min(v.data.get_rank());\n\n if min_rank != x.data.get_rank() {\n\n println!(\"promoting type var {:?} {}->{}\", x, x.data.get_rank(), min_rank);\n\n x.data.set_rank(min_rank);\n\n }\n\n\n\n x.exist == v.exist\n\n }\n\n }\n\n Type::Con(_, vars) => vars.iter().any(|x| occurs_check(v, x)),\n\n }\n\n}\n\n\n", "file_path": "05_recon/src/mutation/mod.rs", "rank": 3, "score": 320068.03833708534 }, { "content": "fn var_bind(v: &TypeVar, ty: &Type) -> Result<(), String> {\n\n if occurs_check(&v, ty) {\n\n return Err(format!(\"Failed occurs check {:?} {:?}\", v, ty));\n\n }\n\n\n\n v.data.set(ty.clone()).unwrap();\n\n Ok(())\n\n}\n\n\n", "file_path": "05_recon/src/mutation/mod.rs", "rank": 4, "score": 282602.673342834 }, { "content": "pub fn solve<I: Iterator<Item = (Type, Type)>>(iter: I) -> Result<HashMap<TypeVar, Type>, String> {\n\n let mut un = Unifier::new();\n\n\n\n for (a, b) in iter {\n\n let a = un.intern(a);\n\n let b = un.intern(b);\n\n un.unify(a, b)?;\n\n }\n\n let mut map = HashMap::new();\n\n for (ty, var) in &un.map {\n\n match ty {\n\n Type::Var(x) => {\n\n map.insert(*x, un.decode(un.set.find(*var)));\n\n }\n\n _ => {}\n\n }\n\n }\n\n\n\n Ok(map)\n\n}\n", "file_path": "05_recon/src/disjoint.rs", "rank": 5, "score": 281640.0435154171 }, { "content": "pub fn solve<I: Iterator<Item = (Type, Type)>>(iter: I) -> Result<HashMap<TypeVar, Type>, String> {\n\n let mut sub = HashMap::new();\n\n for (a, b) in iter {\n\n let tmp = unify(a.clone().apply(&sub), b.clone().apply(&sub))?;\n\n sub = compose(tmp, sub);\n\n }\n\n Ok(sub)\n\n}\n", "file_path": "05_recon/src/naive.rs", "rank": 6, "score": 281640.04351541714 }, { "content": "pub fn subst(mut s: Type, mut t: Type) -> Type {\n\n Shift::new(1).visit(&mut s);\n\n Subst::new(s).visit(&mut t);\n\n Shift::new(-1).visit(&mut t);\n\n t\n\n}\n\n\n", "file_path": "06_system_f/src/types/mod.rs", "rank": 7, "score": 237492.0393828438 }, { "content": "fn ty_display(ty: &Type) -> String {\n\n use std::collections::HashMap;\n\n let mut map = HashMap::new();\n\n fn walk(ty: &Type, map: &mut HashMap<usize, char>) -> String {\n\n let nc = ('a' as u8 + map.len() as u8) as char;\n\n let vc = ('A' as u8 + map.len() as u8) as char;\n\n match ty {\n\n Type::Unit => \"()\".into(),\n\n Type::Bool => \"bool\".into(),\n\n Type::Int => \"int\".into(),\n\n Type::Arrow(a, b) => format!(\"({}->{})\", walk(a, map), walk(b, map)),\n\n Type::Univ(k, ty) => format!(\"forall {}. {}\", vc, walk(ty, map)),\n\n Type::Exist(idx) => format!(\"{}\", map.entry(*idx).or_insert(nc)),\n\n Type::Var(idx) => format!(\"{}\", map.entry(0xdeadbeef + *idx).or_insert(vc)),\n\n Type::Sum(a, b) => format!(\"{} + {}\", walk(a, map), walk(b, map)),\n\n Type::Product(a, b) => format!(\"({} x {})\", walk(a, map), walk(b, map)),\n\n _ => \"\".into(),\n\n }\n\n }\n\n walk(ty, &mut map)\n\n}\n\n\n", "file_path": "x1_bidir/src/helpers.rs", "rank": 8, "score": 229057.86209343036 }, { "content": "fn walk_mut_term<V: MutVisitor>(visitor: &mut V, var: &mut Term) {\n\n match var {\n\n Term::Unit | Term::True | Term::False | Term::Zero => visitor.visit_const(var),\n\n Term::Succ(t) => visitor.visit_succ(t),\n\n Term::Pred(t) => visitor.visit_pred(t),\n\n Term::IsZero(t) => visitor.visit_iszero(t),\n\n Term::Var(_) => visitor.visit_var(var),\n\n Term::Abs(ty, body) => visitor.visit_abs(ty, body),\n\n Term::App(t1, t2) => visitor.visit_app(t1, t2),\n\n Term::If(a, b, c) => visitor.visit_if(a, b, c),\n\n Term::Let(bind, body) => visitor.visit_let(bind, body),\n\n Term::Record(rec) => visitor.visit_record(rec),\n\n Term::Projection(rec, idx) => visitor.visit_proj(rec, idx),\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum Direction {\n\n Up,\n\n Down,\n", "file_path": "04_stlc/src/visitor.rs", "rank": 10, "score": 224867.68862673326 }, { "content": "pub fn parameterized_set() -> Type {\n\n tyop!(kind!(*), exist!(kind!(* => *), op_app!(Type::Var(0), Type::Var(1))))\n\n}\n\n\n", "file_path": "07_system_fw/src/functor.rs", "rank": 11, "score": 203097.2834263657 }, { "content": "fn unify_type(a: &Type, b: &Type) -> Result<(), String> {\n\n match (a, b) {\n\n (Type::Var(a), b) => match a.data.get() {\n\n Some(ty) => unify_type(ty, b),\n\n None => var_bind(a, b),\n\n },\n\n (a, Type::Var(b)) => match b.data.get() {\n\n Some(ty) => unify_type(a, ty),\n\n None => var_bind(b, a),\n\n },\n\n (Type::Con(a, a_args), Type::Con(b, b_args)) => {\n\n if a != b {\n\n return Err(format!(\"Can't unify constructors {:?} and {:?}\", a, b));\n\n }\n\n if a_args.len() != b_args.len() {\n\n return Err(format!(\"Can't unify argument lists {:?} and {:?}\", a_args, b_args));\n\n }\n\n for (c, d) in a_args.into_iter().zip(b_args) {\n\n unify_type(c, d)?;\n\n }\n", "file_path": "05_recon/src/mutation/mod.rs", "rank": 13, "score": 190747.32148208935 }, { "content": "pub trait MutTypeVisitor: Sized {\n\n fn visit_var(&mut self, _: &mut usize) {}\n\n\n\n fn visit_arrow(&mut self, ty1: &mut Type, ty2: &mut Type) {\n\n self.visit(ty1);\n\n self.visit(ty2);\n\n }\n\n\n\n fn visit_universal(&mut self, _: &mut TyKind, ty: &mut Type) {\n\n self.visit(ty);\n\n }\n\n\n\n fn visit_existential(&mut self, _: &mut TyKind, ty: &mut Type) {\n\n self.visit(ty);\n\n }\n\n\n\n fn visit_abs(&mut self, _: &mut TyKind, ty: &mut Type) {\n\n self.visit(ty);\n\n }\n\n\n", "file_path": "07_system_fw/src/types.rs", "rank": 14, "score": 190731.72513280215 }, { "content": "pub trait MutTypeVisitor: Sized {\n\n fn visit_var(&mut self, var: &mut usize) {}\n\n fn visit_alias(&mut self, alias: &mut String) {}\n\n\n\n fn visit_arrow(&mut self, ty1: &mut Type, ty2: &mut Type) {\n\n self.visit(ty1);\n\n self.visit(ty2);\n\n }\n\n\n\n fn visit_universal(&mut self, inner: &mut Type) {\n\n self.visit(inner);\n\n }\n\n\n\n fn visit_existential(&mut self, inner: &mut Type) {\n\n self.visit(inner);\n\n }\n\n\n\n fn visit_variant(&mut self, variant: &mut Vec<Variant>) {\n\n for v in variant {\n\n self.visit(&mut v.ty);\n", "file_path": "06_system_f/src/visit.rs", "rank": 15, "score": 184352.15814972532 }, { "content": "pub fn typing(tm: RcTerm) -> Result<Type, TyError> {\n\n match &tm as &Term {\n\n Term::TmTrue => Ok(Type::Bool),\n\n Term::TmFalse => Ok(Type::Bool),\n\n Term::TmZero => Ok(Type::Nat),\n\n Term::TmSucc(t) => match typing(t.clone()) {\n\n Ok(Type::Nat) => Ok(Type::Nat),\n\n _ => Err(TyError::TypingError),\n\n },\n\n Term::TmPred(t) => match typing(t.clone()) {\n\n Ok(Type::Nat) => Ok(Type::Nat),\n\n _ => Err(TyError::TypingError),\n\n },\n\n Term::TmIsZero(t) => match typing(t.clone()) {\n\n Ok(Type::Nat) => Ok(Type::Bool),\n\n _ => Err(TyError::TypingError),\n\n },\n\n Term::TmIf(a, b, c) => match typing(a.clone()) {\n\n Ok(Type::Bool) => {\n\n let ty_b = typing(b.clone())?;\n", "file_path": "03_typedarith/src/ast.rs", "rank": 16, "score": 183088.81472171115 }, { "content": "fn unfold(ty: Type) -> Type {\n\n match &ty {\n\n Type::Recursive(inner) => op_app!(*inner.clone(), ty),\n\n Type::App(a, b) => match a.as_ref() {\n\n Type::Recursive(_) => op_app!(unfold(*a.clone()), *b.clone()),\n\n _ => ty,\n\n },\n\n _ => ty,\n\n }\n\n}\n", "file_path": "07_system_fw/src/main.rs", "rank": 17, "score": 183060.93819337047 }, { "content": "fn parse_and_eval(ctx: &mut types::Context, input: &str, verbose: bool) -> bool {\n\n let mut p = Parser::new(input);\n\n loop {\n\n let term = match p.parse() {\n\n Ok(term) => term,\n\n Err(parser::Error {\n\n kind: parser::ErrorKind::Eof,\n\n ..\n\n }) => break,\n\n Err(e) => {\n\n dbg!(e);\n\n break;\n\n }\n\n };\n\n if let Err(diag) = eval(ctx, term, verbose) {\n\n code_format(input, diag);\n\n return false;\n\n }\n\n }\n\n let diag = p.diagnostic();\n\n if diag.error_count() > 0 {\n\n println!(\"Parsing {}\", diag.emit());\n\n false\n\n } else {\n\n true\n\n }\n\n}\n\n\n", "file_path": "06_system_f/src/main.rs", "rank": 18, "score": 178423.57819188133 }, { "content": "/// Convert from natural number to church encoding\n\nfn baptize(int: u32) -> Term {\n\n let mut num = Term::TmZero;\n\n for _ in 0..int {\n\n num = Term::TmSucc(Box::new(num));\n\n }\n\n num\n\n}\n", "file_path": "01_arith/src/parser.rs", "rank": 19, "score": 170999.534214563 }, { "content": "/// Convert from natural number to church encoding\n\nfn baptize(int: u32) -> Term {\n\n let mut num = Term::TmZero;\n\n for _ in 0..int {\n\n num = Term::TmSucc(num.into());\n\n }\n\n num\n\n}\n", "file_path": "03_typedarith/src/parser.rs", "rank": 20, "score": 170999.53421456303 }, { "content": "fn type_subst(s: Type, t: &mut Term) {\n\n TyTermSubst::new(s).visit(t);\n\n Shift::new(-1).visit(t);\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use util::span::Span;\n\n\n\n #[test]\n\n fn literal() {\n\n let ctx = crate::types::Context::default();\n\n let eval = Eval::with_context(&ctx);\n\n assert_eq!(eval.small_step(lit!(false)), None);\n\n }\n\n\n\n #[test]\n\n fn application() {\n\n let ctx = crate::types::Context::default();\n", "file_path": "06_system_f/src/eval.rs", "rank": 21, "score": 167778.74125779467 }, { "content": "fn eval(ctx: &mut types::Context, mut term: Term, verbose: bool) -> Result<Term, Diagnostic> {\n\n ctx.de_alias(&mut term);\n\n InjRewriter.visit(&mut term);\n\n let ty = ctx.type_check(&term)?;\n\n println!(\" -: {:?}\", ty);\n\n\n\n let ev = eval::Eval::with_context(ctx);\n\n let mut t = term;\n\n let fin = loop {\n\n if let Some(res) = ev.small_step(t.clone()) {\n\n t = res;\n\n } else {\n\n break t;\n\n }\n\n if verbose {\n\n println!(\"---> {}\", t);\n\n }\n\n };\n\n println!(\"===> {}\", fin);\n\n let fty = ctx.type_check(&fin)?;\n\n if fty != ty {\n\n panic!(\n\n \"Type of term after evaluation is different than before!\\n1 {:?}\\n2 {:?}\",\n\n ty, fty\n\n );\n\n }\n\n Ok(fin)\n\n}\n\n\n", "file_path": "06_system_f/src/main.rs", "rank": 22, "score": 166458.58142524492 }, { "content": "/// Return true if `existing` covers `new`, i.e. if new is a useful pattern\n\n/// then `overlap` will return `false`\n\nfn overlap(existing: &Pattern, new: &Pattern) -> bool {\n\n use Pattern::*;\n\n match (existing, new) {\n\n (Any, _) => true,\n\n (Variable(_), _) => true,\n\n (Constructor(l, a), Constructor(l2, b)) => {\n\n if l == l2 {\n\n overlap(a, b)\n\n } else {\n\n false\n\n }\n\n }\n\n (Product(a), Product(b)) => a.iter().zip(b.iter()).all(|(a, b)| overlap(a, b)),\n\n (Product(a), b) => a.iter().all(|a| overlap(a, b)),\n\n (x, y) => x == y,\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, PartialOrd)]\n\npub struct Matrix<'pat> {\n", "file_path": "06_system_f/src/types/patterns.rs", "rank": 23, "score": 164410.22474282642 }, { "content": "/// Helper function for extracting type from a variant\n\npub fn variant_field<'vs>(var: &'vs [Variant], label: &str, span: Span) -> Result<&'vs Type, Diagnostic> {\n\n for f in var {\n\n if label == f.label {\n\n return Ok(&f.ty);\n\n }\n\n }\n\n Err(Diagnostic::error(\n\n span,\n\n format!(\"constructor {} doesn't appear in variant fields\", label),\n\n ))\n\n\n\n // Err(TypeError {\n\n // span,\n\n // kind: TypeErrorKind::NotVariant,\n\n // })\n\n}\n\n\n\nimpl Context {\n\n pub fn type_check(&mut self, term: &Term) -> Result<Type, Diagnostic> {\n\n // dbg!(&self.stack);\n", "file_path": "06_system_f/src/types/mod.rs", "rank": 24, "score": 161946.57308493374 }, { "content": "pub trait MutVisitor: Sized {\n\n fn visit_var(&mut self, var: &mut Term) {}\n\n\n\n fn visit_abs(&mut self, ty: &mut Type, body: &mut Term) {\n\n self.visit_term(body);\n\n }\n\n fn visit_app(&mut self, t1: &mut Term, t2: &mut Term) {\n\n self.visit_term(t1);\n\n self.visit_term(t2);\n\n }\n\n fn visit_if(&mut self, guard: &mut Term, csq: &mut Term, alt: &mut Term) {\n\n self.visit_term(guard);\n\n self.visit_term(csq);\n\n self.visit_term(alt);\n\n }\n\n fn visit_let(&mut self, bind: &mut Term, body: &mut Term) {\n\n self.visit_term(bind);\n\n self.visit_term(body);\n\n }\n\n fn visit_succ(&mut self, t: &mut Term) {\n", "file_path": "04_stlc/src/visitor.rs", "rank": 25, "score": 154371.95110143055 }, { "content": "pub trait TypeVisitor<'t>: Sized {\n\n fn visit_defined(&mut self, _: &'t str) {}\n\n\n\n fn visit_variable(&mut self, _: &'t str) {}\n\n\n\n fn visit_function(&mut self, ty1: &'t Type, ty2: &'t Type) {\n\n self.visit_ty(ty1);\n\n self.visit_ty(ty2);\n\n }\n\n\n\n fn visit_application(&mut self, ty1: &'t Type, ty2: &'t Type) {\n\n self.visit_ty(ty1);\n\n self.visit_ty(ty2);\n\n }\n\n\n\n fn visit_sum(&mut self, var: &'t [Variant]) {\n\n for v in var {\n\n if let Some(ty) = &v.ty {\n\n self.visit_ty(ty);\n\n }\n", "file_path": "07_system_fw/src/syntax/visit/types.rs", "rank": 26, "score": 151273.08953964448 }, { "content": "fn infer(ex: &Expr) -> Result<Type, String> {\n\n let mut ctx = Context::default();\n\n let inf = ctx.infer(ex)?;\n\n let mut ty = ctx.apply(inf);\n\n ctx.beta_reduce(&mut ty)?;\n\n Ok(ty)\n\n}\n\n\n", "file_path": "x1_bidir/src/main.rs", "rank": 27, "score": 151160.1127995417 }, { "content": "pub trait MutTermVisitor: Sized {\n\n fn visit_lit(&mut self, sp: &mut Span, lit: &mut Literal) {}\n\n fn visit_var(&mut self, sp: &mut Span, var: &mut usize) {}\n\n\n\n fn visit_abs(&mut self, sp: &mut Span, ty: &mut Type, term: &mut Term) {\n\n self.visit(term);\n\n }\n\n\n\n fn visit_app(&mut self, sp: &mut Span, t1: &mut Term, t2: &mut Term) {\n\n self.visit(t1);\n\n self.visit(t2);\n\n }\n\n\n\n fn visit_let(&mut self, sp: &mut Span, pat: &mut Pattern, t1: &mut Term, t2: &mut Term) {\n\n self.visit(t1);\n\n self.visit(t2);\n\n }\n\n\n\n fn visit_tyabs(&mut self, sp: &mut Span, term: &mut Term) {\n\n self.visit(term);\n", "file_path": "06_system_f/src/visit.rs", "rank": 28, "score": 150949.65077310958 }, { "content": "pub fn parameterized_set_term() -> Term {\n\n let body = Term::new(\n\n terms::Kind::Fold(\n\n Box::new(op_app!(list_type(), Type::Var(0))),\n\n Box::new(Term::new(\n\n terms::Kind::Injection(\n\n \"Nil\".into(),\n\n Box::new(unit!()),\n\n // Manually perform an unfold on list_type()\n\n // - In the System F language, we had an\n\n // InjRewriter macro that takes care of this,\n\n // and we could probably tack it directly\n\n // into the type-checker since we can do simplification\n\n // now\n\n Box::new(op_app!(unfold(list_type()), Type::Var(0))),\n\n ),\n\n Span::default(),\n\n )),\n\n ),\n\n Span::default(),\n", "file_path": "07_system_fw/src/functor.rs", "rank": 29, "score": 142870.63140895602 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\nenum Type {\n\n Unit,\n\n Int,\n\n Bool,\n\n /// A type variable\n\n Var(usize),\n\n /// The type of functions\n\n Arrow(Box<Type>, Box<Type>),\n\n /// Existential type variable that can be instantiated to a monotype\n\n Exist(usize),\n\n /// Universally quantified type, forall. A\n\n Univ(Box<Kind>, Box<Type>),\n\n /// Class left/right sum type\n\n Sum(Box<Type>, Box<Type>),\n\n /// Simple pair type\n\n Product(Box<Type>, Box<Type>),\n\n\n\n Abs(Box<Kind>, Box<Type>),\n\n App(Box<Type>, Box<Type>),\n\n}\n", "file_path": "x1_bidir/src/main.rs", "rank": 30, "score": 131556.43826635426 }, { "content": "fn list_type() -> Type {\n\n let inner = tyop!(\n\n kind!(* => *),\n\n tyop!(\n\n kind!(*),\n\n sum!(\n\n (\"Nil\", Type::Unit),\n\n (\n\n \"Cons\",\n\n record!((\"head\", Type::Var(0)), (\"tail\", op_app!(Type::Var(1), Type::Var(0))))\n\n )\n\n )\n\n )\n\n );\n\n Type::Recursive(Box::new(inner))\n\n}\n\n\n", "file_path": "07_system_fw/src/functor.rs", "rank": 31, "score": 127019.62385783081 }, { "content": "pub trait Visitor: Sized {\n\n fn visit_var(&mut self, var: usize);\n\n fn visit_abs(&mut self, ty: Type, body: &Term);\n\n fn visit_app(&mut self, t1: &Term, t2: &Term);\n\n fn visit_if(&mut self, guard: &Term, csq: &Term, alt: &Term);\n\n fn visit_let(&mut self, bind: &Term, body: &Term);\n\n fn visit_succ(&mut self, t: &Term);\n\n fn visit_pred(&mut self, t: &Term);\n\n fn visit_iszero(&mut self, t: &Term);\n\n fn visit_const(&mut self, c: &Term);\n\n fn visit_record(&mut self, c: &[Field]);\n\n fn visit_proj(&mut self, c: &Term, proj: &str);\n\n fn visit_typedecl(&mut self, name: &str, ty: &Type);\n\n}\n\n\n", "file_path": "04_stlc/src/visitor.rs", "rank": 32, "score": 121340.95777546577 }, { "content": "fn nat_list() -> Type {\n\n Type::Rec(Box::new(Type::Variant(vec![\n\n variant!(\"Nil\", Type::Unit),\n\n variant!(\"Cons\", Type::Product(vec![Type::Nat, Type::Var(0)])),\n\n ])))\n\n}\n\n\n", "file_path": "06_system_f/src/main.rs", "rank": 33, "score": 119078.14752659485 }, { "content": "fn nat_list2() -> Type {\n\n Type::Variant(vec![\n\n variant!(\"Nil\", Type::Unit),\n\n variant!(\"Cons\", Type::Product(vec![Type::Nat, Type::Var(0)])),\n\n ])\n\n}\n\n\n", "file_path": "06_system_f/src/main.rs", "rank": 34, "score": 119078.14752659485 }, { "content": "fn test_variant() -> Type {\n\n Type::Variant(vec![\n\n Variant {\n\n label: \"A\".into(),\n\n ty: Type::Unit,\n\n },\n\n Variant {\n\n label: \"B\".into(),\n\n ty: Type::Nat,\n\n },\n\n Variant {\n\n label: \"C\".into(),\n\n ty: Type::Nat,\n\n },\n\n ])\n\n}\n\n\n", "file_path": "06_system_f/src/main.rs", "rank": 35, "score": 119078.14752659485 }, { "content": "fn term_subst(mut s: Term, t: &mut Term) {\n\n Shift::new(1).visit(&mut s);\n\n Subst::new(s).visit(t);\n\n Shift::new(-1).visit(t);\n\n}\n\n\n", "file_path": "06_system_f/src/eval.rs", "rank": 36, "score": 119018.34182703342 }, { "content": "pub trait PatternVisitor: Sized {\n\n fn visit_literal(&mut self, lit: &Literal) {}\n\n fn visit_variable(&mut self, var: &String) {}\n\n fn visit_product(&mut self, pats: &Vec<Pattern>) {\n\n for p in pats {\n\n self.visit_pattern(p);\n\n }\n\n }\n\n\n\n fn visit_constructor(&mut self, label: &String, pat: &Pattern) {\n\n self.visit_pattern(pat);\n\n }\n\n\n\n fn visit_pattern(&mut self, pattern: &Pattern) {\n\n match pattern {\n\n Pattern::Any => {}\n\n Pattern::Constructor(label, pat) => self.visit_constructor(label, pat),\n\n Pattern::Product(pat) => self.visit_product(pat),\n\n Pattern::Literal(lit) => self.visit_literal(lit),\n\n Pattern::Variable(var) => self.visit_variable(var),\n\n }\n\n }\n\n}\n", "file_path": "06_system_f/src/visit.rs", "rank": 37, "score": 118902.87868923621 }, { "content": "#[inline]\n\nfn subst(mut val: Term, body: &mut Term) {\n\n Shifting::new(Direction::Up).visit_term(&mut val);\n\n Substitution::new(val).visit_term(body);\n\n Shifting::new(Direction::Down).visit_term(body);\n\n}\n\n\n", "file_path": "04_stlc/src/eval.rs", "rank": 38, "score": 117127.21728900504 }, { "content": "pub fn eval(t: Term) -> Term {\n\n let mut r = t;\n\n while let Ok(tprime) = eval1(r.clone()) {\n\n r = tprime;\n\n if r.is_normal() {\n\n break;\n\n }\n\n }\n\n r\n\n}\n\n\n", "file_path": "01_arith/src/main.rs", "rank": 39, "score": 108311.78476812705 }, { "content": "fn expr_display(ex: &Expr) -> String {\n\n use std::collections::HashMap;\n\n let mut map = HashMap::new();\n\n\n\n fn walk(ex: &Expr, map: &mut HashMap<usize, char>) -> String {\n\n match ex {\n\n Expr::Unit => \"()\".into(),\n\n Expr::True => \"true\".into(),\n\n Expr::False => \"false\".into(),\n\n Expr::Int(i) => format!(\"{}\", i),\n\n Expr::If(e1, e2, e3) => format!(\"if {} then {} else {}\", walk(e1, map), walk(e2, map), walk(e3, map)),\n\n Expr::App(a, b) => format!(\"({} {})\", walk(a, map), walk(b, map)),\n\n Expr::Abs(body) => {\n\n let vc = ('a' as u8 + map.len() as u8) as char;\n\n let vc = *map.entry(map.len()).or_insert(vc);\n\n format!(\"(\\\\{}. {})\", vc, walk(body, map))\n\n }\n\n Expr::Var(idx) => {\n\n let i = map.len() - (*idx + 1);\n\n let vc = ('a' as u8 + i as u8) as char;\n", "file_path": "x1_bidir/src/helpers.rs", "rank": 40, "score": 107472.81772969934 }, { "content": "pub fn test(prog: Elaborated) {\n\n let mut ctx = Context {\n\n hir_map: &prog.elaborated,\n\n defs: HashMap::new(),\n\n ctx: Vec::new(),\n\n gen: 0,\n\n };\n\n\n\n for id in prog.decls {\n\n match prog.elaborated.get(&id) {\n\n Some(Decl::Value(e)) => {\n\n ctx.infer(e);\n\n }\n\n _ => {}\n\n }\n\n\n\n dbg!(&ctx);\n\n }\n\n}\n", "file_path": "07_system_fw/src/hir/bidir.rs", "rank": 41, "score": 106531.90085048217 }, { "content": "/// Small step beta reduction\n\nfn beta_reduce(mut term: Term) -> Term {\n\n match term {\n\n Term::App(mut abs, arg) => match (abs.normal(), arg.normal()) {\n\n (false, _) => Term::App(Box::new(beta_reduce(*abs)), arg),\n\n (_, false) => Term::App(abs, Box::new(beta_reduce(*arg))),\n\n _ => match *abs {\n\n Term::Abs(_, mut body) => {\n\n body.subst(*arg.clone());\n\n *body\n\n }\n\n x => Term::App(Box::new(x), arg),\n\n },\n\n },\n\n Term::Abs(ty, body) => {\n\n if body.normal() {\n\n Term::Abs(ty, body)\n\n } else {\n\n Term::Abs(ty, Box::new(beta_reduce(*body)))\n\n }\n\n }\n\n _ => term,\n\n }\n\n}\n\n\n", "file_path": "x2_dependent/src/main.rs", "rank": 42, "score": 102775.19929189734 }, { "content": "fn value(ctx: &Context, term: &Term) -> bool {\n\n match term {\n\n Term::Unit | Term::True | Term::False | Term::Abs(_, _) | Term::Zero => true,\n\n Term::Succ(t) | Term::Pred(t) | Term::IsZero(t) => value(ctx, t),\n\n Term::Record(fields) => {\n\n for field in fields {\n\n if !value(ctx, &field.term) {\n\n return false;\n\n }\n\n }\n\n true\n\n }\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "04_stlc/src/eval.rs", "rank": 43, "score": 102678.06847323893 }, { "content": "fn parse(ctx: &mut Context, input: &str) {\n\n let mut p = parser::Parser::new(input);\n\n while let Some(tok) = p.parse_term() {\n\n let _ = ev(ctx, *tok);\n\n }\n\n\n\n let diag = p.diagnostic();\n\n if diag.error_count() > 0 {\n\n println!(\"\\n{} error(s) detected while parsing!\", diag.error_count());\n\n println!(\"{}\", diag.emit());\n\n }\n\n}\n\n\n", "file_path": "04_stlc/src/main.rs", "rank": 44, "score": 102430.46636041797 }, { "content": "fn isval(_ctx: &Context, tm: RcTerm) -> bool {\n\n match &tm as &Term {\n\n Term::TmAbs(_, _) => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "02_lambda/src/main.rs", "rank": 45, "score": 100676.13342060165 }, { "content": "struct SetElement<T> {\n\n data: Option<T>,\n\n rank: Cell<u32>,\n\n parent: Cell<usize>,\n\n}\n\n\n\npub struct DisjointSet<T> {\n\n elements: Vec<SetElement<T>>,\n\n components: Cell<usize>,\n\n}\n\n\n\nimpl<T> Default for DisjointSet<T> {\n\n fn default() -> Self {\n\n DisjointSet {\n\n elements: Vec::new(),\n\n components: Cell::new(0),\n\n }\n\n }\n\n}\n\n\n", "file_path": "05_recon/src/disjoint.rs", "rank": 46, "score": 99841.88703629284 }, { "content": "pub fn code_format(src: &str, diag: Diagnostic) {\n\n // let lines = diag.ot\n\n // .iter()\n\n // .map(|(_, sp)| sp.start.line)\n\n // .collect::<std::collections::HashSet<_>>();\n\n let srcl = src.lines().collect::<Vec<&str>>();\n\n\n\n let mut msgs = diag.other.clone();\n\n msgs.insert(0, diag.primary.clone());\n\n\n\n for line in diag.lines() {\n\n println!(\"| {} {}\", line + 1, &srcl[line as usize]);\n\n for anno in &msgs {\n\n if anno.span.start.line != line {\n\n continue;\n\n }\n\n let empty = (0..anno.span.start.col + 3).map(|_| ' ').collect::<String>();\n\n let tilde = (1..anno.span.end.col.saturating_sub(anno.span.start.col))\n\n .map(|_| '~')\n\n .collect::<String>();\n\n println!(\"{}^{}^ --- {}\", empty, tilde, anno.info);\n\n }\n\n }\n\n}\n\n\n", "file_path": "06_system_f/src/main.rs", "rank": 47, "score": 99621.85196535208 }, { "content": "struct Aliaser<'ctx> {\n\n map: &'ctx HashMap<String, Type>,\n\n}\n\n\n\nimpl<'ctx> MutTypeVisitor for Aliaser<'ctx> {\n\n fn visit(&mut self, ty: &mut Type) {\n\n match ty {\n\n Type::Unit | Type::Bool | Type::Nat => {}\n\n Type::Var(v) => {}\n\n Type::Alias(v) => {\n\n if let Some(aliased) = self.map.get(v) {\n\n *ty = aliased.clone();\n\n }\n\n }\n\n Type::Variant(v) => self.visit_variant(v),\n\n Type::Product(v) => self.visit_product(v),\n\n\n\n Type::Arrow(ty1, ty2) => self.visit_arrow(ty1, ty2),\n\n Type::Universal(ty) => self.visit_universal(ty),\n\n Type::Existential(ty) => self.visit_existential(ty),\n", "file_path": "06_system_f/src/types/mod.rs", "rank": 48, "score": 98767.4012188738 }, { "content": "struct TypeSimplifier<'a> {\n\n ctx: &'a mut Context,\n\n res: Result<bool, KindError>,\n\n}\n\n\n\nimpl<'a> TypeSimplifier<'a> {}\n\n\n\nimpl<'a> MutTypeVisitor for TypeSimplifier<'a> {\n\n fn visit_universal(&mut self, kind: &mut TyKind, ty: &mut Type) {\n\n self.ctx.kstack.push(kind.clone());\n\n self.visit(ty);\n\n self.ctx.kstack.pop();\n\n }\n\n\n\n fn visit_existential(&mut self, kind: &mut TyKind, ty: &mut Type) {\n\n self.ctx.kstack.push(kind.clone());\n\n self.visit(ty);\n\n self.ctx.kstack.pop();\n\n }\n\n\n", "file_path": "07_system_fw/src/typecheck.rs", "rank": 49, "score": 98767.4012188738 }, { "content": "pub fn eval1(t: Term) -> Result<Term, RuntimeError> {\n\n use Term::*;\n\n let res = match t {\n\n TmIf(cond, csq, alt) => match *cond {\n\n TmFalse => *alt,\n\n TmTrue => *csq,\n\n _ => TmIf(Box::new(eval1(*cond)?), csq, alt),\n\n },\n\n TmSucc(term) => TmSucc(Box::new(eval1(*term)?)),\n\n TmPred(term) => match *term {\n\n TmZero => TmZero,\n\n TmSucc(nv) => {\n\n if nv.is_numeric() {\n\n *nv\n\n } else {\n\n return Err(RuntimeError::NoRuleApplies);\n\n }\n\n }\n\n _ => TmPred(Box::new(eval1(*term)?)),\n\n },\n", "file_path": "01_arith/src/main.rs", "rank": 50, "score": 97587.83854979399 }, { "content": "#[inline]\n\nfn round(layout: &Layout, align: usize) -> usize {\n\n let pad = padding_needed(layout, align);\n\n let offset = layout.size().checked_add(pad).unwrap();\n\n offset\n\n}\n\n\n", "file_path": "util/src/unsafe_arena.rs", "rank": 51, "score": 91628.02778896152 }, { "content": "#[inline]\n\nfn padding_needed(l: &Layout, align: usize) -> usize {\n\n let len = l.size();\n\n let rounded = len.wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1);\n\n rounded.wrapping_sub(len)\n\n}\n\n\n", "file_path": "util/src/unsafe_arena.rs", "rank": 52, "score": 91628.02778896152 }, { "content": "pub fn eval(ctx: &Context, term: Term) -> Result<Term, Error> {\n\n let mut tp = term;\n\n loop {\n\n println!(\" -> {}\", &tp);\n\n match eval1(ctx, tp.clone()) {\n\n Ok(r) => tp = *r,\n\n Err(e) => {\n\n return Ok(tp);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "04_stlc/src/eval.rs", "rank": 53, "score": 90492.43385636382 }, { "content": "pub fn record_access(fields: &[Field], projection: &str) -> Option<Box<Term>> {\n\n for f in fields {\n\n if f.ident == projection {\n\n return Some(f.term.clone());\n\n }\n\n }\n\n None\n\n}\n\n\n\nimpl fmt::Display for Term {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Term::Unit => write!(f, \"unit\"),\n\n Term::True => write!(f, \"true\"),\n\n Term::False => write!(f, \"false\"),\n\n Term::Zero => write!(f, \"Z\"),\n\n Term::Succ(t) => write!(f, \"S({})\", t),\n\n Term::Pred(t) => write!(f, \"P({})\", t),\n\n Term::IsZero(t) => write!(f, \"IsZero({})\", t),\n\n Term::Var(idx) => write!(f, \"#{}\", idx),\n", "file_path": "04_stlc/src/term.rs", "rank": 54, "score": 88817.11259666356 }, { "content": "fn ev(ctx: &mut Context, term: Term) -> Result<Term, eval::Error> {\n\n let ty = match ctx.type_of(&term) {\n\n Ok(ty) => ty,\n\n Err(err) => {\n\n println!(\"Mistyped term {} => {:?}\", term, err);\n\n return Err(eval::Error::NoRuleApplies);\n\n }\n\n };\n\n let r = eval::eval(&ctx, term)?;\n\n\n\n // This is safe by our typing inference/induction rules\n\n // any well typed term t (checked previously) that evaluates to\n\n // a term t' [ t -> t' ] is also well typed\n\n //\n\n // Furthermore, Γ t:T, t ->* t' => t':T\n\n let ty_ = ctx.type_of(&r);\n\n // assert_eq!(ty_, ty);\n\n println!(\"===> {} -- {:?}\\n\", r, ty_);\n\n\n\n Ok(r)\n\n}\n\n\n", "file_path": "04_stlc/src/main.rs", "rank": 55, "score": 88006.96166513945 }, { "content": "#[inline]\n\nfn extend(a: Layout, b: Layout) -> Layout {\n\n let new_align = std::cmp::max(a.align(), b.align());\n\n let pad = padding_needed(&a, b.align());\n\n let off = a.size().checked_add(pad).unwrap();\n\n let sz = off.checked_add(b.size()).unwrap();\n\n\n\n Layout::from_size_align(sz, new_align).unwrap()\n\n}\n\n\n\nimpl<T> Chunk<T> {\n\n #[inline]\n\n fn layout(capacity: usize) -> Layout {\n\n let chunk_layout = Layout::from_size_align(mem::size_of::<Chunk<T>>(), mem::align_of::<Chunk<T>>()).unwrap();\n\n\n\n let size = mem::size_of::<T>().checked_mul(capacity).unwrap();\n\n let elem_layout = Layout::from_size_align(size, mem::align_of::<T>()).unwrap();\n\n\n\n extend(chunk_layout, elem_layout)\n\n }\n\n\n", "file_path": "util/src/unsafe_arena.rs", "rank": 56, "score": 70563.20109976974 }, { "content": "#[derive(Default, Debug)]\n\nstruct Elaborator {\n\n exist: TypeVar,\n\n context: Vec<Scheme>,\n\n constraints: Vec<(Type, Type)>,\n\n\n\n uni: disjoint::Unifier,\n\n}\n\n\n\nimpl SystemF {\n\n fn new(expr: TypedTerm, ty: Type) -> SystemF {\n\n SystemF { expr, ty }\n\n }\n\n\n\n fn de(self) -> (TypedTerm, Type) {\n\n (self.expr, self.ty)\n\n }\n\n}\n\n\n\nimpl Elaborator {\n\n fn fresh(&mut self) -> TypeVar {\n", "file_path": "05_recon/src/main.rs", "rank": 57, "score": 68083.17613373275 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\nenum Expr {\n\n /// The unit expression, ()\n\n Unit,\n\n True,\n\n False,\n\n If(Box<Expr>, Box<Expr>, Box<Expr>),\n\n Int(usize),\n\n /// A term variable, given in de Bruijn notation\n\n Var(usize),\n\n /// A lambda abstraction, with it's body. (\\x. body)\n\n Abs(Box<Expr>),\n\n /// Application (e1 e2)\n\n App(Box<Expr>, Box<Expr>),\n\n /// Explicit type annotation of a term, (x : A)\n\n Ann(Box<Expr>, Box<Type>),\n\n /// Injection left/right into a sum type x1\n\n Inj(LR, Box<Expr>, Box<Type>),\n\n /// Simplified case expr\n\n Case(Box<Expr>, Arm, Arm),\n\n /// Introduction of a pair\n\n Pair(Box<Expr>, Box<Expr>),\n\n /// Projection left/right from a pair\n\n Proj(LR, Box<Expr>),\n\n}\n\n\n", "file_path": "x1_bidir/src/main.rs", "rank": 58, "score": 66838.0414107279 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\nenum Element {\n\n /// Universal type variable\n\n Var(Kind),\n\n /// Term variable typing x : A. We differ from the paper in that we use\n\n /// de Bruijn indices for variables, so we don't need to mark which var\n\n /// this annotation belongs to - it always belongs to the innermost binding (idx 0)\n\n /// and we will find this by traversing the stack\n\n Ann(Type),\n\n /// Unsolved existential type variable\n\n Exist(usize),\n\n /// Existential type variable that has been solved\n\n /// to some monotype\n\n Solved(usize, Type),\n\n /// I am actually unsure if we really need a marker, due to how we structure\n\n /// scoping, see `with_scope` method.\n\n Marker(usize),\n\n}\n\n\n\n#[derive(Clone, Debug, Default, PartialEq)]\n\npub struct Context {\n", "file_path": "x1_bidir/src/main.rs", "rank": 59, "score": 66838.0414107279 }, { "content": "#[derive(Debug, Clone, PartialEq)]\n\nenum Term {\n\n Universe(usize),\n\n Nat,\n\n Var(usize),\n\n Int(usize),\n\n App(Box<Term>, Box<Term>),\n\n Abs(Box<Term>, Box<Term>),\n\n Pi(Box<Term>, Box<Term>),\n\n}\n\n\n\nimpl Term {\n\n fn normal(&self) -> bool {\n\n match self {\n\n Term::App(_, _) => false,\n\n Term::Pi(a, b) | Term::Abs(a, b) => a.normal() && b.normal(),\n\n _ => true,\n\n }\n\n }\n\n\n\n fn whnf(&self) -> bool {\n", "file_path": "x2_dependent/src/main.rs", "rank": 60, "score": 66838.0414107279 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\nenum Kind {\n\n Star,\n\n Arrow(Box<Kind>, Box<Kind>),\n\n}\n\n\n\n/// A source-level type\n", "file_path": "x1_bidir/src/main.rs", "rank": 61, "score": 66838.0414107279 }, { "content": "#[derive(Copy, Clone, Debug, PartialEq)]\n\nenum LR {\n\n Left,\n\n Right,\n\n}\n\n\n\n/// An expression in our simply typed lambda calculus\n", "file_path": "x1_bidir/src/main.rs", "rank": 62, "score": 66837.92806713475 }, { "content": "#[derive(Debug, Clone)]\n\nenum Error {\n\n Unbound,\n\n NotPi(Term),\n\n Mismatch(Term, Term),\n\n}\n\n\n", "file_path": "x2_dependent/src/main.rs", "rank": 63, "score": 66833.61345536578 }, { "content": "#[derive(Default, Debug, Clone)]\n\nstruct Context {\n\n binding: Vec<Term>,\n\n}\n\n\n", "file_path": "x2_dependent/src/main.rs", "rank": 64, "score": 66683.50639562211 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\nstruct Arm {\n\n pat: Box<Expr>,\n\n expr: Box<Expr>,\n\n}\n\n\n\n/// An element in the typing context\n", "file_path": "x1_bidir/src/main.rs", "rank": 65, "score": 66683.28089667736 }, { "content": "#[derive(Debug, PartialEq, Copy, Clone)]\n\nstruct Info {\n\n capacity: usize,\n\n used: usize,\n\n}\n\n\n\nimpl<T> Default for Arena<T> {\n\n fn default() -> Arena<T> {\n\n Arena {\n\n ptr: Cell::new(ptr::null_mut()),\n\n end: Cell::new(ptr::null_mut()),\n\n chunk: Cell::new(ptr::null_mut()),\n\n marker: marker::PhantomData,\n\n }\n\n }\n\n}\n\n\n\nimpl<T> Arena<T> {\n\n pub fn with_capacity(capacity: usize) -> Arena<T> {\n\n unsafe {\n\n let chunk: *mut Chunk<T> = Chunk::new(ptr::null_mut(), capacity);\n", "file_path": "util/src/unsafe_arena.rs", "rank": 66, "score": 66683.1675530842 }, { "content": "struct Visitor {\n\n cutoff: usize,\n\n}\n\n\n\nimpl Visitor {\n\n fn new() -> Visitor {\n\n Visitor { cutoff: 0 }\n\n }\n\n\n\n fn visit<F: Fn(&mut Term, usize)>(&mut self, term: &mut Term, f: &F) {\n\n match term {\n\n Term::Universe(_) => {}\n\n Term::Nat | Term::Int(_) => {}\n\n Term::Var(_) => {\n\n f(term, self.cutoff);\n\n }\n\n Term::Pi(t1, t2) | Term::Abs(t1, t2) => {\n\n self.visit(t1, f);\n\n self.cutoff += 1;\n\n self.visit(t2, f);\n", "file_path": "x2_dependent/src/main.rs", "rank": 67, "score": 66674.3243360299 }, { "content": "#[derive(PartialEq, PartialOrd)]\n\nenum Entry<T> {\n\n Vacant(Option<NonZeroU32>),\n\n Occupied(T),\n\n}\n\n\n\nuse std::fmt;\n\nimpl<T: fmt::Debug> fmt::Debug for Entry<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Entry::Vacant(opt) => write!(f, \"Vacant({:?})\", opt),\n\n Entry::Occupied(item) => write!(f, \"{:?}\", item),\n\n }\n\n }\n\n}\n\n\n\nimpl<T> std::default::Default for Arena<T> {\n\n fn default() -> Arena<T> {\n\n Arena::with_capacity(MIN_CAPACITY)\n\n }\n\n}\n", "file_path": "util/src/arena.rs", "rank": 68, "score": 64523.569072041624 }, { "content": "#[derive(Default)]\n\nstruct DeclNames<'s> {\n\n values: Vec<&'s str>,\n\n types: Vec<&'s str>,\n\n}\n\n\n\nimpl<'s> DeclNames<'s> {\n\n fn visit_pat(&mut self, pat: &'s Pattern) {\n\n match &pat.kind {\n\n PatKind::Variable(s) => self.values.push(&s),\n\n PatKind::Product(sub) => {\n\n for p in sub {\n\n self.visit_pat(p);\n\n }\n\n }\n\n PatKind::Record(sub) => {\n\n for p in sub {\n\n self.values.push(p);\n\n }\n\n }\n\n PatKind::Ascribe(pat, ty) => self.visit_pat(&pat),\n", "file_path": "07_system_fw/src/elaborate.rs", "rank": 69, "score": 63072.71414611908 }, { "content": "struct Chunk<T> {\n\n capacity: usize,\n\n entries: usize,\n\n prev: *mut Chunk<T>,\n\n marker: marker::PhantomData<T>,\n\n // data stored here\n\n}\n\n\n", "file_path": "util/src/unsafe_arena.rs", "rank": 70, "score": 63067.876270724875 }, { "content": "type Variable = Element;\n\n\n\n#[derive(Debug, Clone)]\n\npub enum Unification {\n\n Unknown(TypeVar),\n\n Constr(Tycon, Vec<Variable>),\n\n}\n\n\n\nimpl Unification {\n\n fn is_var(&self) -> bool {\n\n match self {\n\n Self::Unknown(_) => true,\n\n _ => false,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Default)]\n\npub struct Unifier {\n\n set: disjoint::DisjointSet<Unification>,\n", "file_path": "05_recon/src/disjoint.rs", "rank": 71, "score": 62491.65530152135 }, { "content": "fn main() {\n\n let mut ctx = types::Context::default();\n\n\n\n ctx.alias(\"Var\".into(), test_variant());\n\n ctx.alias(\"NatList\".into(), nat_list());\n\n ctx.alias(\"NB\".into(), nat_list2());\n\n\n\n let args = env::args();\n\n if args.len() > 1 {\n\n for f in args.skip(1) {\n\n println!(\"reading {}\", f);\n\n let file = std::fs::read_to_string(&f).unwrap();\n\n if !parse_and_eval(&mut ctx, &file, false) {\n\n panic!(\"test failed! {}\", f);\n\n }\n\n }\n\n return;\n\n }\n\n\n\n loop {\n\n let mut buffer = String::new();\n\n print!(\"repl: \");\n\n std::io::stdout().flush().unwrap();\n\n std::io::stdin().read_to_string(&mut buffer).unwrap();\n\n\n\n parse_and_eval(&mut ctx, &buffer, true);\n\n }\n\n}\n", "file_path": "06_system_f/src/main.rs", "rank": 72, "score": 62419.08341122375 }, { "content": "fn main() {\n\n // let input = \"(λ x. x x) (λ x. x x) λ x. λ y. y λ x. λ x. x\";\n\n //\n\n let input = \"(λ x. (λ y. y) x) (λ x. x)\";\n\n let mut p = Parser::new(input);\n\n while let Some(tm) = p.parse_term() {\n\n println!(\"{:?}\", tm);\n\n dbg!(eval1(p.ctx(), tm));\n\n // dbg!(term_subst_top(Term::TmVar(Span::default(), 0).into(), tm));\n\n }\n\n\n\n dbg!(p.ctx());\n\n\n\n let diag = p.diagnostic();\n\n if diag.error_count() > 0 {\n\n println!(\"\\n{} error(s) detected while parsing!\", diag.error_count());\n\n println!(\"{}\", diag.emit());\n\n }\n\n}\n", "file_path": "02_lambda/src/main.rs", "rank": 73, "score": 62419.08341122375 }, { "content": "fn main() {\n\n use std::io::prelude::*;\n\n use std::time::{Duration, Instant};\n\n\n\n let input = \"fn m. let y = m in let x = y true in x\";\n\n let input = \"\n\n let id = fn x. x in \n\n let g = id id in \n\n let f = id true in \n\n let h = (id id) 1 in \n\n let j = id 10 in \n\n g f\";\n\n let tm = parser::Parser::new(input).parse_term().unwrap();\n\n\n\n let start = Instant::now();\n\n let mut gen = mutation::Elaborator::default();\n\n let tm = gen.elaborate(&tm);\n\n // let sub = gen.uni.subst();\n\n // let sub = disjoint.solve(gen.constraints);\n\n // let sub = disjoint::solve(gen.constraints.into_iter());\n", "file_path": "05_recon/src/main.rs", "rank": 74, "score": 62419.08341122375 }, { "content": "fn main() {\n\n let input = \"if iszero(succ(zero)) then pred(0) else succ(4)\";\n\n let mut p = Parser::new(input);\n\n while let Some(tm) = p.parse_term() {\n\n print!(\"{:?} ==> \", tm);\n\n println!(\"{:?}\", typing(tm));\n\n }\n\n\n\n let diag = p.diagnostic();\n\n if diag.error_count() > 0 {\n\n println!(\"\\n{} error(s) detected while parsing!\", diag.error_count());\n\n println!(\"{}\", diag.emit());\n\n }\n\n}\n", "file_path": "03_typedarith/src/main.rs", "rank": 75, "score": 62419.08341122375 }, { "content": "fn main() {\n\n println!(\"λ\");\n\n let input = \"if iszero(succ(zero)) then false else succ(4)\";\n\n let mut p = Parser::new(input);\n\n while let Some(tm) = p.parse_term() {\n\n print!(\"{:?} ==> \", tm);\n\n println!(\"{:?}\", eval(tm));\n\n }\n\n\n\n let diag = p.diagnostic();\n\n if diag.error_count() > 0 {\n\n println!(\"\\n{} error(s) detected while parsing!\", diag.error_count());\n\n println!(\"{}\", diag.emit());\n\n }\n\n}\n", "file_path": "01_arith/src/main.rs", "rank": 76, "score": 62419.08341122375 }, { "content": "fn main() {\n\n let mut root: Context = Context::default();\n\n // parse(\n\n // &mut root,\n\n // \"let not = (\\\\x: Bool. if x then false else true) in\n\n // let x = not false in\n\n // let y = not x in\n\n // if y then succ 0 else succ succ 0\",\n\n // );\n\n\n\n parse(&mut root, \"let x = (\\\\y: Nat. y) in x\");\n\n\n\n parse(&mut root, \"(\\\\x: Nat. (\\\\y: Nat. iszero x)) (succ 0) 0\");\n\n\n\n parse(\n\n &mut root,\n\n \"(\\\\x: {a: Bool, b: Bool, c: Nat}. x.b) {a: true, b: false, c: 0}\",\n\n );\n\n\n\n // parse(&mut root, \"let not = \\\\x: Bool. if x then false else true in {a:\n\n // 0, b: \\\\x: Bool. not x, c: unit}.b \"); parse(&mut root, \"type Struct\n\n // = {valid: Bool, number: Nat}\"); parse(&mut root, \"(\\\\x: Struct.\n\n // x.number) {valid: true, number: succ 0}\"); parse(\n\n // &mut root,\n\n // \"(\\\\x: Struct. x.number) {valid: false, number: succ 0}\",\n\n // )\n\n // dbg!(root);\n\n}\n", "file_path": "04_stlc/src/main.rs", "rank": 77, "score": 62419.08341122375 }, { "content": "fn main() {\n\n macro_rules! term {\n\n (Abs; $ex1:expr, $ex2:expr) => {\n\n Term::Abs(Box::new($ex1), Box::new($ex2))\n\n };\n\n (App; $ex1:expr, $ex2:expr) => {\n\n Term::App(Box::new($ex1), Box::new($ex2))\n\n };\n\n (Pi; $ex1:expr, $ex2:expr) => {\n\n Term::Pi(Box::new($ex1), Box::new($ex2))\n\n };\n\n (Var; $ex1:expr) => {\n\n Term::Var($ex1)\n\n };\n\n (Star) => {\n\n Term::Universe(0)\n\n };\n\n (Universe; $ex:expr) => {\n\n Term::Universe($ex)\n\n };\n", "file_path": "x2_dependent/src/main.rs", "rank": 78, "score": 61030.59278271173 }, { "content": "fn main() {\n\n loop {\n\n let mut buffer = String::new();\n\n print!(\"repl: \");\n\n std::io::stdout().flush().unwrap();\n\n std::io::stdin().read_to_string(&mut buffer).unwrap();\n\n let mut p = Parser::new(&buffer);\n\n // let mut ctx = elaborate::ElaborationContext::new();\n\n // loop {\n\n match p.parse_program() {\n\n Ok(d) => {\n\n println!(\"====> {:?}\", &d.decls);\n\n // println!(\"Validate: {:?}\", validate::ProgramValidation::validate(&d));\n\n let elab = elaborate::ElaborationContext::elaborate(&d).unwrap();\n\n println!(\"-----\");\n\n hir::bidir::test(elab);\n\n }\n\n Err(Error {\n\n kind: ErrorKind::EOF, ..\n\n }) => {}\n\n Err(e) => {\n\n println!(\"[err] {:?}\", e);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "07_system_fw/src/main.rs", "rank": 79, "score": 61030.59278271173 }, { "content": "fn main() {\n\n // \\x. (x 1, x True) : forall A. (A -> A) -> (Int, Bool)\n\n let h = abs!(pair!(app!(var!(0), Expr::Int(1)), app!(var!(0), Expr::True)));\n\n let h = ann!(\n\n h,\n\n arrow!(\n\n forall!(arrow!(Type::Var(0), Type::Var(0))),\n\n product!(Type::Int, Type::Bool)\n\n )\n\n );\n\n let g = app!(h, abs!(var!(0)));\n\n\n\n let ty = Type::Abs(Box::new(Kind::Star), Box::new(Type::Var(0)));\n\n let ty = Type::App(Box::new(ty), Box::new(Type::Int));\n\n let f = ann!(Expr::Int(99), ty);\n\n\n\n let ty_opt = Type::Abs(Box::new(Kind::Star), Box::new(sum!(Type::Var(0), Type::Unit)));\n\n // \\x. inl x of [\\X::* => X + ()] @ 'A\n\n let some = abs!(inj!(l; var!(0), ty_opt.clone()));\n\n let some = ann!(\n", "file_path": "x1_bidir/src/main.rs", "rank": 80, "score": 61030.59278271173 }, { "content": "fn shift(d: isize, tm: RcTerm) -> RcTerm {\n\n shift1(d, 0, tm)\n\n}\n\n\n", "file_path": "02_lambda/src/main.rs", "rank": 81, "score": 48231.79020290013 }, { "content": "fn eval1(ctx: &Context, tm: RcTerm) -> RcTerm {\n\n match &tm as &Term {\n\n Term::TmApp(_, t, v) if isval(ctx, v.clone()) => {\n\n if let Term::TmAbs(_, t2) = &t as &Term {\n\n term_subst_top(v.clone(), t2.clone())\n\n } else {\n\n panic!(\"No rule applies!\")\n\n }\n\n }\n\n Term::TmApp(sp, v, t) if isval(ctx, v.clone()) => {\n\n let t_prime = eval1(ctx, t.clone());\n\n Term::TmApp(*sp, v.clone(), t_prime).into()\n\n }\n\n Term::TmApp(sp, t1, t2) => {\n\n let t_prime = eval1(ctx, t1.clone());\n\n Term::TmApp(*sp, t_prime, t2.clone()).into()\n\n }\n\n _ => panic!(\"No rule applies!\"),\n\n }\n\n}\n\n\n", "file_path": "02_lambda/src/main.rs", "rank": 82, "score": 47295.21509001669 }, { "content": "fn term_subst_top(s: RcTerm, tm: RcTerm) -> RcTerm {\n\n shift(-1, subst(0, shift(1, s), tm))\n\n}\n\n\n", "file_path": "02_lambda/src/main.rs", "rank": 83, "score": 45577.80005433455 }, { "content": "fn shift1(d: isize, c: isize, tm: RcTerm) -> RcTerm {\n\n match &tm as &Term {\n\n Term::TmVar(sp, x) => {\n\n if *x as isize >= c {\n\n Term::TmVar(*sp, *x + d as usize).into()\n\n } else {\n\n Term::TmVar(*sp, *x).into()\n\n }\n\n }\n\n Term::TmAbs(sp, x) => Term::TmAbs(*sp, shift1(d, c + 1, x.clone())).into(),\n\n Term::TmApp(sp, a, b) => Term::TmApp(*sp, shift1(d, c, a.clone()), shift1(d, c, b.clone())).into(),\n\n }\n\n}\n\n\n", "file_path": "02_lambda/src/main.rs", "rank": 84, "score": 45528.24095198552 }, { "content": "fn subst(j: isize, s: RcTerm, tm: RcTerm) -> RcTerm {\n\n subst_walk(j, s, 0, tm)\n\n}\n\n\n", "file_path": "02_lambda/src/main.rs", "rank": 85, "score": 44645.05723679649 }, { "content": "fn eval1(ctx: &Context, term: Term) -> Result<Box<Term>, Error> {\n\n match term {\n\n Term::App(t1, t2) => {\n\n if value(ctx, &t2) {\n\n match *t1 {\n\n Term::Abs(_, mut abs) => {\n\n subst(*t2, abs.as_mut());\n\n Ok(abs)\n\n }\n\n _ => {\n\n let t_prime = eval1(ctx, *t1)?;\n\n Ok(Term::App(t_prime, t2).into())\n\n }\n\n }\n\n } else if value(ctx, &t1) {\n\n let t_prime = eval1(ctx, *t2)?;\n\n Ok(Term::App(t1.clone(), t_prime).into())\n\n } else {\n\n let t_prime = eval1(ctx, *t1)?;\n\n Ok(Term::App(t_prime, t2.clone()).into())\n", "file_path": "04_stlc/src/eval.rs", "rank": 86, "score": 43897.76286468882 }, { "content": "fn subst_walk(j: isize, s: RcTerm, c: isize, t: RcTerm) -> RcTerm {\n\n match &t as &Term {\n\n Term::TmVar(_, x) => {\n\n if *x as isize == j + c {\n\n shift(c, s)\n\n } else {\n\n t\n\n }\n\n }\n\n Term::TmAbs(sp, tm) => Term::TmAbs(*sp, subst_walk(j, s, c + 1, tm.clone())).into(),\n\n Term::TmApp(sp, lhs, rhs) => Term::TmApp(\n\n *sp,\n\n subst_walk(j, s.clone(), c, lhs.clone()),\n\n subst_walk(j, s, c, rhs.clone()),\n\n )\n\n .into(),\n\n }\n\n}\n\n\n", "file_path": "02_lambda/src/main.rs", "rank": 87, "score": 42389.28250848737 }, { "content": "pub struct RecordField {\n\n // pub span: Span,\n\n pub ident: String,\n\n pub ty: Box<Type>,\n\n}\n\n\n\nimpl fmt::Debug for Type {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Type::Unit => write!(f, \"Unit\"),\n\n Type::Bool => write!(f, \"Bool\"),\n\n Type::Nat => write!(f, \"Nat\"),\n\n Type::Arrow(a, b) => write!(f, \"({:?}->{:?})\", a, b),\n\n Type::Record(r) => write!(\n\n f,\n\n \"{} {{{}}}\",\n\n r.ident,\n\n r.fields\n\n .iter()\n\n .map(|x| format!(\"{}:{:?}\", x.ident, x.ty))\n", "file_path": "04_stlc/src/typing.rs", "rank": 92, "score": 39844.17766594056 }, { "content": "use crate::term::Term;\n\nuse std::fmt;\n\n\n\n#[derive(Clone, PartialEq, PartialOrd)]\n\npub enum Type {\n\n Unit,\n\n Bool,\n\n Nat,\n\n Arrow(Box<Type>, Box<Type>),\n\n Record(Record),\n\n}\n\n\n\n#[derive(Clone, PartialEq, PartialOrd)]\n\npub struct Record {\n\n // pub span: Span,\n\n pub ident: String,\n\n pub fields: Vec<RecordField>,\n\n}\n\n\n\n#[derive(Clone, PartialEq, PartialOrd)]\n", "file_path": "04_stlc/src/typing.rs", "rank": 93, "score": 39842.03686352601 }, { "content": " } else {\n\n Err(TypeError::ParameterMismatch)\n\n }\n\n }\n\n _ => Err(TypeError::ExpectedArrow),\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\n// impl<'a> Visitor for Context<'a> {\n\n// fn visit_var(&mut self, var: usize) {\n\n// self.get(var)\n\n// .cloned()\n\n// .ok_or(TypeError::UnknownVariable(var))\n\n// }\n\n\n\n// fn visit_abs(&mut self, ty: Type, body: &Term) {\n\n// let ty = match ty {\n", "file_path": "04_stlc/src/typing.rs", "rank": 94, "score": 39838.53029393109 }, { "content": "\n\n// fn visit_record(&mut self, rec: &[RecordField]) {\n\n// let tys = rec\n\n// .iter()\n\n// .map(|f| f.data.accept(self).map(|ty| (f.label.clone(), ty)))\n\n// .collect::<Result<Vec<(Rc<String>, Type)>, TypeError>>()?;\n\n// Ok(Type::Record(tys))\n\n// }\n\n\n\n// fn visit_proj(&mut self, c: &Term, proj: Rc<String>) {\n\n// match c.accept(self)? {\n\n// Type::Record(fields) => {\n\n// for f in &fields {\n\n// if f.0 == proj {\n\n// return Ok(f.1.clone());\n\n// }\n\n// }\n\n// Err(TypeError::InvalidProjection)\n\n// }\n\n// _ => Err(TypeError::NotRecordType),\n\n// }\n\n// }\n\n\n\n// fn visit_typedecl(&mut self, name: Rc<String>, ty: &Type) {\n\n// self.bind(name.to_string(), ty.clone());\n\n// Ok(Type::Unit)\n\n// }\n\n// }\n", "file_path": "04_stlc/src/typing.rs", "rank": 95, "score": 39838.01848999101 }, { "content": " .collect::<Vec<String>>()\n\n .join(\",\")\n\n ),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, PartialOrd)]\n\npub enum TypeError {\n\n Guard,\n\n ArmMismatch,\n\n ParameterMismatch,\n\n UnknownVariable(usize),\n\n ExpectedArrow,\n\n InvalidProjection,\n\n NotRecordType,\n\n}\n\n\n\n#[derive(Clone, Debug, Default)]\n\n/// A typing context, Γ\n", "file_path": "04_stlc/src/typing.rs", "rank": 96, "score": 39837.47359530045 }, { "content": " }\n\n }\n\n }\n\n\n\n pub fn get(&self, idx: usize) -> Option<&Type> {\n\n if idx == 0 {\n\n self.ty.as_ref()\n\n } else if let Some(ctx) = self.parent {\n\n ctx.get(idx - 1)\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n pub fn type_of(&self, term: &Term) -> Result<Type, TypeError> {\n\n use Term::*;\n\n match term {\n\n Unit => Ok(Type::Unit),\n\n True => Ok(Type::Bool),\n\n False => Ok(Type::Bool),\n", "file_path": "04_stlc/src/typing.rs", "rank": 97, "score": 39835.79329460841 }, { "content": "// Type::Var(name) => self\n\n// .types\n\n// .borrow()\n\n// .get(&name)\n\n// .cloned()\n\n// .ok_or(TypeError::Undefined(name))?,\n\n// x => x,\n\n// };\n\n// let mut ctx = self.add(ty.clone());\n\n// let ty_body: Result<Type, TypeError> = body.accept(&mut ctx);\n\n// Ok(Type::Arrow(Box::new(ty), Box::new(ty_body?)))\n\n// }\n\n\n\n// fn visit_app(&mut self, t1: &Term, t2: &Term) {\n\n// let ty1 = t1.accept(self)?;\n\n// let ty2 = t2.accept(self)?;\n\n// match ty1 {\n\n// Type::Arrow(ty11, ty12) => {\n\n// if *ty11 == ty2 {\n\n// Ok(*ty12)\n", "file_path": "04_stlc/src/typing.rs", "rank": 99, "score": 39831.34853022152 } ]
Rust
circuit/src/execution-delivery/src/lib.rs
xylix/t3rn
fda43863b640fae63988042addadc21a1bfa32de
#![cfg_attr(not(feature = "std"), no_std)] use codec::{Decode, Encode}; use frame_support::traits::Get; use frame_system::offchain::{AppCrypto, CreateSignedTransaction, SignedPayload, SigningTypes}; use sp_core::crypto::KeyTypeId; use sp_runtime::{ transaction_validity::{InvalidTransaction, TransactionValidity, ValidTransaction}, RuntimeDebug, }; use sp_std::vec::Vec; use t3rn_primitives::*; #[cfg(test)] mod tests; pub const KEY_TYPE: KeyTypeId = KeyTypeId(*b"btc!"); pub mod crypto { use super::KEY_TYPE; use sp_core::sr25519::Signature as Sr25519Signature; use sp_runtime::{ app_crypto::{app_crypto, sr25519}, traits::Verify, }; app_crypto!(sr25519, KEY_TYPE); pub struct TestAuthId; impl frame_system::offchain::AppCrypto<<Sr25519Signature as Verify>::Signer, Sr25519Signature> for TestAuthId { type RuntimeAppPublic = Public; type GenericSignature = sp_core::sr25519::Signature; type GenericPublic = sp_core::sr25519::Public; } } pub use pallet::*; #[frame_support::pallet] pub mod pallet { use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; #[pallet::config] pub trait Config: CreateSignedTransaction<Call<Self>> + frame_system::Config { type AuthorityId: AppCrypto<Self::Public, Self::Signature>; type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>; type Call: From<Call<Self>>; #[pallet::constant] type GracePeriod: Get<Self::BlockNumber>; #[pallet::constant] type UnsignedInterval: Get<Self::BlockNumber>; #[pallet::constant] type UnsignedPriority: Get<TransactionPriority>; } #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] pub struct Pallet<T>(_); #[pallet::hooks] impl<T: Config> Hooks<BlockNumberFor<T>> for Pallet<T> { fn on_initialize(_n: T::BlockNumber) -> Weight { 0 } fn on_finalize(_n: T::BlockNumber) { } fn offchain_worker(_n: T::BlockNumber) { } } #[pallet::call] impl<T: Config> Pallet<T> { #[pallet::weight(0)] pub fn submit_composable_exec_order( origin: OriginFor<T>, io_schedule: Vec<u8>, components: Vec<Compose<T::AccountId, u64>>, ) -> DispatchResultWithPostInfo { let who = ensure_signed(origin)?; let inter_schedule: InterExecSchedule<T::AccountId, u64> = Self::decompose_io_schedule(components, io_schedule).expect("Wrong io schedule"); for phase in inter_schedule.phases.clone() { for step in phase.steps { Self::deposit_event(Event::NewPhase(who.clone(), 0, step.compose.name.clone())); } } Ok(().into()) } #[pallet::weight(0)] pub fn dummy_check_payload_origin( origin: OriginFor<T>, _price_payload: Payload<T::Public, T::BlockNumber>, _signature: T::Signature, ) -> DispatchResultWithPostInfo { ensure_none(origin)?; Ok(().into()) } } #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] pub enum Event<T: Config> { NewPhase(T::AccountId, u8, Vec<u8>), } #[pallet::validate_unsigned] impl<T: Config> ValidateUnsigned for Pallet<T> { type Call = Call<T>; fn validate_unsigned(_source: TransactionSource, call: &Self::Call) -> TransactionValidity { if let Call::dummy_check_payload_origin(ref payload, ref signature) = call { let signature_valid = SignedPayload::<T>::verify::<T::AuthorityId>(payload, signature.clone()); if !signature_valid { return InvalidTransaction::BadProof.into(); } ValidTransaction::with_tag_prefix("BlankTaskOffchainWorker") .longevity(5) .propagate(true) .build() } else { InvalidTransaction::Call.into() } } } } #[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug)] pub struct Payload<Public, BlockNumber> { block_number: BlockNumber, public: Public, } impl<T: SigningTypes> SignedPayload<T> for Payload<T::Public, T::BlockNumber> { fn public(&self) -> T::Public { self.public.clone() } } impl<T: Config> Pallet<T> { #[allow(dead_code)] pub fn say_hello() -> &'static str { "hello" } pub fn decompose_io_schedule( _components: Vec<Compose<T::AccountId, u64>>, _io_schedule: Vec<u8>, ) -> Result<InterExecSchedule<T::AccountId, u64>, &'static str> { let inter_schedule = InterExecSchedule::default(); Ok(inter_schedule) } }
#![cfg_attr(not(feature = "std"), no_std)] use codec::{Decode, Encode}; use frame_support::traits::Get; use frame_system::offchain::{AppCrypto, CreateSignedTransaction, SignedPayload, SigningTypes}; use sp_core::crypto::KeyTypeId; use sp_runtime::{ transaction_validity::{InvalidTransaction, TransactionValidity, ValidTransaction}, RuntimeDebug, }; use sp_std::vec::Vec; use t3rn_primitives::*; #[cfg(test)] mod tests; pub const KEY_TYPE: KeyTypeId = KeyTypeId(*b"btc!"); pub mod crypto { use super::KEY_TYPE; use sp_core::sr25519::Signature as Sr25519Signature; use sp_runtime::{ app_crypto::{app_crypto, sr25519}, traits::Verify, }; app_crypto!(sr25519, KEY_TYPE); pub struct TestAuthId; impl frame_system::offchain::AppCrypto<<Sr25519Signature as Verify>::Signer, Sr25519Signature> for TestAuthId { type RuntimeAppPublic = Public; type GenericSignature = sp_core::sr25519::Signature; type GenericPublic = sp_core::sr25519::Public; } } pub use pallet::*; #[frame_support::pallet] pub mod pallet { use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; #[pallet::config] pub trait Config: CreateSignedTransaction<Call<Self>> + frame_system::Config { type AuthorityId: AppCrypto<Self::Public, Self::Signature>; type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>; type Call: From<Call<Self>>; #[pallet::constant] type GracePeriod: Get<Self::BlockNumber>; #[pallet::constant] type UnsignedInterval: Get
ong io schedule"); for phase in inter_schedule.phases.clone() { for step in phase.steps { Self::deposit_event(Event::NewPhase(who.clone(), 0, step.compose.name.clone())); } } Ok(().into()) } #[pallet::weight(0)] pub fn dummy_check_payload_origin( origin: OriginFor<T>, _price_payload: Payload<T::Public, T::BlockNumber>, _signature: T::Signature, ) -> DispatchResultWithPostInfo { ensure_none(origin)?; Ok(().into()) } } #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] pub enum Event<T: Config> { NewPhase(T::AccountId, u8, Vec<u8>), } #[pallet::validate_unsigned] impl<T: Config> ValidateUnsigned for Pallet<T> { type Call = Call<T>; fn validate_unsigned(_source: TransactionSource, call: &Self::Call) -> TransactionValidity { if let Call::dummy_check_payload_origin(ref payload, ref signature) = call { let signature_valid = SignedPayload::<T>::verify::<T::AuthorityId>(payload, signature.clone()); if !signature_valid { return InvalidTransaction::BadProof.into(); } ValidTransaction::with_tag_prefix("BlankTaskOffchainWorker") .longevity(5) .propagate(true) .build() } else { InvalidTransaction::Call.into() } } } } #[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug)] pub struct Payload<Public, BlockNumber> { block_number: BlockNumber, public: Public, } impl<T: SigningTypes> SignedPayload<T> for Payload<T::Public, T::BlockNumber> { fn public(&self) -> T::Public { self.public.clone() } } impl<T: Config> Pallet<T> { #[allow(dead_code)] pub fn say_hello() -> &'static str { "hello" } pub fn decompose_io_schedule( _components: Vec<Compose<T::AccountId, u64>>, _io_schedule: Vec<u8>, ) -> Result<InterExecSchedule<T::AccountId, u64>, &'static str> { let inter_schedule = InterExecSchedule::default(); Ok(inter_schedule) } }
<Self::BlockNumber>; #[pallet::constant] type UnsignedPriority: Get<TransactionPriority>; } #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] pub struct Pallet<T>(_); #[pallet::hooks] impl<T: Config> Hooks<BlockNumberFor<T>> for Pallet<T> { fn on_initialize(_n: T::BlockNumber) -> Weight { 0 } fn on_finalize(_n: T::BlockNumber) { } fn offchain_worker(_n: T::BlockNumber) { } } #[pallet::call] impl<T: Config> Pallet<T> { #[pallet::weight(0)] pub fn submit_composable_exec_order( origin: OriginFor<T>, io_schedule: Vec<u8>, components: Vec<Compose<T::AccountId, u64>>, ) -> DispatchResultWithPostInfo { let who = ensure_signed(origin)?; let inter_schedule: InterExecSchedule<T::AccountId, u64> = Self::decompose_io_schedule(components, io_schedule).expect("Wr
random
[ { "content": "#[cfg(not(test))]\n\npub trait TestAuxiliaries {}\n\n#[cfg(not(test))]\n\nimpl<T> TestAuxiliaries for T {}\n\n\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/versatile-wasm/src/gas.rs", "rank": 1, "score": 203546.12681136615 }, { "content": "#[cfg(not(test))]\n\npub trait TestAuxiliaries {}\n\n#[cfg(not(test))]\n\nimpl<T> TestAuxiliaries for T {}\n\n\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/gas.rs", "rank": 2, "score": 200605.51656979747 }, { "content": "/// This trait can be used to check whether the host environment can satisfy\n\n/// a requested function import.\n\npub trait ImportSatisfyCheck {\n\n /// Returns `true` if the host environment contains a function with\n\n /// the specified name and its type matches to the given type, or `false`\n\n /// otherwise.\n\n fn can_satisfy(name: &[u8], func_type: &FunctionType) -> bool;\n\n}\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/versatile-wasm/src/env_def/mod.rs", "rank": 3, "score": 195654.0072747023 }, { "content": "/// This trait represents a token that can be used for charging `GasMeter`.\n\n/// There is no other way of charging it.\n\n///\n\n/// Implementing type is expected to be super lightweight hence `Copy` (`Clone` is added\n\n/// for consistency). If inlined there should be no observable difference compared\n\n/// to a hand-written code.\n\npub trait Token<T: Trait>: Copy + Clone + TestAuxiliaries {\n\n /// Metadata type, which the token can require for calculating the amount\n\n /// of gas to charge. Can be a some configuration type or\n\n /// just the `()`.\n\n type Metadata;\n\n\n\n /// Calculate amount of gas that should be taken by this token.\n\n ///\n\n /// This function should be really lightweight and must not fail. It is not\n\n /// expected that implementors will query the storage or do any kinds of heavy operations.\n\n ///\n\n /// That said, implementors of this function still can run into overflows\n\n /// while calculating the amount. In this case it is ok to use saturating operations\n\n /// since on overflow they will return `max_value` which should consume all gas.\n\n fn calculate_amount(&self, metadata: &Self::Metadata) -> Gas;\n\n}\n\n\n\n/// A wrapper around a type-erased trait object of what used to be a `Token`.\n\n#[cfg(test)]\n\npub struct ErasedToken {\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/versatile-wasm/src/gas.rs", "rank": 4, "score": 191870.555249435 }, { "content": "/// Configure the pallet by specifying the parameters and types on which it depends.\n\npub trait Trait: frame_system::Trait {\n\n /// Because this pallet emits events, it depends on the runtime's definition of an event.\n\n type Event: From<Event<Self>> + Into<<Self as frame_system::Trait>::Event>;\n\n}\n\n\n\n// The pallet's runtime storage items.\n\n// https://substrate.dev/docs/en/knowledgebase/runtime/storage\n\ndecl_storage! {\n", "file_path": "gateway/demo-runtime/pallets/template/src/lib.rs", "rank": 5, "score": 190975.5104989004 }, { "content": "/// This trait can be used to check whether the host environment can satisfy\n\n/// a requested function import.\n\npub trait ImportSatisfyCheck {\n\n /// Returns `true` if the host environment contains a function with\n\n /// the specified name and its type matches to the given type, or `false`\n\n /// otherwise.\n\n fn can_satisfy(name: &[u8], func_type: &FunctionType) -> bool;\n\n}\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/wasm/env_def/mod.rs", "rank": 6, "score": 190521.6406453257 }, { "content": "pub trait ConvertibleToWasm: Sized {\n\n const VALUE_TYPE: ValueType;\n\n type NativeType;\n\n fn to_typed_value(self) -> Value;\n\n fn from_typed_value(_: Value) -> Option<Self>;\n\n}\n\nimpl ConvertibleToWasm for i32 {\n\n type NativeType = i32;\n\n const VALUE_TYPE: ValueType = ValueType::I32;\n\n fn to_typed_value(self) -> Value {\n\n Value::I32(self)\n\n }\n\n fn from_typed_value(v: Value) -> Option<Self> {\n\n v.as_i32()\n\n }\n\n}\n\nimpl ConvertibleToWasm for u32 {\n\n type NativeType = u32;\n\n const VALUE_TYPE: ValueType = ValueType::I32;\n\n fn to_typed_value(self) -> Value {\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/versatile-wasm/src/env_def/mod.rs", "rank": 7, "score": 190367.82337890795 }, { "content": "/// This trait represents a token that can be used for charging `GasMeter`.\n\n/// There is no other way of charging it.\n\n///\n\n/// Implementing type is expected to be super lightweight hence `Copy` (`Clone` is added\n\n/// for consistency). If inlined there should be no observable difference compared\n\n/// to a hand-written code.\n\npub trait Token<T: Trait>: Copy + Clone + TestAuxiliaries {\n\n /// Metadata type, which the token can require for calculating the amount\n\n /// of gas to charge. Can be a some configuration type or\n\n /// just the `()`.\n\n type Metadata;\n\n\n\n /// Calculate amount of gas that should be taken by this token.\n\n ///\n\n /// This function should be really lightweight and must not fail. It is not\n\n /// expected that implementors will query the storage or do any kinds of heavy operations.\n\n ///\n\n /// That said, implementors of this function still can run into overflows\n\n /// while calculating the amount. In this case it is ok to use saturating operations\n\n /// since on overflow they will return `max_value` which should consume all gas.\n\n fn calculate_amount(&self, metadata: &Self::Metadata) -> Gas;\n\n}\n\n\n\n/// A wrapper around a type-erased trait object of what used to be a `Token`.\n\n#[cfg(test)]\n\npub struct ErasedToken {\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/gas.rs", "rank": 8, "score": 189506.97029875003 }, { "content": "pub fn get_storage_root_for_code<T: Trait>(\n\n code: Vec<u8>,\n\n input_data: Vec<u8>,\n\n escrow_account: &T::AccountId,\n\n) -> Vec<u8> {\n\n let temp_contract_address = T::DetermineContractAddress::contract_address_for(\n\n &T::Hashing::hash(&code.clone()),\n\n &input_data.clone(),\n\n &escrow_account.clone(),\n\n );\n\n child::root(\n\n &<ContractInfoOf<T>>::get(temp_contract_address.clone())\n\n .unwrap()\n\n .get_alive()\n\n .unwrap()\n\n .child_trie_info(),\n\n )\n\n}\n\n\n", "file_path": "gateway/pallet-escrow-gateway/contracts-gateway/src/lib.rs", "rank": 9, "score": 189034.0442355716 }, { "content": "type Extrinsic = TestXt<Call, ()>;\n", "file_path": "circuit/src/execution-delivery/src/tests.rs", "rank": 10, "score": 187807.978101929 }, { "content": "pub trait Trait: system::Trait {\n\n type Event: From<Event<Self>> + Into<<Self as system::Trait>::Event>;\n\n}\n\n\n\ndecl_storage! {\n", "file_path": "gateway/pallet-escrow-gateway/runtime-gateway/fixtures/flipper/src/lib.rs", "rank": 11, "score": 186950.18274482433 }, { "content": "pub trait Trait: system::Trait {}\n\n\n\ndecl_storage! {\n", "file_path": "gateway/pallet-escrow-gateway/runtime-gateway/fixtures/weights/src/lib.rs", "rank": 12, "score": 186950.18274482433 }, { "content": "pub trait ConvertibleToWasm: Sized {\n\n const VALUE_TYPE: ValueType;\n\n type NativeType;\n\n fn to_typed_value(self) -> Value;\n\n fn from_typed_value(_: Value) -> Option<Self>;\n\n}\n\nimpl ConvertibleToWasm for i32 {\n\n type NativeType = i32;\n\n const VALUE_TYPE: ValueType = ValueType::I32;\n\n fn to_typed_value(self) -> Value {\n\n Value::I32(self)\n\n }\n\n fn from_typed_value(v: Value) -> Option<Self> {\n\n v.as_i32()\n\n }\n\n}\n\nimpl ConvertibleToWasm for u32 {\n\n type NativeType = u32;\n\n const VALUE_TYPE: ValueType = ValueType::I32;\n\n fn to_typed_value(self) -> Value {\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/wasm/env_def/mod.rs", "rank": 13, "score": 185235.45674953135 }, { "content": "pub fn execute_escrow_call_recursively<'a, T: Trait>(\n\n escrow_account: &T::AccountId,\n\n requester: &T::AccountId,\n\n target_dest: &T::AccountId,\n\n value: BalanceOf<T>,\n\n input_data: Vec<u8>,\n\n mut gas_meter: &mut GasMeter<T>,\n\n cfg: &Config<T>,\n\n transfers: &mut Vec<TransferEntry>,\n\n deferred_storage_writes: &mut Vec<DeferredStorageWrite>,\n\n call_stamps: &mut Vec<CallStamp>,\n\n code_hash: &CodeHash<T>,\n\n) -> ExecResult {\n\n let vm = WasmVm::new(&cfg.schedule);\n\n let loader = WasmLoader::new(&cfg.schedule);\n\n let mut ctx = ExecutionContext::top_level(escrow_account.clone(), &cfg, &vm, &loader);\n\n\n\n let executable = WasmExecutable {\n\n entrypoint_name: \"call\",\n\n prefab_module: load_code::<T>(code_hash, &cfg.schedule)?,\n", "file_path": "gateway/pallet-escrow-gateway/contracts-gateway/src/lib.rs", "rank": 14, "score": 185164.05452682188 }, { "content": "/// Dispatch calls to runtime requested during execution of WASM Binaries.\n\npub trait DispatchRuntimeCall<T: VersatileWasm> {\n\n fn dispatch_runtime_call(\n\n module_name: &str,\n\n fn_name: &str,\n\n input: &[u8],\n\n escrow_account: &<T as system::Trait>::AccountId,\n\n requested: &<T as system::Trait>::AccountId,\n\n callee: &<T as system::Trait>::AccountId,\n\n value: BalanceOf<T>,\n\n gas: &mut crate::gas::GasMeter<T>,\n\n ) -> DispatchResult;\n\n}\n\n\n\ndecl_event! {\n\n pub enum Event<T>\n\n where\n\n <T as system::Trait>::AccountId,\n\n {\n\n /// An event deposited upon execution of a contract from the account.\n\n /// \\[escrow_account, requester_account, data\\]\n\n VersatileVMExecution(AccountId, AccountId, Vec<u8>),\n\n }\n\n}\n\n\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/versatile-wasm/src/lib.rs", "rank": 15, "score": 183600.2638021978 }, { "content": "pub trait Trait: EscrowTrait + escrow_contracts_wrapper::Trait {\n\n type Event: From<Event<Self>> + Into<<Self as system::Trait>::Event>;\n\n type WhenStateChangedForceTry: Get<bool>;\n\n}\n\n\n\ndecl_storage! {\n", "file_path": "gateway/pallet-escrow-gateway/contracts-gateway/src/lib.rs", "rank": 16, "score": 183316.41472319834 }, { "content": "pub trait Trait: EscrowTrait + VersatileWasm {\n\n type Event: From<Event<Self>> + Into<<Self as system::Trait>::Event>;\n\n}\n\n\n\ndecl_storage! {\n", "file_path": "gateway/pallet-escrow-gateway/runtime-gateway/src/lib.rs", "rank": 17, "score": 182017.18978077354 }, { "content": "pub trait EscrowTrait: system::Trait + sudo::Trait {\n\n type Currency: Currency<Self::AccountId>;\n\n type Time: Time;\n\n}\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/src/lib.rs", "rank": 18, "score": 181983.13180172813 }, { "content": "#[cfg(test)]\n\npub trait TestAuxiliaries: Any + Debug + PartialEq + Eq {}\n\n#[cfg(test)]\n\nimpl<T: Any + Debug + PartialEq + Eq> TestAuxiliaries for T {}\n\n\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/versatile-wasm/src/gas.rs", "rank": 19, "score": 175715.0403242824 }, { "content": "/// A trait that represent a virtual machine.\n\n///\n\n/// You can view a virtual machine as something that takes code, an input data buffer,\n\n/// queries it and/or performs actions on the given `Ext` and optionally\n\n/// returns an output data buffer. The type of code depends on the particular virtual machine.\n\n///\n\n/// Execution of code can end by either implicit termination (that is, reached the end of\n\n/// executable), explicit termination via returning a buffer or termination due to a trap.\n\npub trait Vm<T: Trait> {\n\n type Executable;\n\n\n\n fn execute<E: Ext<T = T>>(\n\n &self,\n\n exec: &Self::Executable,\n\n ext: E,\n\n input_data: Vec<u8>,\n\n gas_meter: &mut GasMeter<T>,\n\n ) -> ExecResult;\n\n}\n\n\n\n#[cfg_attr(test, derive(Debug, PartialEq, Eq))]\n\n#[derive(Copy, Clone)]\n\npub enum ExecFeeToken {\n\n /// Base fee charged for a call.\n\n Call,\n\n /// Base fee charged for a instantiate.\n\n Instantiate,\n\n}\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/exec.rs", "rank": 20, "score": 174266.1158593385 }, { "content": "/// Loader is a companion of the `Vm` trait. It loads an appropriate abstract\n\n/// executable to be executed by an accompanying `Vm` implementation.\n\npub trait Loader<T: Trait> {\n\n type Executable;\n\n\n\n /// Load the initializer portion of the code specified by the `code_hash`. This\n\n /// executable is called upon instantiation.\n\n fn load_init(&self, code_hash: &CodeHash<T>) -> Result<Self::Executable, &'static str>;\n\n /// Load the main portion of the code specified by the `code_hash`. This executable\n\n /// is called for each call to a contract.\n\n fn load_main(&self, code_hash: &CodeHash<T>) -> Result<Self::Executable, &'static str>;\n\n}\n\n\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/exec.rs", "rank": 21, "score": 174263.98380381372 }, { "content": "type Balances = pallet_balances::Module<Test>;\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/tests.rs", "rank": 22, "score": 174053.89129923892 }, { "content": "type Timestamp = pallet_timestamp::Module<Test>;\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/tests.rs", "rank": 23, "score": 174053.89129923892 }, { "content": "#[cfg(test)]\n\npub trait TestAuxiliaries: Any + Debug + PartialEq + Eq {}\n\n#[cfg(test)]\n\nimpl<T: Any + Debug + PartialEq + Eq> TestAuxiliaries for T {}\n\n\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/gas.rs", "rank": 24, "score": 173209.4442383032 }, { "content": "type Contracts = Module<Test>;\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/tests.rs", "rank": 25, "score": 172547.23988724907 }, { "content": "pub trait VersatileWasm: EscrowTrait + transaction_payment::Trait {\n\n type Event: From<Event<Self>> + Into<<Self as system::Trait>::Event>;\n\n type Call: Parameter + UnfilteredDispatchable<Origin = Self::Origin> + GetDispatchInfo;\n\n type Randomness: Randomness<Self::Hash>;\n\n type DispatchRuntimeCall: DispatchRuntimeCall<Self>;\n\n}\n\n\n\ndecl_module! {\n\n pub struct Module<T: VersatileWasm> for enum Call where origin: <T as system::Trait>::Origin, system=system {\n\n fn deposit_event() = default;\n\n }\n\n}\n\n\n\n/// A prepared wasm module ready for execution.\n\n#[derive(Clone, Encode, Decode)]\n\npub struct PrefabWasmModule {\n\n /// Version of the schedule with which the code was instrumented.\n\n #[codec(compact)]\n\n schedule_version: u32,\n\n #[codec(compact)]\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/versatile-wasm/src/lib.rs", "rank": 26, "score": 171080.38357603955 }, { "content": "type Randomness = pallet_randomness_collective_flip::Module<Test>;\n\n\n\npub struct DummyContractAddressFor;\n\nimpl ContractAddressFor<H256, u64> for DummyContractAddressFor {\n\n fn contract_address_for(_code_hash: &H256, _data: &[u8], origin: &u64) -> u64 {\n\n *origin + 1\n\n }\n\n}\n\n\n\npub struct DummyTrieIdGenerator;\n\nimpl TrieIdGenerator<u64> for DummyTrieIdGenerator {\n\n fn trie_id(account_id: &u64) -> TrieId {\n\n let new_seed = super::AccountCounter::mutate(|v| {\n\n *v = v.wrapping_add(1);\n\n *v\n\n });\n\n\n\n let mut res = vec![];\n\n res.extend_from_slice(&new_seed.to_le_bytes());\n\n res.extend_from_slice(&account_id.to_le_bytes());\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/tests.rs", "rank": 27, "score": 170498.91513388423 }, { "content": "type Timestamp = pallet_timestamp::Module<Test>;\n\n\n\nimpl contracts::Trait for Test {\n\n type Time = Timestamp;\n\n type Currency = Balances;\n\n type DetermineContractAddress = DummyContractAddressFor;\n\n type Event = MetaEvent;\n\n type TrieIdGenerator = DummyTrieIdGenerator;\n\n type RentPayment = ();\n\n type SignedClaimHandicap = SignedClaimHandicap;\n\n type TombstoneDeposit = TombstoneDeposit;\n\n type StorageSizeOffset = StorageSizeOffset;\n\n type RentByteFee = RentByteFee;\n\n type RentDepositOffset = RentDepositOffset;\n\n type SurchargeReward = SurchargeReward;\n\n type MaxDepth = MaxDepth;\n\n type MaxValueSize = MaxValueSize;\n\n type WeightPrice = ();\n\n type Randomness = Randomness;\n\n}\n", "file_path": "gateway/pallet-escrow-gateway/contracts-gateway/src/mock.rs", "rank": 28, "score": 168484.92013504342 }, { "content": "type Timestamp = pallet_timestamp::Module<Test>;\n\n\n\nimpl system::Trait for Test {\n\n type BaseCallFilter = ();\n\n type Origin = Origin;\n\n type Index = u64;\n\n type BlockNumber = u64;\n\n type Hash = H256;\n\n type Call = Call;\n\n type Hashing = BlakeTwo256;\n\n type AccountId = u64;\n\n type Lookup = IdentityLookup<Self::AccountId>;\n\n type Header = Header;\n\n type Event = MetaEvent;\n\n type BlockHashCount = BlockHashCount;\n\n type MaximumBlockWeight = MaximumBlockWeight;\n\n type DbWeight = ();\n\n type BlockExecutionWeight = ();\n\n type ExtrinsicBaseWeight = ();\n\n type MaximumExtrinsicWeight = MaximumBlockWeight;\n", "file_path": "gateway/pallet-escrow-gateway/runtime-gateway/src/mock.rs", "rank": 29, "score": 168484.92013504342 }, { "content": "/// Helper function to generate a crypto pair from seed\n\npub fn get_from_seed<TPublic: Public>(seed: &str) -> <TPublic::Pair as Pair>::Public {\n\n TPublic::Pair::from_string(&format!(\"//{}\", seed), None)\n\n .expect(\"static values are valid; qed\")\n\n .public()\n\n}\n\n\n", "file_path": "circuit/src/chain_spec.rs", "rank": 30, "score": 168139.61204150494 }, { "content": "/// An interface that provides access to the external environment in which the\n\n/// smart-contract is executed.\n\n///\n\n/// This interface is specialized to an account of the executing code, so all\n\n/// operations are implicitly performed on that account.\n\npub trait Ext {\n\n type T: Trait;\n\n\n\n /// Returns the storage entry of the executing account by the given `key`.\n\n ///\n\n /// Returns `None` if the `key` wasn't previously set by `set_storage` or\n\n /// was deleted.\n\n fn get_storage(&self, key: &StorageKey) -> Option<Vec<u8>>;\n\n\n\n /// Sets the storage entry by the given key to the specified value. If `value` is `None` then\n\n /// the storage entry is deleted.\n\n fn set_storage(&mut self, key: StorageKey, value: Option<Vec<u8>>);\n\n\n\n /// Instantiate a contract from the given code.\n\n ///\n\n /// The newly created account will be associated with `code`. `value` specifies the amount of value\n\n /// transferred from this to the newly created account (also known as endowment).\n\n fn instantiate(\n\n &mut self,\n\n code: &CodeHash<Self::T>,\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/exec.rs", "rank": 31, "score": 166992.0659118441 }, { "content": "pub trait ExtStandards {\n\n type T: EscrowTrait + SystemTrait + VersatileWasm;\n\n /// <h2>Storage</h2>\n\n ///\n\n /// Returns the storage entry of the executing account by the given `key`.\n\n ///\n\n /// Returns `None` if the `key` wasn't previously set by `set_storage` or\n\n /// was deleted.\n\n fn get_storage(&self, key: &StorageKey) -> Option<Vec<u8>>;\n\n\n\n /// Sets the storage entry by the given key to the specified value. If `value` is `None` then\n\n ///\n\n /// the storage entry is deleted.\n\n fn set_storage(&mut self, key: StorageKey, value: Option<Vec<u8>>);\n\n\n\n /// Returns the raw storage entry of the executing account by the given `key`.\n\n /// By default implemented to access unhashed storage - commonly used by parachains storage.\n\n /// Returns `None` if the `key` wasn't previously set by `set_storage` or\n\n /// was deleted.\n\n fn get_raw_storage(&self, key: &StorageKey) -> Option<Vec<u8>>;\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/versatile-wasm/src/ext.rs", "rank": 32, "score": 166992.0659118441 }, { "content": "pub trait ExtPlain {\n\n /// Returns the storage entry of the executing account by the given `key`.\n\n ///\n\n /// Returns `None` if the `key` wasn't previously set by `set_storage` or\n\n /// was deleted.\n\n fn get_storage(&self, key: &StorageKey) -> Option<Vec<u8>>;\n\n\n\n /// Sets the storage entry by the given key to the specified value. If `value` is `None` then\n\n /// the storage entry is deleted.\n\n fn set_storage(&mut self, key: StorageKey, value: Option<Vec<u8>>);\n\n\n\n /// Transfer some amount of funds into the specified account.\n\n fn transfer(\n\n &mut self,\n\n to: &PlainAccountIdOf,\n\n value: PlainBalanceOf,\n\n gas_meter: &mut GasMeterPlain,\n\n ) -> Result<(), DispatchError>;\n\n\n\n\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/versatile-wasm/src/plain_standards.rs", "rank": 33, "score": 164804.9911442844 }, { "content": "type Randomness = pallet_randomness_collective_flip::Module<Test>;\n", "file_path": "gateway/pallet-escrow-gateway/contracts-gateway/src/mock.rs", "rank": 34, "score": 164383.28369050744 }, { "content": "type Randomness = pallet_randomness_collective_flip::Module<Test>;\n", "file_path": "gateway/pallet-escrow-gateway/runtime-gateway/src/mock.rs", "rank": 35, "score": 164383.28369050744 }, { "content": "type System = frame_system::Module<Test>;\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/tests.rs", "rank": 36, "score": 164019.51186603477 }, { "content": "/// Generate a crypto pair from seed.\n\npub fn get_from_seed<TPublic: Public>(seed: &str) -> <TPublic::Pair as Pair>::Public {\n\n TPublic::Pair::from_string(&format!(\"//{}\", seed), None)\n\n .expect(\"static values are valid; qed\")\n\n .public()\n\n}\n\n\n", "file_path": "gateway/demo-runtime/node/src/chain_spec.rs", "rank": 37, "score": 163037.748050395 }, { "content": "type System = system::Module<Test>;\n", "file_path": "gateway/pallet-escrow-gateway/contracts-gateway/src/mock.rs", "rank": 38, "score": 160953.39952535307 }, { "content": "type System = system::Module<Test>;\n", "file_path": "gateway/pallet-escrow-gateway/runtime-gateway/src/mock.rs", "rank": 39, "score": 160953.39952535307 }, { "content": "/// Helper function to generate an account ID from seed\n\npub fn get_account_id_from_seed<TPublic: Public>(seed: &str) -> AccountId\n\nwhere\n\n AccountPublic: From<<TPublic::Pair as Pair>::Public>,\n\n{\n\n AccountPublic::from(get_from_seed::<TPublic>(seed)).into_account()\n\n}\n\n\n", "file_path": "circuit/src/chain_spec.rs", "rank": 40, "score": 159640.96458699007 }, { "content": "pub trait ExtendedEnv: ExtStandards {\n\n fn define_extended_env();\n\n}\n\n\n\npub struct ExtendedRuntimeEnv<'a, T: EscrowTrait + SystemTrait + VersatileWasm> {\n\n pub standard_env: DefaultRuntimeEnv<'a, T>,\n\n}\n\n\n\nimpl<'a, T: EscrowTrait + SystemTrait> ExtendedEnv for ExtendedRuntimeEnv<'a, T>\n\nwhere\n\n T: EscrowTrait + SystemTrait + VersatileWasm,\n\n{\n\n fn define_extended_env() {\n\n unimplemented!()\n\n }\n\n}\n\n\n\nimpl<'a, T: EscrowTrait + SystemTrait> ExtStandards for ExtendedRuntimeEnv<'a, T>\n\nwhere\n\n T: EscrowTrait + SystemTrait + VersatileWasm,\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/versatile-wasm/src/ext.rs", "rank": 41, "score": 157429.23483100382 }, { "content": "/// Test for all kind of removals for the given trigger:\n\n/// * if balance is reached and balance > subsistence threshold\n\n/// * if allowance is exceeded\n\n/// * if balance is reached and balance < subsistence threshold\n\n///\t * this case cannot be triggered by a contract: we check whether a tombstone is left\n\nfn removals(trigger_call: impl Fn() -> bool) {\n\n let (wasm, code_hash) = compile_module::<Test>(\"set_rent\").unwrap();\n\n\n\n // Balance reached and superior to subsistence threshold\n\n ExtBuilder::default()\n\n .existential_deposit(50)\n\n .build()\n\n .execute_with(|| {\n\n // Create\n\n let _ = Balances::deposit_creating(&ALICE, 1_000_000);\n\n assert_ok!(Contracts::put_code(Origin::signed(ALICE), wasm.clone()));\n\n assert_ok!(Contracts::instantiate(\n\n Origin::signed(ALICE),\n\n 100,\n\n GAS_LIMIT,\n\n code_hash.into(),\n\n <Test as pallet_balances::Trait>::Balance::from(1_000u32).encode() // rent allowance\n\n ));\n\n\n\n let subsistence_threshold = 50 /*existential_deposit*/ + 16 /*tombstone_deposit*/;\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/tests.rs", "rank": 42, "score": 156867.918366271 }, { "content": "pub fn stamp_failed_execution<T: Trait>(\n\n cause_code: u8,\n\n requester: &T::AccountId,\n\n code_hash: &T::Hash,\n\n) {\n\n <ExecutionStamps<T>>::insert(\n\n requester,\n\n code_hash,\n\n ExecutionStamp {\n\n call_stamps: vec![],\n\n timestamp: TryInto::<u64>::try_into(<T as EscrowTrait>::Time::now())\n\n .ok()\n\n .unwrap(),\n\n phase: 0,\n\n proofs: None,\n\n failure: Option::from(cause_code),\n\n },\n\n );\n\n}\n\n// The pallet's dispatchable functions.\n", "file_path": "gateway/pallet-escrow-gateway/runtime-gateway/src/lib.rs", "rank": 43, "score": 155092.35784815977 }, { "content": "pub fn cleanup_failed_execution<T: Trait>(\n\n escrow_account: T::AccountId,\n\n requester: T::AccountId,\n\n transfers: &mut Vec<TransferEntry>,\n\n) -> DispatchResult {\n\n // Give the money back to the requester from the transfers that succeeded.\n\n for transfer in transfers.iter() {\n\n just_transfer::<T>(\n\n &escrow_account,\n\n &requester,\n\n BalanceOf::<T>::from(transfer.value),\n\n )\n\n .map_err(|e| e)?;\n\n }\n\n transfers.clear();\n\n Ok(())\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq, Encode, Decode, Default, Clone)]\n\npub struct ExecutionProofs {\n", "file_path": "gateway/pallet-escrow-gateway/contracts-gateway/src/lib.rs", "rank": 44, "score": 155092.3578481598 }, { "content": "pub fn stamp_failed_execution<T: Trait>(\n\n cause_code: u8,\n\n requester: &T::AccountId,\n\n code_hash: &T::Hash,\n\n) {\n\n <ExecutionStamps<T>>::insert(\n\n requester,\n\n code_hash,\n\n ExecutionStamp {\n\n call_stamps: vec![],\n\n timestamp: TryInto::<u64>::try_into(<T as EscrowTrait>::Time::now())\n\n .ok()\n\n .unwrap(),\n\n phase: 0,\n\n proofs: None,\n\n failure: Option::from(cause_code),\n\n },\n\n );\n\n}\n\n\n", "file_path": "gateway/pallet-escrow-gateway/contracts-gateway/src/lib.rs", "rank": 45, "score": 155092.35784815977 }, { "content": "pub fn cleanup_failed_execution<T: Trait>(\n\n escrow_account: T::AccountId,\n\n requester: T::AccountId,\n\n transfers: &mut Vec<TransferEntry>,\n\n) -> DispatchResult {\n\n // Give the money back to the requester from the transfers that succeeded.\n\n for transfer in transfers.iter() {\n\n just_transfer::<T>(\n\n &escrow_account,\n\n &requester,\n\n BalanceOf::<T>::from(transfer.value),\n\n )\n\n .map_err(|e| e)?;\n\n }\n\n transfers.clear();\n\n Ok(())\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq, Encode, Decode, Default, Clone)]\n\npub struct ExecutionProofs {\n", "file_path": "gateway/pallet-escrow-gateway/runtime-gateway/src/lib.rs", "rank": 46, "score": 155092.35784815977 }, { "content": "/// Generate an account ID from seed.\n\npub fn get_account_id_from_seed<TPublic: Public>(seed: &str) -> AccountId\n\nwhere\n\n AccountPublic: From<<TPublic::Pair as Pair>::Public>,\n\n{\n\n AccountPublic::from(get_from_seed::<TPublic>(seed)).into_account()\n\n}\n\n\n", "file_path": "gateway/demo-runtime/node/src/chain_spec.rs", "rank": 47, "score": 153892.23646564235 }, { "content": "fn deposit_event<T: Trait>(topics: Vec<T::Hash>, event: Event<T>) {\n\n <frame_system::Module<T>>::deposit_event_indexed(\n\n &*topics,\n\n <T as Trait>::Event::from(event).into(),\n\n )\n\n}\n\n\n\n/// These tests exercise the executive layer.\n\n///\n\n/// In these tests the VM/loader are mocked. Instead of dealing with wasm bytecode they use simple closures.\n\n/// This allows you to tackle executive logic more thoroughly without writing a\n\n/// wasm VM code.\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{\n\n BalanceOf, ErrorOrigin, Event, ExecError, ExecFeeToken, ExecResult, ExecutionContext, Ext,\n\n Loader, RawEvent, ReturnFlags, TransferFeeKind, TransferFeeToken, Vm,\n\n };\n\n use crate::tests::test_utils::{get_balance, place_contract, set_balance};\n\n use crate::{\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/exec.rs", "rank": 48, "score": 153307.83309561302 }, { "content": "/// Put code in the storage. The hash of code is used as a key and is returned\n\n/// as a result of this function.\n\n///\n\n/// This function instruments the given code and caches it in the storage.\n\npub fn save<T: Trait>(\n\n original_code: Vec<u8>,\n\n schedule: &Schedule,\n\n) -> Result<CodeHash<T>, &'static str> {\n\n let prefab_module = prepare::prepare_contract::<Env>(&original_code, schedule)?;\n\n let code_hash = T::Hashing::hash(&original_code);\n\n\n\n <CodeStorage<T>>::insert(code_hash, prefab_module);\n\n <PristineCode<T>>::insert(code_hash, original_code);\n\n\n\n Ok(code_hash)\n\n}\n\n\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/versatile-wasm/src/code_cache.rs", "rank": 49, "score": 152997.9923074434 }, { "content": "/// Load code with the given code hash.\n\n///\n\n/// If the module was instrumented with a lower version of schedule than\n\n/// the current one given as an argument, then this function will perform\n\n/// re-instrumentation and update the cache in the storage.\n\npub fn load<T: Trait>(\n\n code_hash: &CodeHash<T>,\n\n schedule: &Schedule,\n\n) -> Result<PrefabWasmModule, &'static str> {\n\n let mut prefab_module = <CodeStorage<T>>::get(code_hash).ok_or_else(|| \"code is not found\")?;\n\n\n\n if prefab_module.schedule_version < schedule.version {\n\n // The current schedule version is greater than the version of the one cached\n\n // in the storage.\n\n //\n\n // We need to re-instrument the code with the latest schedule here.\n\n let original_code =\n\n <PristineCode<T>>::get(code_hash).ok_or_else(|| \"pristine code is not found\")?;\n\n prefab_module = prepare::prepare_contract::<Env>(&original_code, schedule)?;\n\n <CodeStorage<T>>::insert(&code_hash, &prefab_module);\n\n }\n\n Ok(prefab_module)\n\n}\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/versatile-wasm/src/code_cache.rs", "rank": 50, "score": 152993.6436085051 }, { "content": "pub fn get_child_storage_for_current_execution<T: EscrowTrait>(\n\n escrow_account: &T::AccountId,\n\n code: T::Hash,\n\n) -> ChildInfo {\n\n let mut buf = Vec::new();\n\n buf.extend_from_slice(b\"gateway_escrow\");\n\n buf.extend_from_slice(&escrow_account.encode()[..]);\n\n buf.extend_from_slice(&code.encode()[..]);\n\n child::ChildInfo::new_default(T::Hashing::hash(&buf[..]).as_ref())\n\n}\n\n\n\ndefine_env!(Env, <E: ExtStandards>,\n\n gas (_ctx, amount: u32) => {\n\n let amount = Gas::from(amount);\n\n if !amount.is_zero() {\n\n Ok(())\n\n } else {\n\n Err(sp_sandbox::HostError)\n\n }\n\n },\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/versatile-wasm/src/runtime.rs", "rank": 51, "score": 152883.27213869515 }, { "content": "pub fn execute_attached_code<'a, T: Trait>(\n\n origin: T::Origin,\n\n escrow_account: &T::AccountId,\n\n requester: &T::AccountId,\n\n target_dest: &T::AccountId,\n\n value: BalanceOf<T>,\n\n code: Vec<u8>,\n\n input_data: Vec<u8>,\n\n endowment: ContractsBalanceOf<T>,\n\n mut gas_meter: &mut GasMeter<T>,\n\n cfg: &Config<T>,\n\n transfers: &mut Vec<TransferEntry>,\n\n deferred_storage_writes: &mut Vec<DeferredStorageWrite>,\n\n call_stamps: &mut Vec<CallStamp>,\n\n) -> ExecResult {\n\n // Step 1: Temporarily instantiate the contract for the purpose following execution, so it's possible to set_storage etc.\n\n instantiate_temp_execution_contract::<T>(\n\n origin,\n\n code.clone(),\n\n &input_data.clone(),\n", "file_path": "gateway/pallet-escrow-gateway/contracts-gateway/src/lib.rs", "rank": 52, "score": 151311.81827702362 }, { "content": "/// Creates a new contract descriptor in the storage with the given code hash at the given address.\n\n///\n\n/// Returns `Err` if there is already a contract (or a tombstone) exists at the given address.\n\npub fn place_contract<T: Trait>(\n\n account: &AccountIdOf<T>,\n\n trie_id: TrieId,\n\n ch: CodeHash<T>,\n\n) -> Result<(), &'static str> {\n\n <ContractInfoOf<T>>::mutate(account, |maybe_contract_info| {\n\n if maybe_contract_info.is_some() {\n\n return Err(\"Alive contract or tombstone already exists\");\n\n }\n\n\n\n *maybe_contract_info = Some(\n\n AliveContractInfo::<T> {\n\n code_hash: ch,\n\n storage_size: 0,\n\n trie_id,\n\n deduct_block: <frame_system::Module<T>>::block_number(),\n\n rent_allowance: <BalanceOf<T>>::max_value(),\n\n empty_pair_count: 0,\n\n total_pair_count: 0,\n\n last_write: None,\n\n }\n\n .into(),\n\n );\n\n\n\n Ok(())\n\n })\n\n}\n\n\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/storage.rs", "rank": 53, "score": 150978.04111337027 }, { "content": "/// Returns the rent allowance set for the contract give by the account id.\n\npub fn rent_allowance<T: Trait>(\n\n account: &AccountIdOf<T>,\n\n) -> Result<BalanceOf<T>, ContractAbsentError> {\n\n <ContractInfoOf<T>>::get(account)\n\n .and_then(|i| i.as_alive().map(|i| i.rent_allowance))\n\n .ok_or(ContractAbsentError)\n\n}\n\n\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/storage.rs", "rank": 54, "score": 150978.04111337027 }, { "content": "pub fn instantiate_temp_execution_contract<'a, T: Trait>(\n\n origin: T::Origin,\n\n code: Vec<u8>,\n\n input_data: &Vec<u8>,\n\n endowment: ContractsBalanceOf<T>,\n\n gas_limit: Gas,\n\n) -> dispatch::DispatchResult {\n\n let code_hash_res =\n\n <escrow_contracts_wrapper::Module<T>>::put_code(origin.clone(), code.clone());\n\n debug::info!(\n\n \"DEBUG gateway_contract_exec -- escrow_contracts_wrapper::put_code {:?}\",\n\n code_hash_res\n\n );\n\n code_hash_res.map_err(|_e| <Error<T>>::PutCodeFailure)?;\n\n let code_hash = T::Hashing::hash(&code.clone());\n\n // ToDo: Instantiate works - but charging accounts in unit tests doesn't (due to GenesisConfig not present in Balance err)\n\n // Step 2: escrow_contracts_wrapper::instantiate\n\n // ToDo: Smart way of calculating endowment that would be enough for initialization + one call.\n\n let init_res = <escrow_contracts_wrapper::Module<T>>::instantiate(\n\n origin.clone(),\n", "file_path": "gateway/pallet-escrow-gateway/contracts-gateway/src/lib.rs", "rank": 55, "score": 149213.1040373689 }, { "content": "pub fn execute_code_in_escrow_sandbox<'a, T: Trait>(\n\n escrow_account: &T::AccountId,\n\n requester: &T::AccountId,\n\n target_dest: &T::AccountId,\n\n value: BalanceOf<T>,\n\n code: Vec<u8>,\n\n input_data: Vec<u8>,\n\n gas_meter: &'a mut GasMeter<T>,\n\n transfers: &mut Vec<TransferEntry>,\n\n deferred_storage_writes: &mut Vec<DeferredStorageWrite>,\n\n call_stamps: &mut Vec<CallStamp>,\n\n) -> ExecResult {\n\n // That only works for code that is received by the call and will be executed and cleaned up after.\n\n let prefab_module = prepare_contract::<Env>(&code).map_err(|e| e)?;\n\n\n\n let executable = WasmExecutable {\n\n entrypoint_name: \"call\",\n\n prefab_module,\n\n };\n\n\n", "file_path": "gateway/pallet-escrow-gateway/runtime-gateway/src/lib.rs", "rank": 56, "score": 149213.1040373689 }, { "content": "/// Put code in the storage. The hash of code is used as a key and is returned\n\n/// as a result of this function.\n\n///\n\n/// This function instruments the given code and caches it in the storage.\n\npub fn save<T: Trait>(\n\n _original_code: Vec<u8>,\n\n _schedule: &Schedule,\n\n) -> Result<CodeHash<T>, &'static str> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/wasm/code_cache.rs", "rank": 57, "score": 149045.0579236251 }, { "content": "/// Set the rent allowance for the contract given by the account id.\n\n///\n\n/// Returns `Err` if the contract doesn't exist or is a tombstone.\n\npub fn set_rent_allowance<T: Trait>(\n\n account: &AccountIdOf<T>,\n\n rent_allowance: BalanceOf<T>,\n\n) -> Result<(), ContractAbsentError> {\n\n <ContractInfoOf<T>>::mutate(account, |maybe_contract_info| match maybe_contract_info {\n\n Some(ContractInfo::Alive(ref mut alive_info)) => {\n\n alive_info.rent_allowance = rent_allowance;\n\n Ok(())\n\n }\n\n _ => Err(ContractAbsentError),\n\n })\n\n}\n\n\n\n/// Returns the code hash of the contract specified by `account` ID.\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/storage.rs", "rank": 58, "score": 149040.7092246868 }, { "content": "/// Load code with the given code hash.\n\n///\n\n/// If the module was instrumented with a lower version of schedule than\n\n/// the current one given as an argument, then this function will perform\n\n/// re-instrumentation and update the cache in the storage.\n\npub fn load<T: Trait>(\n\n code_hash: &CodeHash<T>,\n\n _schedule: &Schedule,\n\n) -> Result<PrefabWasmModule, &'static str> {\n\n let prefab_module = <CodeStorage<T>>::get(code_hash).ok_or_else(|| \"code is not found\")?;\n\n match Decode::decode(&mut prefab_module.encode().as_slice()) {\n\n Ok(decoded) => Ok(decoded),\n\n Err(_err) => Err(\"Can't decode stored contract.\"),\n\n }\n\n}\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/wasm/code_cache.rs", "rank": 59, "score": 149040.7092246868 }, { "content": "/// Update a storage entry into a contract's kv storage.\n\n///\n\n/// If the `opt_new_value` is `None` then the kv pair is removed.\n\n///\n\n/// This function also updates the bookkeeping info such as: number of total non-empty pairs a\n\n/// contract owns, the last block the storage was written to, etc. That's why, in contrast to\n\n/// `read_contract_storage`, this function also requires the `account` ID.\n\n///\n\n/// If the contract specified by the id `account` doesn't exist `Err` is returned.`\n\npub fn write_contract_storage<T: Trait>(\n\n account: &AccountIdOf<T>,\n\n trie_id: &TrieId,\n\n key: &StorageKey,\n\n opt_new_value: Option<Vec<u8>>,\n\n) -> Result<(), ContractAbsentError> {\n\n let mut new_info = match <ContractInfoOf<T>>::get(account) {\n\n Some(ContractInfo::Alive(alive)) => alive,\n\n None | Some(ContractInfo::Tombstone(_)) => return Err(ContractAbsentError),\n\n };\n\n\n\n let hashed_key = blake2_256(key);\n\n let child_trie_info = &crate::child_trie_info(&trie_id);\n\n\n\n // In order to correctly update the book keeping we need to fetch the previous\n\n // value of the key-value pair.\n\n //\n\n // It might be a bit more clean if we had an API that supported getting the size\n\n // of the value without going through the loading of it. But at the moment of\n\n // writing, there is no such API.\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/storage.rs", "rank": 60, "score": 149040.7092246868 }, { "content": "pub fn raw_escrow_call<T: EscrowTrait + VersatileWasm + SystemTrait, E: ExtStandards<T = T>>(\n\n escrow_account: &T::AccountId,\n\n requester: &T::AccountId,\n\n transfer_dest: &T::AccountId,\n\n value: EscrowBalanceOf<T>,\n\n gas_meter: &mut GasMeter<T>,\n\n input_data: Vec<u8>,\n\n mut transfers: &mut Vec<TransferEntry>,\n\n _deferred_storage_writes: &mut Vec<DeferredStorageWrite>,\n\n call_stamps: &mut Vec<CallStamp>,\n\n exec: &WasmExecutable,\n\n code_hash: T::Hash,\n\n) -> ExecResult {\n\n if value > EscrowBalanceOf::<T>::zero() {\n\n escrow_transfer::<T>(\n\n &escrow_account.clone(),\n\n &requester.clone(),\n\n &transfer_dest.clone(),\n\n EscrowBalanceOf::<T>::from(TryInto::<u32>::try_into(value).ok().unwrap()),\n\n transfers,\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/versatile-wasm/src/runtime.rs", "rank": 61, "score": 142009.19284516483 }, { "content": "/// Claim surcharge with the given trigger_call at the given blocks.\n\n/// If `removes` is true then assert that the contract is a tombstone.\n\nfn claim_surcharge(blocks: u64, trigger_call: impl Fn() -> bool, removes: bool) {\n\n let (wasm, code_hash) = compile_module::<Test>(\"set_rent\").unwrap();\n\n\n\n ExtBuilder::default()\n\n .existential_deposit(50)\n\n .build()\n\n .execute_with(|| {\n\n // Create\n\n let _ = Balances::deposit_creating(&ALICE, 1_000_000);\n\n assert_ok!(Contracts::put_code(Origin::signed(ALICE), wasm));\n\n assert_ok!(Contracts::instantiate(\n\n Origin::signed(ALICE),\n\n 100,\n\n GAS_LIMIT,\n\n code_hash.into(),\n\n <Test as pallet_balances::Trait>::Balance::from(1_000u32).encode() // rent allowance\n\n ));\n\n\n\n // Advance blocks\n\n initialize_block(blocks);\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/tests.rs", "rank": 62, "score": 139526.14397229935 }, { "content": "// Build genesis storage according to the mock runtime.\n\npub fn new_test_ext() -> sp_io::TestExternalities {\n\n system::GenesisConfig::default()\n\n .build_storage::<Test>()\n\n .unwrap()\n\n .into()\n\n}\n", "file_path": "gateway/demo-runtime/pallets/template/src/mock.rs", "rank": 63, "score": 137964.68868751806 }, { "content": "#[test]\n\nfn instantiate_and_call_and_deposit_event() {\n\n let (wasm, code_hash) = compile_module::<Test>(\"return_from_start_fn\").unwrap();\n\n\n\n ExtBuilder::default()\n\n .existential_deposit(100)\n\n .build()\n\n .execute_with(|| {\n\n let _ = Balances::deposit_creating(&ALICE, 1_000_000);\n\n let subsistence = super::Config::<Test>::subsistence_threshold_uncached();\n\n\n\n assert_ok!(Contracts::put_code(Origin::signed(ALICE), wasm));\n\n\n\n // Check at the end to get hash on error easily\n\n let creation = Contracts::instantiate(\n\n Origin::signed(ALICE),\n\n subsistence,\n\n GAS_LIMIT,\n\n code_hash.into(),\n\n vec![],\n\n );\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/tests.rs", "rank": 64, "score": 137400.72580963504 }, { "content": "/// Charge the gas meter with the specified token.\n\n///\n\n/// Returns `Err(HostError)` if there is not enough gas.\n\npub fn charge_gas<T: Trait, Tok: Token<T>>(\n\n gas_meter: &mut GasMeter<T>,\n\n metadata: &Tok::Metadata,\n\n trap_reason: &mut Option<TrapReason>,\n\n token: Tok,\n\n) -> Result<(), sp_sandbox::HostError> {\n\n match gas_meter.charge(metadata, token) {\n\n GasMeterResult::Proceed => Ok(()),\n\n GasMeterResult::OutOfGas => {\n\n *trap_reason = Some(TrapReason::SupervisorError(DispatchError::Other(\n\n \"Out of gas\",\n\n )));\n\n Err(sp_sandbox::HostError)\n\n }\n\n }\n\n}\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/versatile-wasm/src/fees.rs", "rank": 65, "score": 137294.96906663242 }, { "content": "type Block = frame_system::mocking::MockBlock<Test>;\n\n\n\n// For testing the module, we construct a mock runtime.\n\nframe_support::construct_runtime!(\n\n pub enum Test where\n\n Block = Block,\n\n NodeBlock = Block,\n\n UncheckedExtrinsic = UncheckedExtrinsic,\n\n {\n\n System: frame_system::{Pallet, Call, Config, Storage, Event<T>},\n\n ExecDelivery: example_offchain_worker::{Pallet, Call, Storage, Event<T>, ValidateUnsigned},\n\n }\n\n);\n\n\n\nparameter_types! {\n\n pub const BlockHashCount: u64 = 250;\n\n pub BlockWeights: frame_system::limits::BlockWeights =\n\n frame_system::limits::BlockWeights::simple_max(1024);\n\n}\n\nimpl frame_system::Config for Test {\n", "file_path": "circuit/src/execution-delivery/src/tests.rs", "rank": 66, "score": 134518.28190692244 }, { "content": "type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Test>;\n", "file_path": "circuit/src/execution-delivery/src/tests.rs", "rank": 67, "score": 131718.75438319132 }, { "content": "pub fn just_transfer<'a, T: EscrowTrait>(\n\n transactor: &T::AccountId,\n\n dest: &T::AccountId,\n\n value: BalanceOf<T>,\n\n) -> DispatchResult {\n\n <T as EscrowTrait>::Currency::transfer(transactor, dest, value, ExistenceRequirement::KeepAlive)\n\n}\n\n\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/src/transfers.rs", "rank": 68, "score": 127374.30241817131 }, { "content": "pub fn escrow_transfer<'a, T: EscrowTrait>(\n\n escrow_account: &T::AccountId,\n\n requester: &T::AccountId,\n\n target_to: &T::AccountId,\n\n value: BalanceOf<T>,\n\n transfers: &mut Vec<TransferEntry>,\n\n) -> Result<(), DispatchError> {\n\n // Verify that requester has enough money to make the transfers from within the contract.\n\n if <T as EscrowTrait>::Currency::total_balance(&requester.clone())\n\n < <T as EscrowTrait>::Currency::minimum_balance() + value\n\n {\n\n return Err(DispatchError::Other(\n\n \"Escrow Transfer failed as the requester doesn't have enough balance.\",\n\n ));\n\n }\n\n // Just transfer here the value of internal for contract transfer to escrow account.\n\n return match just_transfer::<T>(requester, escrow_account, value) {\n\n Ok(_) => {\n\n transfers.push(TransferEntry {\n\n to: account_encode_to_h256(target_to.encode().as_slice()),\n\n value: TryInto::<u32>::try_into(value).ok().unwrap(),\n\n data: Vec::new(),\n\n });\n\n Ok(())\n\n }\n\n Err(err) => Err(err),\n\n };\n\n}\n\n\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/src/transfers.rs", "rank": 69, "score": 127374.30241817131 }, { "content": "pub fn commit_deferred_transfers<T: EscrowTrait>(\n\n escrow_account: T::AccountId,\n\n transfers: &mut Vec<TransferEntry>,\n\n) -> DispatchResult {\n\n // Give the money back to the requester from the transfers that succeeded.s\n\n for transfer in transfers.iter() {\n\n just_transfer::<T>(\n\n &escrow_account,\n\n &h256_to_account(transfer.to),\n\n BalanceOf::<T>::from(transfer.value),\n\n )\n\n .map_err(|e| e)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/src/transfers.rs", "rank": 70, "score": 127147.6492107029 }, { "content": "pub fn account_encode_to_h256(account_bytes: &[u8]) -> H256 {\n\n match account_bytes.len() {\n\n // Normal case, expect 32-bytes long account id (public key) for regular runtime.\n\n 32 => H256::from_slice(account_bytes),\n\n // Shorter (8-bytes) account id (represented as u64) for tests.\n\n 8 => {\n\n // H256::from_low_u64_be doesn't work for runtime as it has no std.\n\n H256::from_slice(\n\n &[\n\n [0 as u8; 24].to_vec(),\n\n u64::from_le_bytes(account_bytes.try_into().unwrap())\n\n .to_be_bytes()\n\n .to_vec(),\n\n ]\n\n .concat()[..],\n\n )\n\n }\n\n _ => {\n\n assert!(\n\n false,\n\n \"Surprised by AccountId bytes length different than 32 or 8 bytes while serializing. Not supported.\"\n\n );\n\n H256::default()\n\n }\n\n }\n\n}\n\n\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/src/transfers.rs", "rank": 71, "score": 123526.88363001135 }, { "content": "fn default_multistep_call_args() -> (u8, Vec<u8>, Vec<u8>, BalanceOf<Test>, Gas) {\n\n let phase = 0 as u8;\n\n let code: Vec<u8> = Vec::new();\n\n let input_data: Vec<u8> = Vec::new();\n\n let value = BalanceOf::<Test>::from(500_000 as u64);\n\n let gas_limit: Gas = 155_000_000 + 187_500_000 + 107_500_000 + 210_000; // Actual gas costs of \"return_from_start_fn\" instantiation cost\n\n return (phase, code, input_data, value, gas_limit);\n\n}\n\n\n", "file_path": "gateway/pallet-escrow-gateway/runtime-gateway/src/tests.rs", "rank": 72, "score": 122884.75056633356 }, { "content": "/// Removes the contract and all the storage associated with it.\n\n///\n\n/// This function doesn't affect the account.\n\npub fn destroy_contract<T: Trait>(address: &AccountIdOf<T>, trie_id: &TrieId) {\n\n <ContractInfoOf<T>>::remove(address);\n\n child::kill_storage(&crate::child_trie_info(&trie_id));\n\n}\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/storage.rs", "rank": 73, "score": 122741.2138189517 }, { "content": "/// Gateway account ownership digest from Circuit.\n\n///\n\n/// The byte vector returned by this function should be signed with a Gateway account private key.\n\n/// This way, the owner of `circuit_account_id` on Circuit proves that the Gateway account private key\n\n/// is also under his control.\n\npub fn gateway_account_ownership_digest<Call, AccountId, SpecVersion>(\n\n\tgateway_call: &Call,\n\n\tcircuit_account_id: AccountId,\n\n\tgateway_spec_version: SpecVersion,\n\n) -> sp_std::vec::Vec<u8>\n\nwhere\n\n\tCall: codec::Encode,\n\n\tAccountId: codec::Encode,\n\n\tSpecVersion: codec::Encode,\n\n{\n\n\tpallet_bridge_dispatch::account_ownership_digest(\n\n\t\tgateway_call,\n\n\t\tcircuit_account_id,\n\n\t\tgateway_spec_version,\n\n\t\tbp_runtime::MILLAU_BRIDGE_INSTANCE,\n\n\t)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "circuit/src/runtime/src/lib.rs", "rank": 74, "score": 119934.28064975065 }, { "content": "pub fn new_test_ext_builder(deposit: u64, escrow_account: u64) -> sp_io::TestExternalities {\n\n ExtBuilder::default()\n\n .existential_deposit(deposit)\n\n .build(escrow_account)\n\n}\n", "file_path": "gateway/pallet-escrow-gateway/runtime-gateway/src/mock.rs", "rank": 75, "score": 119654.52342290917 }, { "content": "pub fn new_test_ext_builder(deposit: u64, escrow_account: u64) -> sp_io::TestExternalities {\n\n ExtBuilder::default()\n\n .existential_deposit(deposit)\n\n .build(escrow_account)\n\n}\n", "file_path": "gateway/pallet-escrow-gateway/contracts-gateway/src/mock.rs", "rank": 76, "score": 119654.52342290917 }, { "content": "const types = pickTypesBasedOnNodeType();\n", "file_path": "gateway/test-integration/multistep_call.spec.js", "rank": 77, "score": 119002.0958314277 }, { "content": " /// The API to interact with contracts without using executive.\n\n pub trait CircuitApi<AccountId, Balance, BlockNumber> where\n\n AccountId: Codec,\n\n Balance: Codec,\n\n BlockNumber: Codec,\n\n {\n\n /// Perform a composable execution from a specified account to a appointed gateways.\n\n ///\n\n /// See the contracts' `call` dispatchable function for more details.\n\n fn composable_exec(\n\n origin: AccountId,\n\n components: Vec<Compose<AccountId, Balance>>,\n\n io: Vec<u8>,\n\n gas_limit: u64,\n\n input_data: Vec<u8>,\n\n ) -> ComposableExecResult;\n\n\n\n /// Returns the contracts searchable by name or author\n\n fn fetch_contracts(\n\n author: AccountId,\n\n name: Box<str>,\n\n ) -> FetchContractsResult;\n\n\n\n }\n\n}\n", "file_path": "circuit/src/rpc/runtime-api/src/lib.rs", "rank": 78, "score": 115646.0329407659 }, { "content": "#[rpc]\n\npub trait CircuitApi<BlockHash, BlockNumber, AccountId, Balance> {\n\n /// Executes all attached or appointed by ID composable contracts on appointed gateways.\n\n ///\n\n /// IO flow between components on different chains can be described using Input-Output schedule.\n\n ///\n\n /// Circuit queues the request and awaits for an execution agent to volounteer to facilitate the execution\n\n /// across connected chains via gateways - acts as an escrow account and is accountable\n\n /// with her stake for proven misbehaviour.\n\n #[rpc(name = \"composable_exec\")]\n\n fn composable_exec(\n\n &self,\n\n call_request: InterExecRequest<AccountId, Balance>,\n\n at: Option<BlockHash>,\n\n ) -> Result<RpcComposableExecResult>;\n\n\n\n /// Returns the contracts searchable by name or author\n\n #[rpc(name = \"circuit_fetchContracts\")]\n\n fn fetch_contracts(\n\n &self,\n\n author: AccountId,\n", "file_path": "circuit/src/rpc/src/lib.rs", "rank": 79, "score": 115641.20318693499 }, { "content": "pub fn h256_to_account<D: Decode + Encode>(account_h256: H256) -> D {\n\n let decoded_account = D::decode(&mut &account_h256[..]).unwrap();\n\n\n\n match decoded_account.encode().len() {\n\n 32 => decoded_account,\n\n 8 => {\n\n let mut last_8b = account_h256.as_bytes()[24..].to_vec();\n\n last_8b.reverse();\n\n D::decode(&mut &last_8b[..]).unwrap()\n\n }\n\n _ => {\n\n assert!(\n\n false,\n\n \"Surprised by AccountId bytes length different than 32 or 8 bytes while deserializing. Not supported.\"\n\n );\n\n D::decode(&mut &H256::default()[..]).unwrap()\n\n }\n\n }\n\n}\n\n\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/src/transfers.rs", "rank": 80, "score": 115457.65099561268 }, { "content": "/// Converts a runtime trap into an RPC error.\n\nfn runtime_error_into_rpc_err(err: impl std::fmt::Debug) -> Error {\n\n Error {\n\n code: ErrorCode::ServerError(RUNTIME_ERROR),\n\n message: \"Runtime trapped\".into(),\n\n data: Some(format!(\"{:?}\", err).into()),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use core::primitive::str;\n\n use sp_core::U256;\n\n\n\n #[test]\n\n fn composable_execution_request_should_serialize_deserialize_properly() {\n\n type Req = InterExecRequest<String, u128>;\n\n let req: Req = serde_json::from_str(\n\n r#\"\n\n\t\t{\n", "file_path": "circuit/src/rpc/src/lib.rs", "rank": 81, "score": 115378.28497586634 }, { "content": "#[cfg(test)]\n\npub fn code_hash<T: Trait>(account: &AccountIdOf<T>) -> Result<CodeHash<T>, ContractAbsentError> {\n\n <ContractInfoOf<T>>::get(account)\n\n .and_then(|i| i.as_alive().map(|i| i.code_hash))\n\n .ok_or(ContractAbsentError)\n\n}\n\n\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/storage.rs", "rank": 82, "score": 114798.46932153417 }, { "content": "/// Builds a new service for a full client.\n\npub fn new_full(mut config: Configuration) -> Result<TaskManager, ServiceError> {\n\n let sc_service::PartialComponents {\n\n client,\n\n backend,\n\n mut task_manager,\n\n import_queue,\n\n mut keystore_container,\n\n select_chain,\n\n transaction_pool,\n\n inherent_data_providers,\n\n other: (block_import, grandpa_link),\n\n } = new_partial(&config)?;\n\n\n\n if let Some(url) = &config.keystore_remote {\n\n match remote_keystore(url) {\n\n Ok(k) => keystore_container.set_remote_keystore(k),\n\n Err(e) => {\n\n return Err(ServiceError::Other(format!(\n\n \"Error hooking up remote keystore for {}: {}\",\n\n url, e\n", "file_path": "circuit/src/service.rs", "rank": 83, "score": 112691.95855317816 }, { "content": "/// Builds a new service for a light client.\n\npub fn new_light(mut config: Configuration) -> Result<TaskManager, ServiceError> {\n\n let (client, backend, keystore_container, mut task_manager, on_demand) =\n\n sc_service::new_light_parts::<Block, RuntimeApi, Executor>(&config)?;\n\n\n\n config\n\n .network\n\n .extra_sets\n\n .push(sc_finality_grandpa::grandpa_peers_set_config());\n\n\n\n let select_chain = sc_consensus::LongestChain::new(backend.clone());\n\n\n\n let transaction_pool = Arc::new(sc_transaction_pool::BasicPool::new_light(\n\n config.transaction_pool.clone(),\n\n config.prometheus_registry(),\n\n task_manager.spawn_handle(),\n\n client.clone(),\n\n on_demand.clone(),\n\n ));\n\n\n\n let (grandpa_block_import, _) = sc_finality_grandpa::block_import(\n", "file_path": "circuit/src/service.rs", "rank": 84, "score": 112691.95855317816 }, { "content": "/// Builds a new service for a full client.\n\npub fn new_full(config: Configuration) -> Result<TaskManager, ServiceError> {\n\n let sc_service::PartialComponents {\n\n client,\n\n backend,\n\n mut task_manager,\n\n import_queue,\n\n keystore,\n\n select_chain,\n\n transaction_pool,\n\n inherent_data_providers,\n\n other: (block_import, grandpa_link),\n\n } = new_partial(&config)?;\n\n\n\n let finality_proof_provider =\n\n GrandpaFinalityProofProvider::new_for_service(backend.clone(), client.clone());\n\n\n\n let (network, network_status_sinks, system_rpc_tx, network_starter) =\n\n sc_service::build_network(sc_service::BuildNetworkParams {\n\n config: &config,\n\n client: client.clone(),\n", "file_path": "gateway/demo-runtime/node/src/service.rs", "rank": 85, "score": 112334.68052617772 }, { "content": "/// Builds a new service for a light client.\n\npub fn new_light(config: Configuration) -> Result<TaskManager, ServiceError> {\n\n let (client, backend, keystore, mut task_manager, on_demand) =\n\n sc_service::new_light_parts::<Block, RuntimeApi, Executor>(&config)?;\n\n\n\n let transaction_pool = Arc::new(sc_transaction_pool::BasicPool::new_light(\n\n config.transaction_pool.clone(),\n\n config.prometheus_registry(),\n\n task_manager.spawn_handle(),\n\n client.clone(),\n\n on_demand.clone(),\n\n ));\n\n\n\n let grandpa_block_import = sc_finality_grandpa::light_block_import(\n\n client.clone(),\n\n backend.clone(),\n\n &(client.clone() as Arc<_>),\n\n Arc::new(on_demand.checker().clone()) as Arc<_>,\n\n )?;\n\n let finality_proof_import = grandpa_block_import.clone();\n\n let finality_proof_request_builder =\n", "file_path": "gateway/demo-runtime/node/src/service.rs", "rank": 86, "score": 112334.68052617772 }, { "content": " // A unique name is used to ensure that the pallet's storage items are isolated.\n\n // This name may be updated, but each pallet in the runtime must use a unique name.\n\n // ---------------------------------vvvvvvvvvvvvvv\n\n trait Store for Module<T: Trait> as TemplateModule {\n\n // Learn more about declaring storage items:\n\n // https://substrate.dev/docs/en/knowledgebase/runtime/storage#declaring-storage-items\n\n Something get(fn something): Option<u32>;\n\n }\n\n}\n\n\n\n// Pallets use events to inform users when important changes are made.\n\n// https://substrate.dev/docs/en/knowledgebase/runtime/events\n\ndecl_event!(\n\n pub enum Event<T>\n\n where\n\n AccountId = <T as frame_system::Trait>::AccountId,\n\n {\n\n /// Event documentation should end with an array that provides descriptive names for event\n\n /// parameters. [something, who]\n\n SomethingStored(u32, AccountId),\n\n }\n\n);\n\n\n", "file_path": "gateway/demo-runtime/pallets/template/src/lib.rs", "rank": 87, "score": 110842.86183093637 }, { "content": " trait Store for Module<T: Trait> as EscrowGateway {\n\n // Just a dummy storage item.\n\n // Here we are declaring a StorageValue, `Something` as a Option<u32>\n\n // `get(fn something)` is the default getter which returns either the stored `u32` or `None` if nothing stored\n\n Something get(fn something): Option<u32>;\n\n\n\n // For each requester address\n\n // For each transaction_tx (temporarily dest address)\n\n // Store deferred transfers - Vec<TransferEntry>\n\n DeferredTransfers get(fn deferred_transfers):\n\n double_map hasher(blake2_128_concat) T::AccountId, hasher(blake2_128_concat) T::AccountId => Vec<TransferEntry>;\n\n\n\n // ( Requester , CodeHash ) -> [ ExecutionStamp ]\n\n ExecutionStamps get(fn execution_stamps):\n\n double_map hasher(blake2_128_concat) T::AccountId, hasher(blake2_128_concat) T::Hash => ExecutionStamp;\n\n\n\n DeferredResults get(fn deferred_results):\n\n double_map hasher(blake2_128_concat) T::AccountId, hasher(blake2_128_concat) T::Hash => Vec<u8>;\n\n\n\n DeferredStorageWrites get(fn deferred_storage_writes):\n", "file_path": "gateway/pallet-escrow-gateway/contracts-gateway/src/lib.rs", "rank": 88, "score": 109516.21860042895 }, { "content": " trait Store for Module<T: Trait> as ChildStorage {\n\n // Just a dummy storage item.\n\n // Here we are declaring a StorageValue, `Something` as a Option<u32>\n\n // `get(fn something)` is the default getter which returns either the stored `u32` or `None` if nothing stored\n\n Something get(fn something): Option<u32>;\n\n\n\n // For each requester address\n\n // For each transaction_tx (temporarily dest address)\n\n // Store deferred transfers - Vec<TransferEntry>\n\n DeferredTransfers get(fn deferred_transfers):\n\n double_map hasher(blake2_128_concat) T::AccountId, hasher(blake2_128_concat) T::AccountId => Vec<TransferEntry>;\n\n\n\n // ( Requester , CodeHash ) -> [ ExecutionStamp ]\n\n ExecutionStamps get(fn execution_stamps):\n\n double_map hasher(blake2_128_concat) T::AccountId, hasher(blake2_128_concat) T::Hash => ExecutionStamp;\n\n\n\n DeferredResults get(fn deferred_results):\n\n double_map hasher(blake2_128_concat) T::AccountId, hasher(blake2_128_concat) T::Hash => Vec<u8>;\n\n\n\n DeferredStorageWrites get(fn deferred_storage_writes):\n", "file_path": "gateway/pallet-escrow-gateway/runtime-gateway/src/lib.rs", "rank": 89, "score": 109516.21860042895 }, { "content": " trait Store for Module<T: Trait> as FlipperStorage {\n\n pub Value get(fn get_value): bool = false;\n\n }\n\n}\n\n\n\ndecl_module! {\n\n pub struct Module<T: Trait> for enum Call where origin: T::Origin {\n\n fn deposit_event() = default;\n\n\n\n /// Flip!\n\n #[weight = 10_000]\n\n pub fn flip(origin) -> DispatchResult {\n\n ensure_signed(origin)?;\n\n Value::put(!Value::get());\n\n Ok(())\n\n }\n\n }\n\n}\n", "file_path": "gateway/pallet-escrow-gateway/runtime-gateway/fixtures/flipper/src/lib.rs", "rank": 90, "score": 107014.26602931452 }, { "content": " trait Store for Module<T: Trait> as SimpleMap {\n\n StoredValue get(fn stored_value): u32;\n\n }\n\n}\n\n\n\n// A \"scale\" to weigh transactions. This scale can be used with any transactions that take a\n\n// single argument of type u32. The ultimate weight of the transaction is the / product of the\n\n// transaction parameter and the field of this struct.\n\npub struct Linear(u32);\n\n\n\n// The actual weight calculation happens in the `impl WeighData` block\n\nimpl WeighData<(&u32,)> for Linear {\n\n fn weigh_data(&self, (x,): (&u32,)) -> Weight {\n\n // Use saturation so that an extremely large parameter value\n\n // Does not cause overflow.\n\n x.saturating_mul(self.0).into()\n\n }\n\n}\n\n\n\n// The PaysFee trait indicates whether fees should actually be charged from the caller. If not,\n", "file_path": "gateway/pallet-escrow-gateway/runtime-gateway/fixtures/weights/src/lib.rs", "rank": 91, "score": 107014.26602931452 }, { "content": "type AccountPublic = <Signature as Verify>::Signer;\n\n\n", "file_path": "circuit/src/chain_spec.rs", "rank": 92, "score": 106442.08834716525 }, { "content": "#[test]\n\nfn crypto_hashes() {\n\n let (wasm, code_hash) = compile_module::<Test>(\"crypto_hashes\").unwrap();\n\n\n\n ExtBuilder::default()\n\n .existential_deposit(50)\n\n .build()\n\n .execute_with(|| {\n\n let _ = Balances::deposit_creating(&ALICE, 1_000_000);\n\n assert_ok!(Contracts::put_code(Origin::signed(ALICE), wasm));\n\n\n\n // Instantiate the CRYPTO_HASHES contract.\n\n assert_ok!(Contracts::instantiate(\n\n Origin::signed(ALICE),\n\n 100_000,\n\n GAS_LIMIT,\n\n code_hash.into(),\n\n vec![],\n\n ));\n\n // Perform the call.\n\n let input = b\"_DEAD_BEEF\";\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/tests.rs", "rank": 93, "score": 105725.55052775702 }, { "content": "#[test]\n\nfn should_only_allow_to_be_called_by_escrow_account_being_sudo() {\n\n let (phase, code, input_data, value, gas_limit) = default_multistep_call_args();\n\n\n\n new_test_ext_builder(50, ESCROW_ACCOUNT).execute_with(|| {\n\n let _ = Balances::deposit_creating(&REQUESTER, 10_000_000_000);\n\n\n\n let err_rec = EscrowGateway::multistep_call(\n\n Origin::signed(OTHER_ACCOUNT),\n\n REQUESTER,\n\n TARGET_DEST,\n\n phase,\n\n code,\n\n value,\n\n gas_limit,\n\n input_data,\n\n );\n\n assert_noop!(err_rec, Error::<Test>::UnauthorizedCallAttempt);\n\n });\n\n}\n\n\n", "file_path": "gateway/pallet-escrow-gateway/runtime-gateway/src/tests.rs", "rank": 94, "score": 105606.14022980919 }, { "content": "#[test]\n\nfn should_only_allow_to_be_called_by_escrow_account_being_sudo() {\n\n let (phase, code, input_data, value, gas_limit) = default_gateway_contract_exec_args();\n\n\n\n new_test_ext_builder(50, ESCROW_ACCOUNT).execute_with(|| {\n\n let _ = Balances::deposit_creating(&REQUESTER, 10_000_000_000);\n\n\n\n let err_rec = EscrowGateway::gateway_contract_exec(\n\n Origin::signed(OTHER_ACCOUNT),\n\n REQUESTER,\n\n TARGET_DEST,\n\n phase,\n\n code,\n\n value,\n\n gas_limit,\n\n input_data,\n\n );\n\n assert_noop!(err_rec, Error::<Test>::UnauthorizedCallAttempt);\n\n });\n\n}\n\n\n", "file_path": "gateway/pallet-escrow-gateway/contracts-gateway/src/tests.rs", "rank": 95, "score": 105606.14022980919 }, { "content": "struct ContractModule {\n\n /// A deserialized module. The module is valid (this is Guaranteed by `new` method).\n\n module: elements::Module,\n\n}\n\n\n\nimpl ContractModule {\n\n /// Creates a new instance of `ContractModule`.\n\n ///\n\n /// Returns `Err` if the `original_code` couldn't be decoded or\n\n /// if it contains an invalid module.\n\n fn new(original_code: &[u8]) -> Result<Self, &'static str> {\n\n use wasmi_validation::{validate_module, PlainValidator};\n\n\n\n let module =\n\n elements::deserialize_buffer(original_code).map_err(|_| \"Can't decode wasm code\")?;\n\n\n\n // Make sure that the module is valid.\n\n validate_module::<PlainValidator>(&module).map_err(|_| \"Module is not valid\")?;\n\n\n\n // Return a `ContractModule` instance with\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/versatile-wasm/src/prepare.rs", "rank": 96, "score": 105455.5555290928 }, { "content": "#[test]\n\nfn call_contract_removals() {\n\n removals(|| {\n\n // Call on already-removed account might fail, and this is fine.\n\n let _ = Contracts::call(Origin::signed(ALICE), BOB, 0, GAS_LIMIT, call::null());\n\n true\n\n });\n\n}\n\n\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/tests.rs", "rank": 97, "score": 103428.95254244648 }, { "content": "#[test]\n\nfn deploy_and_call_other_contract() {\n\n let (callee_wasm, callee_code_hash) = compile_module::<Test>(\"return_with_data\").unwrap();\n\n let (caller_wasm, caller_code_hash) = compile_module::<Test>(\"caller_contract\").unwrap();\n\n\n\n ExtBuilder::default()\n\n .existential_deposit(50)\n\n .build()\n\n .execute_with(|| {\n\n // Create\n\n let _ = Balances::deposit_creating(&ALICE, 1_000_000);\n\n assert_ok!(Contracts::put_code(Origin::signed(ALICE), callee_wasm));\n\n assert_ok!(Contracts::put_code(Origin::signed(ALICE), caller_wasm));\n\n\n\n assert_ok!(Contracts::instantiate(\n\n Origin::signed(ALICE),\n\n 100_000,\n\n GAS_LIMIT,\n\n caller_code_hash.into(),\n\n vec![],\n\n ));\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/tests.rs", "rank": 98, "score": 103428.95254244648 }, { "content": "#[test]\n\nfn call_removed_contract() {\n\n let (wasm, code_hash) = compile_module::<Test>(\"set_rent\").unwrap();\n\n\n\n // Balance reached and superior to subsistence threshold\n\n ExtBuilder::default()\n\n .existential_deposit(50)\n\n .build()\n\n .execute_with(|| {\n\n // Create\n\n let _ = Balances::deposit_creating(&ALICE, 1_000_000);\n\n assert_ok!(Contracts::put_code(Origin::signed(ALICE), wasm.clone()));\n\n assert_ok!(Contracts::instantiate(\n\n Origin::signed(ALICE),\n\n 100,\n\n GAS_LIMIT,\n\n code_hash.into(),\n\n <Test as pallet_balances::Trait>::Balance::from(1_000u32).encode() // rent allowance\n\n ));\n\n\n\n // Calling contract should succeed.\n", "file_path": "gateway/pallet-escrow-gateway/escrow-engine/escrow-contracts-wrapper/src/tests.rs", "rank": 99, "score": 103428.95254244648 } ]
Rust
key_config/src/old_key_map.rs
NordGeit/MMAccel
a45b2f8eecb795cb784d3a89be70ee41e60c84a9
use crate::*; use std::io::BufRead; fn str_to_vk(k: &str) -> Option<u32> { match k.trim() { "esc" => Some(VK_ESCAPE.0 as u32), "tab" => Some(VK_TAB.0 as u32), "capslock" => Some(VK_CAPITAL.0 as u32), "shift" => Some(VK_SHIFT.0 as u32), "ctrl" => Some(VK_CONTROL.0 as u32), "alt" => Some(VK_MENU.0 as u32), "backspace" => Some(VK_BACK.0 as u32), "enter" => Some(VK_RETURN.0 as u32), "space" => Some(VK_SPACE.0 as u32), "printscreen" => Some(VK_SNAPSHOT.0 as u32), "pause" => Some(VK_PAUSE.0 as u32), "insert" => Some(VK_INSERT.0 as u32), "delete" => Some(VK_DELETE.0 as u32), "home" => Some(VK_HOME.0 as u32), "end" => Some(VK_END.0 as u32), "pageup" => Some(VK_PRIOR.0 as u32), "pagedown" => Some(VK_NEXT.0 as u32), "up" => Some(VK_UP.0 as u32), "down" => Some(VK_DOWN.0 as u32), "left" => Some(VK_LEFT.0 as u32), "right" => Some(VK_RIGHT.0 as u32), "num+" => Some(VK_ADD.0 as u32), "num-" => Some(VK_SUBTRACT.0 as u32), "num*" => Some(VK_MULTIPLY.0 as u32), "num/" => Some(VK_DIVIDE.0 as u32), "num." => Some(VK_DECIMAL.0 as u32), "-" => Some(VK_OEM_MINUS.0 as u32), ";" => Some(VK_OEM_PLUS.0 as u32), "," => Some(VK_OEM_COMMA.0 as u32), "." => Some(VK_OEM_PERIOD.0 as u32), ":" => Some(VK_OEM_1.0 as u32), "/" => Some(VK_OEM_2.0 as u32), "@" => Some(VK_OEM_3.0 as u32), "[" => Some(VK_OEM_4.0 as u32), "\\" => Some(VK_OEM_5.0 as u32), "]" => Some(VK_OEM_6.0 as u32), "^" => Some(VK_OEM_7.0 as u32), "_" => Some(VK_OEM_102.0 as u32), _ if k.len() == 1 => { let c = k.chars().next().unwrap(); (!c.is_ascii_control()).then(|| c.to_ascii_uppercase() as u32) } _ if k.starts_with("num") => k .trim_matches(|c| !char::is_numeric(c)) .parse() .map(|n: u32| VK_NUMPAD0.0 as u32 + n) .ok(), _ if k.starts_with('f') => k .trim_matches(|c| !char::is_numeric(c)) .parse() .map(|n: u32| VK_F1.0 as u32 + n - 1) .ok(), _ => None, } } #[derive(Debug)] pub struct Item { pub id: String, pub keys: Option<Keys>, } #[derive(Debug)] pub struct OldKeyMap(pub Vec<Item>); impl OldKeyMap { pub fn from_file(path: impl AsRef<std::path::Path>) -> Result<Self, Error> { let file = std::fs::File::open(path)?; let mut reader = std::io::BufReader::new(file); let mut key_map = vec![]; let mut buffer = String::new(); loop { buffer.clear(); if reader.read_line(&mut buffer)? == 0 { break; } if buffer.is_empty() { continue; } if buffer.starts_with('#') { continue; } let ss = buffer.split('=').collect::<Vec<_>>(); if ss.len() != 2 { continue; } let keys = ss[1] .trim() .to_ascii_lowercase() .split('+') .map(|s| str_to_vk(s)) .collect::<Option<Vec<_>>>(); if keys.is_none() { continue; } key_map.push(Item { id: ss[0].trim().to_string(), keys: Some(Keys::from_slice(&keys.unwrap())), }); } Ok(Self(key_map)) } } #[cfg(test)] mod tests { use super::*; #[test] fn load_key_map() { let data = OldKeyMap::from_file("key_map.txt").unwrap(); let prev = data.0.iter().find(|item| item.id == "FramePrev").unwrap(); let mut keys = Keys::new(); keys.vk(b'A' as _); assert!(prev.keys.as_ref().unwrap() == &keys); let next = data.0.iter().find(|item| item.id == "FrameNext").unwrap(); let mut keys = Keys::new(); keys.vk(b'D' as _); assert!(next.keys.as_ref().unwrap() == &keys); } }
use crate::*; use std::io::BufRead; fn str_to_vk(k: &str) -> Option<u32> { match k.trim() { "esc" => Some(VK_ESCAPE.0 as u32), "tab" => Some(VK_TAB.0 as u32), "capslock" => Some(VK_CAPITAL.0 as u32), "shift" => Some(VK_SHIFT.0 as u32), "ctrl" => Some(VK_CO
wrap(); let prev = data.0.iter().find(|item| item.id == "FramePrev").unwrap(); let mut keys = Keys::new(); keys.vk(b'A' as _); assert!(prev.keys.as_ref().unwrap() == &keys); let next = data.0.iter().find(|item| item.id == "FrameNext").unwrap(); let mut keys = Keys::new(); keys.vk(b'D' as _); assert!(next.keys.as_ref().unwrap() == &keys); } }
NTROL.0 as u32), "alt" => Some(VK_MENU.0 as u32), "backspace" => Some(VK_BACK.0 as u32), "enter" => Some(VK_RETURN.0 as u32), "space" => Some(VK_SPACE.0 as u32), "printscreen" => Some(VK_SNAPSHOT.0 as u32), "pause" => Some(VK_PAUSE.0 as u32), "insert" => Some(VK_INSERT.0 as u32), "delete" => Some(VK_DELETE.0 as u32), "home" => Some(VK_HOME.0 as u32), "end" => Some(VK_END.0 as u32), "pageup" => Some(VK_PRIOR.0 as u32), "pagedown" => Some(VK_NEXT.0 as u32), "up" => Some(VK_UP.0 as u32), "down" => Some(VK_DOWN.0 as u32), "left" => Some(VK_LEFT.0 as u32), "right" => Some(VK_RIGHT.0 as u32), "num+" => Some(VK_ADD.0 as u32), "num-" => Some(VK_SUBTRACT.0 as u32), "num*" => Some(VK_MULTIPLY.0 as u32), "num/" => Some(VK_DIVIDE.0 as u32), "num." => Some(VK_DECIMAL.0 as u32), "-" => Some(VK_OEM_MINUS.0 as u32), ";" => Some(VK_OEM_PLUS.0 as u32), "," => Some(VK_OEM_COMMA.0 as u32), "." => Some(VK_OEM_PERIOD.0 as u32), ":" => Some(VK_OEM_1.0 as u32), "/" => Some(VK_OEM_2.0 as u32), "@" => Some(VK_OEM_3.0 as u32), "[" => Some(VK_OEM_4.0 as u32), "\\" => Some(VK_OEM_5.0 as u32), "]" => Some(VK_OEM_6.0 as u32), "^" => Some(VK_OEM_7.0 as u32), "_" => Some(VK_OEM_102.0 as u32), _ if k.len() == 1 => { let c = k.chars().next().unwrap(); (!c.is_ascii_control()).then(|| c.to_ascii_uppercase() as u32) } _ if k.starts_with("num") => k .trim_matches(|c| !char::is_numeric(c)) .parse() .map(|n: u32| VK_NUMPAD0.0 as u32 + n) .ok(), _ if k.starts_with('f') => k .trim_matches(|c| !char::is_numeric(c)) .parse() .map(|n: u32| VK_F1.0 as u32 + n - 1) .ok(), _ => None, } } #[derive(Debug)] pub struct Item { pub id: String, pub keys: Option<Keys>, } #[derive(Debug)] pub struct OldKeyMap(pub Vec<Item>); impl OldKeyMap { pub fn from_file(path: impl AsRef<std::path::Path>) -> Result<Self, Error> { let file = std::fs::File::open(path)?; let mut reader = std::io::BufReader::new(file); let mut key_map = vec![]; let mut buffer = String::new(); loop { buffer.clear(); if reader.read_line(&mut buffer)? == 0 { break; } if buffer.is_empty() { continue; } if buffer.starts_with('#') { continue; } let ss = buffer.split('=').collect::<Vec<_>>(); if ss.len() != 2 { continue; } let keys = ss[1] .trim() .to_ascii_lowercase() .split('+') .map(|s| str_to_vk(s)) .collect::<Option<Vec<_>>>(); if keys.is_none() { continue; } key_map.push(Item { id: ss[0].trim().to_string(), keys: Some(Keys::from_slice(&keys.unwrap())), }); } Ok(Self(key_map)) } } #[cfg(test)] mod tests { use super::*; #[test] fn load_key_map() { let data = OldKeyMap::from_file("key_map.txt").un
random
[ { "content": "fn error(msg: &str) {\n\n message_box(None, msg, \"d3d9.dll エラー\", MB_OK | MB_ICONERROR);\n\n}\n\n\n\n#[inline]\n\nunsafe fn mmaccel_run(base_addr: usize) {\n\n if let Some(mmaccel) = MMACCEL.get() {\n\n let f = mmaccel.get::<unsafe fn(usize)>(b\"mmaccel_run\").unwrap();\n\n f(base_addr)\n\n }\n\n}\n\n\n\n#[inline]\n\nunsafe fn mmaccel_end() {\n\n if let Some(mmaccel) = MMACCEL.get() {\n\n let f = mmaccel.get::<unsafe fn()>(b\"mmaccel_end\").unwrap();\n\n f()\n\n }\n\n}\n\n\n", "file_path": "d3d9/src/lib.rs", "rank": 1, "score": 86813.73188063352 }, { "content": "fn error(msg: &str) {\n\n message_box(None, msg, \"MMAccelエラー\", MB_OK | MB_ICONERROR);\n\n}\n\n\n\nunsafe extern \"system\" fn hook_call_window_proc_ret(code: i32, wparam: WPARAM, lparam: LPARAM) -> LRESULT {\n\n if code < 0 || code != HC_ACTION as i32 {\n\n return CallNextHookEx(HHOOK(0), code, wparam, lparam);\n\n }\n\n let ret = std::panic::catch_unwind(|| {\n\n CONTEXT\n\n .get_mut()\n\n .unwrap()\n\n .call_window_proc_ret(&*(lparam.0 as *const CWPRETSTRUCT));\n\n });\n\n if ret.is_err() {\n\n PostQuitMessage(1);\n\n return LRESULT(0);\n\n }\n\n CallNextHookEx(HHOOK(0), code, wparam, lparam)\n\n}\n", "file_path": "mmaccel/src/lib.rs", "rank": 2, "score": 86813.73188063352 }, { "content": "fn write_version_file(version: &str) {\n\n let file = std::fs::File::create(\"../version\").unwrap();\n\n let mut writer = std::io::BufWriter::new(file);\n\n write!(writer, \"{}\", version).unwrap();\n\n}\n\n\n", "file_path": "mmaccel/build.rs", "rank": 3, "score": 84727.77907773526 }, { "content": "fn write_package_ps1(version: &str) {\n\n let file = std::fs::File::create(\"../package.ps1\").unwrap();\n\n let mut writer = std::io::BufWriter::new(file);\n\n write!(writer, include_str!(\"src/template_package.ps1\"), version).unwrap();\n\n}\n\n\n", "file_path": "mmaccel/build.rs", "rank": 4, "score": 84727.77907773526 }, { "content": "#[inline]\n\npub fn get_current_thread_id() -> u32 {\n\n unsafe { GetCurrentThreadId() }\n\n}\n\n\n", "file_path": "wrapper/src/lib.rs", "rank": 5, "score": 81159.84471284342 }, { "content": "pub fn vk_to_string(k: u32) -> String {\n\n const ZERO: u16 = b'0' as _;\n\n const Z: u16 = b'Z' as _;\n\n const NUMPAD0: u16 = VK_NUMPAD0.0 as _;\n\n const NUMPAD9: u16 = VK_NUMPAD9.0 as _;\n\n const F1: u16 = VK_F1.0 as _;\n\n const F24: u16 = VK_F24.0 as _;\n\n match VIRTUAL_KEY(k as _) {\n\n VK_ESCAPE => \"Esc\".into(),\n\n VK_TAB => \"Tab\".into(),\n\n VK_CAPITAL => \"CapsLock\".into(),\n\n VK_SHIFT => \"Shift\".into(),\n\n VK_CONTROL => \"Ctrl\".into(),\n\n VK_MENU => \"Alt\".into(),\n\n VK_BACK => \"BackSpace\".into(),\n\n VK_RETURN => \"Enter\".into(),\n\n VK_SPACE => \"Space\".into(),\n\n VK_SNAPSHOT => \"PrintScreen\".into(),\n\n // VK_SCROLL => \"ScrollLock\".into(),\n\n VK_PAUSE => \"Pause\".into(),\n", "file_path": "key_map/src/lib.rs", "rank": 6, "score": 76211.2072667807 }, { "content": "fn error(text: impl AsRef<str>) {\n\n message_box(None, text, \"MMAccel キー設定\", MB_OK | MB_ICONERROR);\n\n}\n\n\n", "file_path": "key_config/src/main.rs", "rank": 7, "score": 76082.36329421449 }, { "content": "#[inline]\n\npub fn get_window_thread_process_id(hwnd: HWND) -> u32 {\n\n unsafe {\n\n let mut id = 0;\n\n GetWindowThreadProcessId(hwnd, &mut id);\n\n id\n\n }\n\n}\n\n\n", "file_path": "wrapper/src/lib.rs", "rank": 8, "score": 71555.09894565656 }, { "content": "fn calc_layout(window_size: wita::LogicalSize<u32>) -> Layout {\n\n let height = window_size.height as i32 - MARGIN * 2;\n\n let side_menu = Rect {\n\n position: (MARGIN, MARGIN).into(),\n\n size: (SIDE_MENU_WIDTH, height).into(),\n\n };\n\n let width = window_size.width as i32 - SIDE_MENU_WIDTH - MARGIN * 3;\n\n let shortcut_list = Rect {\n\n position: ((SIDE_MENU_WIDTH + MARGIN * 2) as _, MARGIN as _).into(),\n\n size: (width, height).into(),\n\n };\n\n Layout {\n\n side_menu,\n\n shortcut_list,\n\n }\n\n}\n\n\n\npub struct Application {\n\n settings: Settings,\n\n main_window: wita::Window,\n", "file_path": "key_config/src/application.rs", "rank": 9, "score": 68229.55173670928 }, { "content": "#[inline]\n\npub fn to_wchar(src: impl AsRef<str>) -> Vec<u16> {\n\n src.as_ref().encode_utf16().chain(Some(0)).collect()\n\n}\n\n\n", "file_path": "wrapper/src/lib.rs", "rank": 10, "score": 67683.73538879438 }, { "content": "fn main() {\n\n let version = env!(\"CARGO_PKG_VERSION\");\n\n write_package_ps1(&version.replace(\".\", \"_\"));\n\n write_version_file(&version);\n\n}\n", "file_path": "mmaccel/build.rs", "rank": 11, "score": 47763.62995176652 }, { "content": "struct TimePeriod(u32);\n\n\n\nimpl TimePeriod {\n\n #[inline]\n\n fn new(n: u32) -> Self {\n\n unsafe {\n\n timeBeginPeriod(n);\n\n Self(n)\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for TimePeriod {\n\n fn drop(&mut self) {\n\n unsafe {\n\n timeEndPeriod(self.0);\n\n }\n\n }\n\n}\n\n\n", "file_path": "mmaccel/src/context.rs", "rank": 12, "score": 47702.41282588073 }, { "content": "fn main() {\n\n embed_resource::compile(\"src/resource.rc\");\n\n}\n", "file_path": "key_config/build.rs", "rank": 13, "score": 46432.584209917186 }, { "content": "fn main() {\n\n if let Err(e) = build_logger() {\n\n error(&format!(\"MMAccel キー設定のログを取れません ({})\", e));\n\n }\n\n std::panic::set_hook(Box::new(|info| {\n\n let msg = if let Some(location) = info.location() {\n\n if let Some(s) = info.payload().downcast_ref::<&str>() {\n\n format!(\"panic!!! {} ({}:{})\", s, location.file(), location.line())\n\n } else {\n\n format!(\"panic!!! ({}:{})\", location.file(), location.line())\n\n }\n\n } else {\n\n \"panic!!! unknown\".into()\n\n };\n\n log::error!(\"{}\", &msg);\n\n error(&msg);\n\n log::info!(\"key_config panic\");\n\n }));\n\n log::info!(\"key_config start\");\n\n wita::run(wita::RunType::Wait, Application::new).unwrap();\n\n log::info!(\"key_config end\");\n\n}\n", "file_path": "key_config/src/main.rs", "rank": 14, "score": 45215.54886805651 }, { "content": "pub fn message_box(\n\n hwnd: Option<HWND>,\n\n text: impl AsRef<str>,\n\n caption: impl AsRef<str>,\n\n style: MESSAGEBOX_STYLE,\n\n) -> MESSAGEBOX_RESULT {\n\n unsafe {\n\n let text = to_wchar(text.as_ref());\n\n let caption = to_wchar(caption.as_ref());\n\n MessageBoxW(\n\n hwnd.unwrap_or(HWND(0)),\n\n PWSTR(text.as_ptr() as _),\n\n PWSTR(caption.as_ptr() as _),\n\n style,\n\n )\n\n }\n\n}\n\n\n\n#[derive(PartialEq, Eq, Debug)]\n\npub struct HookHandle(HHOOK);\n", "file_path": "wrapper/src/lib.rs", "rank": 15, "score": 42424.79241332963 }, { "content": "pub fn inject_functions(\n\n base_addr: usize,\n\n img_desc: &IMAGE_IMPORT_DESCRIPTOR,\n\n functions: &[(&[u8], u64)],\n\n) -> windows::core::Result<()> {\n\n unsafe {\n\n let mut iat_ptr = (base_addr + img_desc.FirstThunk as usize) as *mut IMAGE_THUNK_DATA64;\n\n let mut int_ptr = (base_addr + img_desc.Anonymous.OriginalFirstThunk as usize) as *mut IMAGE_THUNK_DATA64;\n\n while iat_ptr.as_ref().unwrap().u1.Function != 0 {\n\n let mut iat = &mut *iat_ptr;\n\n let int = &*int_ptr;\n\n if (int.u1.Ordinal & 0x8000000000000000) != 0 {\n\n continue;\n\n }\n\n let name_ptr = (base_addr + int.u1.AddressOfData as usize) as *const IMAGE_IMPORT_BY_NAME;\n\n for &(function_name, fp) in functions.iter() {\n\n let name = std::slice::from_raw_parts(\n\n name_ptr.as_ref().unwrap().Name.as_ptr() as *const u8,\n\n function_name.len(),\n\n );\n", "file_path": "mmaccel/src/injection.rs", "rank": 16, "score": 42424.79241332963 }, { "content": "fn version_info(hwnd: HWND) {\n\n let text = format!(\"MMAccel {}\\nby LNSEAB\", env!(\"CARGO_PKG_VERSION\"));\n\n message_box(Some(hwnd), text, \"\", MB_OK);\n\n}\n\n\n", "file_path": "mmaccel/src/context.rs", "rank": 17, "score": 40006.71558653427 }, { "content": "#[inline]\n\npub fn enum_windows() -> Vec<HWND> {\n\n extern \"system\" fn callback(hwnd: HWND, lparam: LPARAM) -> BOOL {\n\n unsafe {\n\n let windows = &mut *(lparam.0 as *mut Vec<HWND>);\n\n windows.push(hwnd);\n\n true.into()\n\n }\n\n }\n\n\n\n unsafe {\n\n let mut windows = vec![];\n\n if EnumWindows(Some(callback), LPARAM(&mut windows as *mut _ as _)) == BOOL(0) {\n\n vec![]\n\n } else {\n\n windows\n\n }\n\n }\n\n}\n\n\n", "file_path": "wrapper/src/lib.rs", "rank": 18, "score": 37884.40485880892 }, { "content": "#[inline]\n\npub fn get_keyboard_state(v: &mut [u8]) {\n\n unsafe {\n\n debug_assert_eq!(v.len(), 256);\n\n GetKeyboardState(v.as_mut_ptr());\n\n }\n\n}\n\n\n", "file_path": "wrapper/src/lib.rs", "rank": 19, "score": 36001.850402097036 }, { "content": "#[inline]\n\npub fn get_last_error() -> windows::core::Error {\n\n windows::core::Error::from_win32()\n\n}\n\n\n", "file_path": "wrapper/src/lib.rs", "rank": 20, "score": 35182.71795253545 }, { "content": "#[inline]\n\npub fn get_class_name(hwnd: HWND) -> String {\n\n unsafe {\n\n let mut buffer = vec![0; 256];\n\n let size = GetClassNameW(hwnd, PWSTR(buffer.as_mut_ptr()), buffer.len() as _);\n\n if size == 0 {\n\n return String::new();\n\n }\n\n String::from_utf16_lossy(&buffer[..size as usize])\n\n }\n\n}\n\n\n\npub const LVN_ITEMCHANGED: u32 = -101i32 as _;\n\npub const LVN_ITEMACTIVATE: u32 = -114i32 as _;\n\npub const NM_CLICK: u32 = -2i32 as _;\n\npub const NM_DBLCLK: u32 = -3i32 as _;\n\npub const NM_RCLICK: u32 = -5i32 as _;\n\npub const NM_SETFOCUS: u32 = -7i32 as _;\n\npub const NM_KILLFOCUS: u32 = -8i32 as _;\n\npub const NM_CUSTOMDRAW: u32 = -12i32 as _;\n", "file_path": "wrapper/src/lib.rs", "rank": 21, "score": 35182.71795253545 }, { "content": "#[inline]\n\npub fn bstr(src: &[u8]) -> Vec<u8> {\n\n src.iter().cloned().chain(Some(0)).collect()\n\n}\n\n\n", "file_path": "wrapper/src/lib.rs", "rank": 22, "score": 35136.23738294021 }, { "content": "pub fn get_system_directory() -> std::path::PathBuf {\n\n unsafe {\n\n let mut buffer = Vec::new();\n\n buffer.resize(MAX_PATH as _, 0);\n\n SHGetFolderPathW(HWND(0), CSIDL_SYSTEM as _, HANDLE(0), 0, PWSTR(buffer.as_mut_ptr())).unwrap();\n\n let len = buffer.iter().position(|&v| v == 0).unwrap();\n\n String::from_utf16_lossy(&buffer[..len]).into()\n\n }\n\n}\n\n\n", "file_path": "wrapper/src/lib.rs", "rank": 23, "score": 34419.404584502096 }, { "content": "pub fn get_module_path() -> std::path::PathBuf {\n\n unsafe {\n\n let mut buffer = vec![0; MAX_PATH as usize * 2];\n\n let size = GetModuleFileNameW(HINSTANCE(0), PWSTR(buffer.as_mut_ptr()), buffer.len() as _);\n\n String::from_utf16_lossy(&buffer[..size as _]).into()\n\n }\n\n}\n\n\n", "file_path": "wrapper/src/lib.rs", "rank": 24, "score": 34419.404584502096 }, { "content": "fn theme_font(hwnd: HWND) -> windows::core::Result<HFONT> {\n\n unsafe {\n\n let theme_name = to_wchar(\"TEXTSTYLE\");\n\n let theme = OpenThemeData(hwnd, PWSTR(theme_name.as_ptr() as _));\n\n if theme == 0 {\n\n return Err(get_last_error().into());\n\n }\n\n let log_font = GetThemeFont(theme, HDC(0), 4, 0, TMT_FONT.0 as _)?;\n\n let font = CreateFontIndirectW(&log_font);\n\n if font == HFONT(0) {\n\n return Err(get_last_error().into());\n\n }\n\n CloseThemeData(theme)?;\n\n Ok(font)\n\n }\n\n}\n\n\n\npub struct EditResult {\n\n pub category: usize,\n\n pub item: usize,\n", "file_path": "key_config/src/editor.rs", "rank": 25, "score": 32084.988909412248 }, { "content": "fn build_logger() -> Result<(), Box<dyn std::error::Error + 'static>> {\n\n const FORMAT: &str = \"[{d(%Y-%m-%d %H:%M:%S%z)} {l} (({f}:{L}))] {m}\\n\";\n\n let stdout = ConsoleAppender::builder()\n\n .encoder(Box::new(PatternEncoder::new(FORMAT)))\n\n .target(console::Target::Stderr)\n\n .build();\n\n let file = FileAppender::builder()\n\n .encoder(Box::new(PatternEncoder::new(FORMAT)))\n\n .append(false)\n\n .build(\"key_config.log\")?;\n\n let config = Config::builder()\n\n .appender(Appender::builder().build(\"stdout\", Box::new(stdout)))\n\n .appender(Appender::builder().build(\"file\", Box::new(file)))\n\n .build(\n\n Root::builder()\n\n .appender(\"stdout\")\n\n .appender(\"file\")\n\n .build(log::LevelFilter::Debug),\n\n )?;\n\n log4rs::init_config(config)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "key_config/src/main.rs", "rank": 26, "score": 30751.843014757258 }, { "content": "#[inline]\n\nfn from_file<T>(path: impl AsRef<std::path::Path>) -> Result<T, Error>\n\nwhere\n\n T: serde::de::DeserializeOwned,\n\n{\n\n let file = std::fs::File::open(&path).map_err(|e| Error::file(e, &path))?;\n\n serde_json::from_reader(std::io::BufReader::new(file)).map_err(|e| Error::json_file(e, path))\n\n}\n\n\n", "file_path": "key_config/src/application.rs", "rank": 27, "score": 27944.16953831385 }, { "content": "#[inline]\n\nfn to_file<T>(path: impl AsRef<std::path::Path>, value: &T) -> Result<(), Error>\n\nwhere\n\n T: serde::Serialize,\n\n{\n\n let file = std::fs::File::create(&path)?;\n\n serde_json::to_writer_pretty(std::io::BufWriter::new(file), value).map_err(|e| Error::json_file(e, path))\n\n}\n\n\n", "file_path": "key_config/src/application.rs", "rank": 28, "score": 26961.116802902947 }, { "content": "fn build_logger(path: &std::path::Path) -> Result<(), Box<dyn std::error::Error + 'static>> {\n\n const FORMAT: &str = \"[{d(%Y-%m-%d %H:%M:%S%z)} {l} (({f}:{L}))] {m}\\n\";\n\n let stdout = ConsoleAppender::builder()\n\n .encoder(Box::new(PatternEncoder::new(FORMAT)))\n\n .target(console::Target::Stderr)\n\n .build();\n\n let file = FileAppender::builder()\n\n .encoder(Box::new(PatternEncoder::new(FORMAT)))\n\n .append(false)\n\n .build(path.join(\"MMAccel\").join(\"mmaccel.log\"))?;\n\n let config = Config::builder()\n\n .appender(Appender::builder().build(\"stdout\", Box::new(stdout)))\n\n .appender(Appender::builder().build(\"file\", Box::new(file)))\n\n .build(\n\n Root::builder()\n\n .appender(\"stdout\")\n\n .appender(\"file\")\n\n .build(log::LevelFilter::Debug),\n\n )?;\n\n log4rs::init_config(config)?;\n", "file_path": "mmaccel/src/lib.rs", "rank": 29, "score": 26961.116802902947 }, { "content": "pub fn image_import_desc(base_addr: usize, target: &[u8]) -> windows::core::Result<IMAGE_IMPORT_DESCRIPTOR> {\n\n unsafe {\n\n let mut size = 0;\n\n let mut img_desc_ptr =\n\n ImageDirectoryEntryToData(base_addr as _, BOOLEAN(1), IMAGE_DIRECTORY_ENTRY_IMPORT, &mut size)\n\n as *mut IMAGE_IMPORT_DESCRIPTOR;\n\n if img_desc_ptr.is_null() {\n\n return Err(get_last_error().into());\n\n }\n\n loop {\n\n let img_desc = &*img_desc_ptr;\n\n if img_desc.Name == 0 {\n\n return Err(get_last_error().into());\n\n }\n\n let p = (base_addr + img_desc.Name as usize) as *const u8;\n\n let name = std::slice::from_raw_parts(p, target.len());\n\n let name = name.iter().map(|c| c.to_ascii_lowercase()).collect::<Vec<_>>();\n\n if name.iter().eq(target) {\n\n break;\n\n }\n\n img_desc_ptr = img_desc_ptr.offset(1);\n\n }\n\n Ok(*img_desc_ptr)\n\n }\n\n}\n\n\n", "file_path": "mmaccel/src/injection.rs", "rank": 30, "score": 26846.741169471097 }, { "content": " UnfoldAll,\n\n}\n\n\n\nimpl ItemKind {\n\n fn new(a: &[serde_json::Value]) -> Option<Self> {\n\n let kind = match a[1].as_str()? {\n\n \"key\" if a.len() == 3 => Self::Key(u32::from_str_radix(a[2].as_str()?, 16).ok()?),\n\n \"button\" if a.len() == 3 => Self::Button(u32::from_str_radix(a[2].as_str()?, 16).ok()?),\n\n \"edit\" if a.len() == 3 => Self::Edit(u32::from_str_radix(a[2].as_str()?, 16).ok()?),\n\n \"combo_prev\" if a.len() == 3 => Self::Combo(ComboDir::Prev, u32::from_str_radix(a[2].as_str()?, 16).ok()?),\n\n \"combo_next\" if a.len() == 3 => Self::Combo(ComboDir::Next, u32::from_str_radix(a[2].as_str()?, 16).ok()?),\n\n \"menu\" if a.len() == 4 => Self::Menu(a[2].as_u64()? as _, a[3].as_u64()? as _),\n\n \"fold\" if a.len() == 4 => Self::Fold(\n\n u32::from_str_radix(a[2].as_str()?, 16).ok()?,\n\n u32::from_str_radix(a[3].as_str()?, 16).ok()?,\n\n ),\n\n \"kill_focus\" => Self::KillFocus,\n\n \"fold_all\" => Self::FoldAll,\n\n \"unfold_all\" => Self::UnfoldAll,\n\n _ => return None,\n", "file_path": "mmaccel/src/mmd_map.rs", "rank": 31, "score": 9.346227161326011 }, { "content": "use crate::mmd_map::*;\n\nuse crate::*;\n\nuse key_map::*;\n\nuse std::collections::HashMap;\n\n\n\npub struct Handler {\n\n input: Vec<u8>,\n\n input_keys: Keys,\n\n handler: HashMap<Keys, ItemKind>,\n\n key_states: HashMap<u32, bool>,\n\n folds: Vec<u32>,\n\n unfolds: Vec<u32>,\n\n tabstop: bool,\n\n}\n\n\n\nimpl Handler {\n\n pub fn new(mmd_map: &MmdMap, key_map: KeyMap) -> Self {\n\n let mut key_states = HashMap::new();\n\n mmd_map\n\n .iter()\n", "file_path": "mmaccel/src/handler.rs", "rank": 32, "score": 9.315890934389188 }, { "content": " key_states: &mut HashMap<u32, bool>,\n\n folds: &[u32],\n\n unfolds: &[u32],\n\n mmd_window: HWND,\n\n sub_window: Option<HWND>,\n\n hwnd: HWND,\n\n ) {\n\n if get_class_name(hwnd).to_ascii_uppercase() == \"EDIT\" {\n\n unsafe {\n\n if item == &ItemKind::KillFocus {\n\n SetFocus(mmd_window);\n\n log::debug!(\"KillFocus\")\n\n }\n\n }\n\n return;\n\n }\n\n match item {\n\n ItemKind::Key(k) => {\n\n if key_states.get(k).is_some() && *k != VK_SHIFT.0 as u32 && *k != VK_CONTROL.0 as u32 {\n\n *key_states.get_mut(&(VK_SHIFT.0 as u32)).unwrap() = false;\n", "file_path": "mmaccel/src/handler.rs", "rank": 33, "score": 8.410251044706817 }, { "content": "use serde_json::Value;\n\nuse std::fs::File;\n\nuse std::io::BufReader;\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, Debug)]\n\npub enum ComboDir {\n\n Prev,\n\n Next,\n\n}\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, Debug)]\n\npub enum ItemKind {\n\n Key(u32),\n\n Button(u32),\n\n Edit(u32),\n\n Combo(ComboDir, u32),\n\n Menu(u32, u32),\n\n Fold(u32, u32),\n\n KillFocus,\n\n FoldAll,\n", "file_path": "mmaccel/src/mmd_map.rs", "rank": 35, "score": 7.400546658875284 }, { "content": "use crate::*;\n\n\n", "file_path": "mmaccel/src/injection.rs", "rank": 36, "score": 7.381156465076646 }, { "content": "use crate::*;\n\n\n", "file_path": "key_config/src/editor.rs", "rank": 37, "score": 7.381156465076646 }, { "content": " };\n\n Some(kind)\n\n }\n\n\n\n #[inline]\n\n pub fn as_key(&self) -> Option<u32> {\n\n match self {\n\n Self::Key(v) => Some(*v),\n\n _ => None,\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn as_fold(&self) -> Option<(u32, u32)> {\n\n match self {\n\n Self::Fold(hide, show) => Some((*hide, *show)),\n\n _ => None,\n\n }\n\n }\n\n}\n", "file_path": "mmaccel/src/mmd_map.rs", "rank": 38, "score": 7.336698696078242 }, { "content": "use crate::*;\n\n\n\n#[inline]\n", "file_path": "key_config/src/application.rs", "rank": 39, "score": 7.266248570832271 }, { "content": " }\n\n }\n\n\n\n pub fn is_pressed(&self, vk: u32) -> bool {\n\n if vk == VK_TAB.0 as u32 && self.tabstop {\n\n return true;\n\n }\n\n *self.key_states.get(&vk).unwrap_or(&false)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\n#[allow(clippy::eq_op)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn keys_eq() {\n\n let a = Keys::from_slice(&[VK_LEFT.0 as _, VK_CONTROL.0 as _]);\n\n let b = Keys::from_slice(&[VK_RIGHT.0 as _, VK_CONTROL.0 as _]);\n\n let c = Keys::from_slice(&[VK_LEFT.0 as _, VK_CONTROL.0 as _]);\n\n assert!(a == a);\n\n assert!(a != b);\n\n assert!(a == c);\n\n }\n\n}\n", "file_path": "mmaccel/src/handler.rs", "rank": 41, "score": 7.037473036607595 }, { "content": "use crate::*;\n\nuse std::cell::RefCell;\n\nuse std::os::windows::io::AsRawHandle;\n\nuse std::sync::{atomic, atomic::AtomicBool, Arc};\n\nuse std::thread::JoinHandle;\n\n\n\npub struct FileMonitor {\n\n th: RefCell<Option<JoinHandle<()>>>,\n\n exit_flag: Arc<AtomicBool>,\n\n}\n\n\n\nimpl FileMonitor {\n\n pub fn new() -> Self {\n\n Self {\n\n th: RefCell::new(None),\n\n exit_flag: Arc::new(AtomicBool::new(false)),\n\n }\n\n }\n\n\n\n pub fn start(&self, dir_path: impl AsRef<std::path::Path>, mut f: impl FnMut(&std::path::Path) + Send + 'static) {\n", "file_path": "mmaccel/src/file_monitor.rs", "rank": 42, "score": 6.898573879145229 }, { "content": "use crate::*;\n\nuse handler::Handler;\n\nuse key_map::KeyMap;\n\nuse mmd_map::MmdMap;\n\nuse std::sync::{atomic, atomic::AtomicBool, Arc};\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, Debug)]\n\npub enum MenuItem {\n\n LaunchConfig,\n\n RaiseTimerResolution(bool),\n\n KillFocusWithClick(bool),\n\n Version,\n\n}\n\n\n\nimpl MenuCommand for MenuItem {\n\n fn from_command(v: std::mem::Discriminant<Self>, item_type: MenuItemType) -> Self {\n\n match v {\n\n _ if v == std::mem::discriminant(&Self::LaunchConfig) => Self::LaunchConfig,\n\n _ if v == std::mem::discriminant(&Self::RaiseTimerResolution(false)) => {\n\n Self::RaiseTimerResolution(item_type.as_with_check().unwrap())\n", "file_path": "mmaccel/src/context.rs", "rank": 43, "score": 6.625480984440008 }, { "content": "use crate::*;\n\n\n\npub const IDM_MENU_DETACH: u32 = 10;\n\n\n\npub struct PopupMenu {\n\n menu: HMENU,\n\n category: usize,\n\n item: usize,\n\n}\n\n\n\nimpl PopupMenu {\n\n pub fn new() -> Self {\n\n unsafe {\n\n let menu = CreatePopupMenu();\n\n let text = to_wchar(\"解除\");\n\n AppendMenuW(menu, MF_STRING, IDM_MENU_DETACH as _, PWSTR(text.as_ptr() as _));\n\n Self {\n\n menu,\n\n category: 0,\n\n item: 0,\n", "file_path": "key_config/src/popup_menu.rs", "rank": 44, "score": 6.537977827269204 }, { "content": " VK_INSERT => \"Insert\".into(),\n\n VK_DELETE => \"Delete\".into(),\n\n VK_HOME => \"Home\".into(),\n\n VK_END => \"End\".into(),\n\n VK_PRIOR => \"PageUp\".into(),\n\n VK_NEXT => \"PageDown\".into(),\n\n VK_UP => \"Up\".into(),\n\n VK_DOWN => \"Down\".into(),\n\n VK_LEFT => \"Left\".into(),\n\n VK_RIGHT => \"Right\".into(),\n\n // VK_NUMLOCK => \"NumLock\".into(),\n\n VK_ADD => \"Num+\".into(),\n\n VK_SUBTRACT => \"Num-\".into(),\n\n VK_MULTIPLY => \"Num*\".into(),\n\n VK_DIVIDE => \"Num/\".into(),\n\n VK_DECIMAL => \"Num.\".into(),\n\n // VK_LSHIFT => \"LShift\".into(),\n\n // VK_RSHIFT => \"RShift\".into(),\n\n // VK_LCONTROL => \"LCtrl\".into(),\n\n // VK_RCONTROL => \"RCtrl\".into(),\n", "file_path": "key_map/src/lib.rs", "rank": 45, "score": 5.62674621619811 }, { "content": " }\n\n }\n\n\n\n #[inline]\n\n pub fn end(&mut self) -> Option<EditResult> {\n\n unsafe {\n\n SetFocus(GetParent(self.hwnd));\n\n ShowWindow(self.hwnd, SW_HIDE);\n\n self.result.take().and_then(|ret| {\n\n (ret.keys != Keys::from_slice(&[VK_SHIFT.0 as u32])\n\n && ret.keys != Keys::from_slice(&[VK_CONTROL.0 as u32]))\n\n .then(|| ret)\n\n })\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn is_visible(&self) -> bool {\n\n unsafe { IsWindowVisible(self.hwnd).as_bool() }\n\n }\n", "file_path": "key_config/src/editor.rs", "rank": 46, "score": 5.248571811143558 }, { "content": " let mut v = vec![];\n\n for id in item_order.iter() {\n\n let id = id.as_str().ok_or(Error::InvalidData)?;\n\n let name = item\n\n .get(id)\n\n .and_then(|a| a.as_array())\n\n .and_then(|a| a[0].as_str())\n\n .ok_or(Error::InvalidData)?;\n\n let keys = key_map\n\n .get(id)\n\n .and_then(|v| v.as_array())\n\n .and_then(|a| {\n\n a.iter()\n\n .map(|v| v.as_u64().map(|v| v as u32))\n\n .collect::<Option<Vec<_>>>()\n\n })\n\n .map(|a| Keys::from_slice(&a))\n\n .unwrap_or_default();\n\n v.push(Item {\n\n id: id.to_string(),\n", "file_path": "key_config/src/application.rs", "rank": 47, "score": 5.106168128733878 }, { "content": " hwnd,\n\n );\n\n }\n\n }\n\n\n\n pub fn key_up(&mut self, vk: u32) {\n\n get_keyboard_state(&mut self.input);\n\n self.input_keys.keyboard_state(&self.input);\n\n log::debug!(\"key_up input_keys = {:?}\", self.input_keys);\n\n for (keys, kind) in self.handler.iter() {\n\n if let ItemKind::Key(k) = kind {\n\n if !keys.is_included(&self.input_keys) {\n\n if let Some(ks) = self.key_states.get_mut(k) {\n\n *ks = false;\n\n }\n\n }\n\n }\n\n }\n\n if vk == VK_TAB.0 as u32 && self.tabstop {\n\n self.tabstop = false;\n", "file_path": "mmaccel/src/handler.rs", "rank": 48, "score": 5.0566918042602245 }, { "content": " use super::*;\n\n\n\n #[test]\n\n fn load_mmd_map() {\n\n let m = MmdMap::from_file(\"src/mmd_map.json\").unwrap();\n\n let item = &m.iter().find(|(key, _)| key == \"Undo\").unwrap().1;\n\n assert!(item.name == \"元に戻す\");\n\n assert!(matches!(item.kind, ItemKind::Button(0x190)));\n\n let item = &m.iter().find(|(key, _)| key == \"MenuHelpAbout\").unwrap().1;\n\n assert!(item.name == \"バージョン情報\");\n\n assert!(matches!(item.kind, ItemKind::Menu(7, 6)));\n\n }\n\n}\n", "file_path": "mmaccel/src/mmd_map.rs", "rank": 49, "score": 5.022823194821602 }, { "content": " }\n\n }\n\n handler.insert(Keys::from_slice(&[VK_CONTROL.0 as _]), ItemKind::Key(VK_CONTROL.0 as _));\n\n handler.insert(Keys::from_slice(&[VK_SHIFT.0 as _]), ItemKind::Key(VK_SHIFT.0 as _));\n\n key_states.insert(VK_CONTROL.0 as _, false);\n\n key_states.insert(VK_SHIFT.0 as _, false);\n\n Self {\n\n input: vec![0; 256],\n\n input_keys: Keys::with_capacity(3),\n\n handler,\n\n key_states,\n\n folds,\n\n unfolds,\n\n tabstop: false,\n\n }\n\n }\n\n\n\n pub fn key_down(&mut self, vk: u32, mmd_window: HWND, sub_window: Option<HWND>, hwnd: HWND) {\n\n fn handle(\n\n item: &ItemKind,\n", "file_path": "mmaccel/src/handler.rs", "rank": 50, "score": 5.009231043676208 }, { "content": "#![allow(clippy::mem_discriminant_non_enum)]\n\n\n\nuse crate::*;\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, Debug)]\n\npub enum MenuItemType {\n\n Item,\n\n WithCheck(bool),\n\n}\n\n\n\nimpl MenuItemType {\n\n #[inline]\n\n pub fn as_with_check(&self) -> Option<bool> {\n\n if let Self::WithCheck(b) = self {\n\n Some(*b)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "mmaccel/src/menu.rs", "rank": 51, "score": 4.9010181389204694 }, { "content": " fn key_map_test() {\n\n let mut key_map = KeyMap(HashMap::new());\n\n key_map.insert(\"Undo\", Keys(vec![VK_CONTROL.0 as _, b'Z' as _]));\n\n key_map.insert(\"Redo\", Keys(vec![VK_CONTROL.0 as _, VK_SHIFT.0 as _, b'Z' as _]));\n\n let ret: KeyMap = serde_json::from_str(&serde_json::to_string(&key_map).unwrap()).unwrap();\n\n assert!(ret.get(\"Undo\").unwrap() == &Keys(vec![VK_CONTROL.0 as _, b'Z' as _]));\n\n assert!(ret.get(\"Redo\").unwrap() == &Keys(vec![VK_CONTROL.0 as _, VK_SHIFT.0 as _, b'Z' as _]));\n\n assert!(ret.get(\"Undo\").unwrap() != &Keys(vec![VK_SHIFT.0 as _, b'Z' as _]));\n\n }\n\n}\n", "file_path": "key_map/src/lib.rs", "rank": 52, "score": 4.778317646623736 }, { "content": " E_FAIL.0 as _\n\n }\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"system\" fn DllMain(_: HINSTANCE, reason: u32, _: *mut std::ffi::c_void) -> BOOL {\n\n match reason {\n\n DLL_PROCESS_ATTACH => {\n\n let path = get_module_path().parent().unwrap().to_path_buf();\n\n if path.join(\"MMPlus.dll\").exists() {\n\n let msimg = Library::new(path.join(\"MSIMG32.dll\"));\n\n if let Ok(msimg) = msimg {\n\n MSIMG32.set(msimg).ok();\n\n }\n\n }\n\n let d3d9 = Library::new(get_system_directory().join(\"d3d9.dll\"));\n\n match d3d9 {\n\n Ok(d3d9) => {\n\n D3D9.set(d3d9).unwrap();\n\n }\n", "file_path": "d3d9/src/lib.rs", "rank": 54, "score": 4.410501769081959 }, { "content": "use serde::ser::SerializeMap;\n\nuse std::collections::HashMap;\n\nuse std::fs::File;\n\nuse std::io::BufReader;\n\nuse windows::Win32::UI::Input::KeyboardAndMouse::*;\n\n\n", "file_path": "key_map/src/lib.rs", "rank": 55, "score": 4.184895872339968 }, { "content": " || k == VK_LCONTROL.0 as u32\n\n || k == VK_RCONTROL.0 as u32\n\n || k == VK_LMENU.0 as u32\n\n || k == VK_RMENU.0 as u32\n\n }\n\n\n\n self.0.clear();\n\n for (i, k) in v.iter().enumerate() {\n\n if (0x07..0xe0).contains(&i) && (k & 0x80) != 0 && !is_lr_key(i as _) {\n\n self.0.push(i as u32);\n\n }\n\n }\n\n self.0.sort_unstable();\n\n }\n\n\n\n #[inline]\n\n pub fn vk(&mut self, vk: u32) {\n\n self.0.clear();\n\n self.0.push(vk);\n\n }\n", "file_path": "key_map/src/lib.rs", "rank": 56, "score": 4.132607515408515 }, { "content": "#![cfg_attr(not(debug_assertions), windows_subsystem = \"windows\")]\n\n\n\nmod application;\n\nmod editor;\n\nmod error;\n\nmod old_key_map;\n\nmod popup_menu;\n\nmod shortcut_list;\n\nmod side_menu;\n\n\n\nuse application::*;\n\nuse editor::*;\n\nuse error::*;\n\nuse key_map::*;\n\nuse log4rs::append::{console, console::ConsoleAppender, file::FileAppender};\n\nuse log4rs::config::{Appender, Config, Root};\n\nuse log4rs::encode::pattern::PatternEncoder;\n\nuse old_key_map::OldKeyMap;\n\nuse popup_menu::*;\n\nuse shortcut_list::*;\n\nuse side_menu::*;\n\nuse windows::Win32::{\n\n Foundation::*, Graphics::Gdi::*, Storage::FileSystem::*, UI::Controls::RichEdit::WM_NOTIFY, UI::Controls::*,\n\n UI::HiDpi::*, UI::Input::KeyboardAndMouse::*, UI::Shell::*, UI::WindowsAndMessaging::*,\n\n};\n\nuse wrapper::*;\n\n\n", "file_path": "key_config/src/main.rs", "rank": 57, "score": 4.1058940021275445 }, { "content": " .flat_map(|cat| &cat.items)\n\n .filter(|i| i.id != item.id && !i.keys.is_empty() && i.keys == item.keys)\n\n .map(|i| i.name.as_str())\n\n .collect::<Vec<_>>();\n\n if dup.is_empty() {\n\n self.shortcut_list.set_dup(index, None);\n\n } else {\n\n self.shortcut_list.set_dup(index, Some(&dup.join(\", \")));\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl wita::EventHandler for Box<Application> {\n\n fn resizing(&mut self, window: &wita::Window, size: wita::PhysicalSize<u32>) {\n\n let dpi = window.dpi();\n\n let mut window_size = size.to_logical(dpi);\n\n const WIDTH: u32 = (SHORTCUT_MENU_NAME_COLUMN_WIDTH + SHORTCUT_MENU_KEYS_COLUMN_WIDTH) as _;\n\n if window_size.width < WIDTH {\n\n window_size.width = WIDTH;\n", "file_path": "key_config/src/application.rs", "rank": 58, "score": 4.09441437310857 }, { "content": " .stdout(std::process::Stdio::piped())\n\n .spawn();\n\n match key_config_process {\n\n Ok(process) => {\n\n use std::os::windows::io::AsRawHandle;\n\n let mut p = 0u64;\n\n let mut byte = 0;\n\n unsafe {\n\n let handle = HANDLE(process.stdout.as_ref().unwrap().as_raw_handle() as _);\n\n let ret = ReadFile(\n\n handle,\n\n &mut p as *mut _ as _,\n\n std::mem::size_of::<u64>() as _,\n\n &mut byte,\n\n std::ptr::null_mut(),\n\n );\n\n if ret.as_bool() {\n\n self.key_config = Some(HWND(p as _));\n\n }\n\n }\n", "file_path": "mmaccel/src/context.rs", "rank": 59, "score": 4.0632202800598325 }, { "content": " DeleteObject(font);\n\n }\n\n }\n\n }\n\n}\n\n\n\nunsafe extern \"system\" fn proc(\n\n hwnd: HWND,\n\n msg: u32,\n\n wparam: WPARAM,\n\n lparam: LPARAM,\n\n _id: usize,\n\n data_ptr: usize,\n\n) -> LRESULT {\n\n let editor = (data_ptr as *mut Editor).as_mut().unwrap();\n\n match msg {\n\n WM_KEYDOWN | WM_SYSKEYDOWN => {\n\n let result = editor.result.as_mut().unwrap();\n\n get_keyboard_state(&mut editor.input_keys);\n\n result.keys.keyboard_state(&editor.input_keys);\n", "file_path": "key_config/src/editor.rs", "rank": 60, "score": 4.037827462779789 }, { "content": "use log4rs::append::{console, console::ConsoleAppender, file::FileAppender};\n\nuse log4rs::config::{Appender, Config, Root};\n\nuse log4rs::encode::pattern::PatternEncoder;\n\nuse menu::*;\n\nuse once_cell::sync::OnceCell;\n\nuse windows::Win32::{\n\n Foundation::*, Media::*, Storage::FileSystem::*, System::Diagnostics::Debug::*, System::Memory::*,\n\n System::SystemServices::*, System::WindowsProgramming::*, System::IO::*, UI::Input::KeyboardAndMouse::*,\n\n UI::WindowsAndMessaging::*,\n\n};\n\nuse wrapper::*;\n\n\n\nstatic mut CONTEXT: OnceCell<Context> = OnceCell::new();\n\n\n", "file_path": "mmaccel/src/lib.rs", "rank": 61, "score": 3.950016451109973 }, { "content": "use crate::*;\n\n\n\npub struct SideMenu {\n\n hwnd: HWND,\n\n}\n\n\n\nimpl SideMenu {\n\n pub fn new(\n\n parent: &wita::Window,\n\n pt: impl Into<wita::LogicalPosition<i32>>,\n\n size: impl Into<wita::LogicalSize<i32>>,\n\n ) -> Result<Self, Error> {\n\n let dpi = parent.dpi();\n\n let pt = pt.into().to_physical(dpi as _);\n\n let size = size.into().to_physical(dpi as _);\n\n let class_name = to_wchar(\"SysListView32\");\n\n unsafe {\n\n let hwnd = CreateWindowExW(\n\n WINDOW_EX_STYLE(0),\n\n PWSTR(class_name.as_ptr() as _),\n", "file_path": "key_config/src/side_menu.rs", "rank": 62, "score": 3.9145290686224987 }, { "content": "use crate::*;\n\n\n\npub struct ShortcutList {\n\n hwnd: HWND,\n\n}\n\n\n\nimpl ShortcutList {\n\n pub fn new(\n\n parent: &wita::Window,\n\n pt: impl Into<wita::LogicalPosition<i32>>,\n\n size: impl Into<wita::LogicalSize<i32>>,\n\n columns_size: [i32; 2],\n\n ) -> Result<Self, Error> {\n\n let dpi = parent.dpi() as i32;\n\n let pt = pt.into().to_physical(dpi);\n\n let size = size.into().to_physical(dpi);\n\n let class_name = to_wchar(\"SysListView32\");\n\n unsafe {\n\n let hwnd = CreateWindowExW(\n\n WINDOW_EX_STYLE(0),\n", "file_path": "key_config/src/shortcut_list.rs", "rank": 63, "score": 3.881971863955632 }, { "content": " let hwnd = GetDlgItem(mmd_window, *id as _);\n\n if IsWindowVisible(hwnd).as_bool() {\n\n PostMessageW(hwnd, BM_CLICK, WPARAM(0), LPARAM(0));\n\n }\n\n }\n\n log::debug!(\"UnfoldAll\");\n\n },\n\n }\n\n }\n\n\n\n get_keyboard_state(&mut self.input);\n\n self.input_keys.keyboard_state(&self.input);\n\n log::debug!(\"key_down input_keys = {:?}\", self.input_keys);\n\n if get_class_name(hwnd).to_ascii_uppercase() == \"EDIT\" {\n\n if self.input_keys == Keys::from_slice(&[VK_TAB.0 as u32]) {\n\n self.tabstop = true;\n\n return;\n\n }\n\n }\n\n if let Some(item) = self.handler.get(&self.input_keys) {\n", "file_path": "mmaccel/src/handler.rs", "rank": 64, "score": 3.8439086243410197 }, { "content": "#[no_mangle]\n\npub unsafe extern \"system\" fn Direct3DCreate9(version: u32) -> *mut std::ffi::c_void {\n\n if let Some(d3d9) = D3D9.get() {\n\n let f = d3d9\n\n .get::<unsafe fn(u32) -> *mut std::ffi::c_void>(b\"Direct3DCreate9\")\n\n .unwrap();\n\n f(version)\n\n } else {\n\n std::ptr::null_mut()\n\n }\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"system\" fn Direct3DCreate9Ex(version: u32, pp: *mut std::ffi::c_void) -> u32 {\n\n if let Some(d3d9) = D3D9.get() {\n\n let f = d3d9\n\n .get::<unsafe fn(u32, *mut std::ffi::c_void) -> u32>(b\"Direct3DCreate9Ex\")\n\n .unwrap();\n\n f(version, pp)\n\n } else {\n", "file_path": "d3d9/src/lib.rs", "rank": 65, "score": 3.7067460267272425 }, { "content": "#![allow(clippy::fn_to_numeric_cast)]\n\n#![allow(clippy::missing_safety_doc)]\n\n\n\nmod context;\n\nmod file_monitor;\n\nmod handler;\n\nmod injection;\n\nmod menu;\n\nmod mmd;\n\nmod mmd_map;\n\n\n\n/*\n\nuse bindings::Windows::Win32::{\n\n Debug::*, FileSystem::*, KeyboardAndMouseInput::*, Multimedia::*, SystemServices::*, WindowsAndMessaging::*,\n\n WindowsProgramming::*,\n\n};\n\n*/\n\nuse context::*;\n\nuse file_monitor::*;\n\nuse injection::*;\n", "file_path": "mmaccel/src/lib.rs", "rank": 66, "score": 3.668725326068879 }, { "content": " pub fn with_capacity(n: usize) -> Self {\n\n Self(Vec::with_capacity(n))\n\n }\n\n\n\n #[inline]\n\n pub fn is_empty(&self) -> bool {\n\n self.0.is_empty()\n\n }\n\n\n\n #[inline]\n\n pub fn clear(&mut self) {\n\n self.0.clear();\n\n }\n\n\n\n #[inline]\n\n pub fn keyboard_state(&mut self, v: &[u8]) {\n\n #[inline]\n\n fn is_lr_key(k: u32) -> bool {\n\n k == VK_LSHIFT.0 as u32\n\n || k == VK_RSHIFT.0 as u32\n", "file_path": "key_map/src/lib.rs", "rank": 67, "score": 3.6076296117405238 }, { "content": "#![allow(clippy::missing_safety_doc)]\n\n\n\nuse libloading::Library;\n\nuse once_cell::sync::OnceCell;\n\nuse windows::Win32::{Foundation::*, System::LibraryLoader::*, System::SystemServices::*, UI::WindowsAndMessaging::*};\n\nuse wrapper::*;\n\n\n\nstatic mut MSIMG32: OnceCell<Library> = OnceCell::new();\n\nstatic mut D3D9: OnceCell<Library> = OnceCell::new();\n\nstatic mut MME: OnceCell<Library> = OnceCell::new();\n\nstatic mut MMACCEL: OnceCell<Library> = OnceCell::new();\n\n\n", "file_path": "d3d9/src/lib.rs", "rank": 68, "score": 3.5693972107486087 }, { "content": "use std::io::Write;\n\n\n", "file_path": "mmaccel/build.rs", "rank": 69, "score": 3.5366533232645754 }, { "content": " Ok(serde_json::from_reader(BufReader::new(file))?)\n\n }\n\n\n\n #[inline]\n\n pub fn insert(&mut self, k: impl AsRef<str>, v: Keys) {\n\n self.0.insert(k.as_ref().into(), v);\n\n }\n\n\n\n #[inline]\n\n pub fn get(&self, k: impl AsRef<str>) -> Option<&Keys> {\n\n self.0.get(k.as_ref())\n\n }\n\n\n\n #[inline]\n\n pub fn get_mut(&mut self, k: impl AsRef<str>) -> Option<&mut Keys> {\n\n self.0.get_mut(k.as_ref())\n\n }\n\n\n\n #[inline]\n\n pub fn iter(&self) -> std::collections::hash_map::Iter<String, Keys> {\n", "file_path": "key_map/src/lib.rs", "rank": 70, "score": 3.3712564210558753 }, { "content": " }\n\n}\n\n\n\n#[derive(Clone, PartialEq, Eq, Hash, Debug, serde::Serialize, serde::Deserialize)]\n\npub struct Keys(Vec<u32>);\n\n\n\nimpl Keys {\n\n #[inline]\n\n pub fn new() -> Self {\n\n Self(vec![])\n\n }\n\n\n\n #[inline]\n\n pub fn from_slice(v: &[u32]) -> Self {\n\n let mut v = v.to_vec();\n\n v.sort_unstable();\n\n Self(v)\n\n }\n\n\n\n #[inline]\n", "file_path": "key_map/src/lib.rs", "rank": 71, "score": 3.20099706281829 }, { "content": " self.settings.window_position = self.main_window.position();\n\n self.settings.window_size = self.main_window.inner_size().to_logical(self.main_window.dpi());\n\n if let Err(e) = self.settings.to_file() {\n\n log::error!(\"{}\", e);\n\n }\n\n }\n\n}\n\n\n\npub const WM_KEY_CONFIG_EDIT_APPLY: u32 = WM_APP + 10;\n\npub const WM_KEY_CONFIG_EDIT_CANCEL: u32 = WM_APP + 11;\n\n\n\nunsafe extern \"system\" fn main_window_proc(\n\n hwnd: HWND,\n\n msg: u32,\n\n wparam: WPARAM,\n\n lparam: LPARAM,\n\n _id: usize,\n\n data_ptr: usize,\n\n) -> LRESULT {\n\n let app = (data_ptr as *mut Application).as_mut().unwrap();\n", "file_path": "key_config/src/application.rs", "rank": 72, "score": 3.0724301413690824 }, { "content": " unsafe {\n\n SendMessageW(\n\n self.hwnd,\n\n LVM_GETNEXTITEM,\n\n WPARAM(std::usize::MAX),\n\n LPARAM((LVNI_ALL | LVNI_SELECTED) as _),\n\n )\n\n .0 as _\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn set_index(&mut self, index: u32) {\n\n unsafe {\n\n const STATES: u32 = LVIS_SELECTED | LVIS_FOCUSED;\n\n let item = LVITEMW {\n\n iItem: index as _,\n\n mask: LVIF_STATE,\n\n stateMask: STATES,\n\n state: STATES,\n", "file_path": "key_config/src/side_menu.rs", "rank": 73, "score": 3.0509566050937327 }, { "content": "use windows::Win32::{\n\n Foundation::*, System::LibraryLoader::*, System::Threading::*, UI::Input::KeyboardAndMouse::*, UI::Shell::*,\n\n UI::WindowsAndMessaging::*,\n\n};\n\n\n\n#[inline]\n", "file_path": "wrapper/src/lib.rs", "rank": 74, "score": 2.8754764757713924 }, { "content": " NM_SETFOCUS => {\n\n if app.editor.is_visible() {\n\n if let Some(ret) = app.editor.end() {\n\n app.update_keys_to_file(ret.category, ret.item, Some(ret.keys));\n\n }\n\n }\n\n }\n\n _ => {}\n\n }\n\n LRESULT(0)\n\n } else if nmhdr.hwndFrom == app.shortcut_list.handle() {\n\n match nmhdr.code {\n\n NM_CUSTOMDRAW => {\n\n let subitem_stage = CDDS_ITEMPREPAINT.0 | CDDS_SUBITEM.0;\n\n let mut ncd = (lparam.0 as *mut NMLVCUSTOMDRAW).as_mut().unwrap();\n\n match ncd.nmcd.dwDrawStage {\n\n CDDS_PREPAINT => {\n\n return LRESULT(CDRF_NOTIFYITEMDRAW as _);\n\n }\n\n CDDS_ITEMPREPAINT => {\n", "file_path": "key_config/src/application.rs", "rank": 75, "score": 2.8410514839734082 }, { "content": " fn default() -> Self {\n\n Self {\n\n raise_timer_resolution: true,\n\n kill_focus_with_click: true,\n\n }\n\n }\n\n}\n\n\n\nconst MMD_MAP_PATH: &str = \"MMAccel/mmd_map.json\";\n\nconst KEY_MAP_PATH: &str = \"MMAccel/key_map.json\";\n\n\n\npub struct Context {\n\n module_path: std::path::PathBuf,\n\n settings: Settings,\n\n mmd_map: MmdMap,\n\n _call_window_proc_ret: HookHandle,\n\n _get_message_handle: HookHandle,\n\n mmd_window: Option<MmdWindow>,\n\n handler: Handler,\n\n file_monitor: FileMonitor,\n", "file_path": "mmaccel/src/context.rs", "rank": 76, "score": 2.7053025608661567 }, { "content": " m\n\n });\n\n self.handler = Handler::new(&self.mmd_map, key_map);\n\n }\n\n }\n\n _ => {}\n\n }\n\n false\n\n }\n\n\n\n pub fn get_key_state(&self, vk: u32) -> Option<u16> {\n\n if vk >= 0x07 {\n\n if self.handler.is_pressed(vk) {\n\n Some(0xff80)\n\n } else {\n\n Some(0x0000)\n\n }\n\n } else {\n\n None\n\n }\n", "file_path": "mmaccel/src/context.rs", "rank": 77, "score": 2.672765653006795 }, { "content": "\n\n pub fn call_window_proc_ret(&mut self, data: &CWPRETSTRUCT) {\n\n match data.message {\n\n WM_CREATE if get_class_name(data.hwnd) == \"Polygon Movie Maker\" => {\n\n log::debug!(\"created MainWindow\");\n\n self.mmd_window = Some(MmdWindow::new(data.hwnd, &self.settings));\n\n let latest_key_map = self.latest_key_map.clone();\n\n let mmd_window = self.mmd_window.as_ref().unwrap().window;\n\n self.file_monitor.start(\"MMAccel\", move |path| unsafe {\n\n if path.file_name() == Some(std::ffi::OsStr::new(\"key_map.json\")) {\n\n latest_key_map.store(false, atomic::Ordering::SeqCst);\n\n PostMessageW(mmd_window, WM_APP, WPARAM(0), LPARAM(0));\n\n log::debug!(\"update key_map.json\");\n\n }\n\n });\n\n }\n\n WM_CREATE if get_class_name(data.hwnd) == \"MicWindow\" => {\n\n log::debug!(\"created SubWindow\");\n\n if let Some(mmd_window) = self.mmd_window.as_mut() {\n\n mmd_window.sub_window = Some(data.hwnd);\n", "file_path": "mmaccel/src/context.rs", "rank": 78, "score": 2.632453351549878 }, { "content": " .filter(|(_, item)| matches!(item.kind, mmd_map::ItemKind::Key(_)))\n\n .for_each(|(_, item)| {\n\n key_states.insert(item.kind.as_key().unwrap(), false);\n\n });\n\n let mut folds = vec![];\n\n let mut unfolds = vec![];\n\n mmd_map\n\n .iter()\n\n .filter(|(_, item)| matches!(item.kind, mmd_map::ItemKind::Fold(_, _)))\n\n .for_each(|(_, item)| {\n\n let (hide, show) = item.kind.as_fold().unwrap();\n\n folds.push(hide);\n\n unfolds.push(show);\n\n });\n\n let mut handler = HashMap::new();\n\n for (k, v) in key_map.into_iter() {\n\n if let Some(item) = mmd_map.get(&k) {\n\n handler.insert(v, item.kind);\n\n } else {\n\n log::error!(\"handler.insert error: {}\", k);\n", "file_path": "mmaccel/src/handler.rs", "rank": 79, "score": 2.588989108629257 }, { "content": " match msg {\n\n WM_NOTIFY => {\n\n let nmhdr = (lparam.0 as *const NMHDR).as_ref().unwrap();\n\n if nmhdr.hwndFrom == app.side_menu.handle() {\n\n match nmhdr.code {\n\n LVN_ITEMCHANGED => {\n\n let nlv = (lparam.0 as *const NMLISTVIEW).as_ref().unwrap();\n\n if app.editor.is_visible() {\n\n if let Some(ret) = app.editor.end() {\n\n app.update_keys_to_file(ret.category, ret.item, Some(ret.keys));\n\n }\n\n }\n\n if nlv.uNewState & LVIS_SELECTED != 0 {\n\n app.shortcut_list.clear();\n\n for item in app.key_table[app.side_menu.current_index()].items.iter() {\n\n app.shortcut_list.push(&item.name, &item.keys);\n\n }\n\n app.update_shortcut_list();\n\n }\n\n }\n", "file_path": "key_config/src/application.rs", "rank": 80, "score": 2.574712087247387 }, { "content": " .map_or(false, |mw| mw.sub_window == Some(data.hwnd)) =>\n\n {\n\n let mmd_window = self.mmd_window.as_mut().unwrap();\n\n mmd_window.sub_window = None;\n\n log::debug!(\"destroyed SubWindow\");\n\n }\n\n _ => {}\n\n }\n\n }\n\n\n\n pub fn get_message(&mut self, data: &mut MSG) -> bool {\n\n match data.message {\n\n WM_COMMAND => {\n\n if let Some(mmd_window) = self.mmd_window.as_ref() {\n\n match mmd_window.menu.recv_command(data.wParam) {\n\n Some(MenuItem::LaunchConfig) => {\n\n let path = self.module_path.join(\"MMAccel/key_config.exe\");\n\n let key_config_process = std::process::Command::new(&path)\n\n .current_dir(self.module_path.join(\"MMAccel\"))\n\n .arg(\"--mmd\")\n", "file_path": "mmaccel/src/context.rs", "rank": 81, "score": 2.5056255292227934 }, { "content": "\n\n#[derive(Clone, Debug)]\n\npub struct Item {\n\n pub name: String,\n\n pub kind: ItemKind,\n\n}\n\n\n\nimpl Item {\n\n fn new(a: &[serde_json::Value]) -> Option<Self> {\n\n if a.len() < 2 {\n\n return None;\n\n }\n\n Some(Self {\n\n name: a[0].as_str()?.to_string(),\n\n kind: ItemKind::new(a)?,\n\n })\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "mmaccel/src/mmd_map.rs", "rank": 82, "score": 2.5002876202995457 }, { "content": " if ret == LRESULT(0) {\n\n None\n\n } else {\n\n Some(rc)\n\n }\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn clear(&mut self) {\n\n unsafe {\n\n SendMessageW(self.hwnd, LVM_DELETEALLITEMS, WPARAM(0), LPARAM(0));\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn push(&mut self, name: impl AsRef<str>, keys: &Keys) {\n\n unsafe {\n\n let name = to_wchar(name.as_ref());\n\n let item = LVITEMW {\n", "file_path": "key_config/src/shortcut_list.rs", "rank": 83, "score": 2.447780103499345 }, { "content": "\n\nimpl HookHandle {\n\n pub fn new(id: WINDOWS_HOOK_ID, f: HOOKPROC, thread_id: u32) -> Self {\n\n unsafe { Self(SetWindowsHookExA(id, f, HINSTANCE(0), thread_id)) }\n\n }\n\n}\n\n\n\nimpl Drop for HookHandle {\n\n fn drop(&mut self) {\n\n unsafe {\n\n if !self.0.is_invalid() {\n\n UnhookWindowsHookEx(self.0);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "wrapper/src/lib.rs", "rank": 84, "score": 2.342914772169499 }, { "content": " || GetParent(data.hwnd) == main_window\n\n || Some(data.hwnd) == sub_window\n\n || sub_window.map_or(false, |sw| GetParent(data.hwnd) == sw);\n\n if cond {\n\n self.handler\n\n .key_down(data.wParam.0 as u32, main_window, sub_window, data.hwnd);\n\n return true;\n\n }\n\n },\n\n WM_KEYUP | WM_SYSKEYUP => unsafe {\n\n let mmd_window = self.mmd_window.as_ref().unwrap();\n\n let main_window = mmd_window.window;\n\n let sub_window = mmd_window.sub_window;\n\n let cond = data.hwnd == main_window\n\n || GetParent(data.hwnd) == main_window\n\n || Some(data.hwnd) == sub_window\n\n || sub_window.map_or(false, |sw| GetParent(data.hwnd) == sw);\n\n if cond {\n\n self.handler.key_up(data.wParam.0 as u32);\n\n return true;\n", "file_path": "mmaccel/src/context.rs", "rank": 85, "score": 2.332494932147372 }, { "content": "\n\n #[inline]\n\n fn set_keys(&mut self, category: usize, item: usize, keys: Option<Keys>) {\n\n (self.0)[category].items[item].keys = keys.unwrap_or_default();\n\n }\n\n}\n\n\n\nimpl std::ops::Index<usize> for KeyTable {\n\n type Output = Category;\n\n\n\n fn index(&self, index: usize) -> &Self::Output {\n\n &self.0[index]\n\n }\n\n}\n\n\n\nconst SETTINGS_FILE_NAME: &str = \"key_config_settrings.json\";\n\n\n", "file_path": "key_config/src/application.rs", "rank": 87, "score": 2.280182011926427 }, { "content": " pub fn build(self) -> Menu<T> {\n\n unsafe {\n\n DrawMenuBar(self.hwnd);\n\n Menu {\n\n menu: self.menu,\n\n table: self.table,\n\n }\n\n }\n\n }\n\n}\n\n\n\npub struct Menu<T> {\n\n menu: HMENU,\n\n table: Vec<(std::mem::Discriminant<T>, std::mem::Discriminant<MenuItemType>)>,\n\n}\n\n\n\nimpl<T: MenuCommand> Menu<T> {\n\n #[inline]\n\n fn is_checked_item(&self, id: u32) -> bool {\n\n unsafe {\n", "file_path": "mmaccel/src/menu.rs", "rank": 88, "score": 2.1648063104837765 }, { "content": " let item = LVITEMW {\n\n iItem: index as _,\n\n iSubItem: 1,\n\n mask: LVIF_TEXT,\n\n pszText: PWSTR(text.as_ptr() as _),\n\n cchTextMax: text.len() as _,\n\n ..Default::default()\n\n };\n\n SendMessageW(self.hwnd, LVM_SETITEMW, WPARAM(0), LPARAM(&item as *const _ as _));\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn set_dup(&mut self, index: usize, text: Option<&str>) {\n\n unsafe {\n\n let text = if let Some(text) = text {\n\n to_wchar(text)\n\n } else {\n\n to_wchar(\"\")\n\n };\n", "file_path": "key_config/src/shortcut_list.rs", "rank": 89, "score": 2.153780760904386 }, { "content": "pub struct MmdMap(Vec<(String, Item)>);\n\n\n\nimpl MmdMap {\n\n pub fn from_file(path: impl AsRef<std::path::Path>) -> std::io::Result<Self> {\n\n fn items(m: &mut Vec<(String, Item)>, v: &Value) -> Option<()> {\n\n match v {\n\n Value::Object(obj) => {\n\n for (key, value) in obj.iter() {\n\n if value.is_object() {\n\n items(m, value)?;\n\n } else if let Some(a) = value.as_array() {\n\n m.push((key.clone(), Item::new(a)?));\n\n }\n\n }\n\n Some(())\n\n }\n\n _ => None,\n\n }\n\n }\n\n\n", "file_path": "mmaccel/src/mmd_map.rs", "rank": 90, "score": 2.1371146435477115 }, { "content": " m.insert(\"MenuFileSave\", Keys::from_slice(&[VK_CONTROL.0 as _, b'S' as _]));\n\n m.insert(\"InterpolationAuto\", Keys::from_slice(&[VK_OEM_6.0 as _]));\n\n m\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn vk_to_string_test() {\n\n assert!(vk_to_string(VK_LEFT.0 as _) == \"Left\");\n\n assert!(vk_to_string(b'A' as _) == \"A\");\n\n assert!(vk_to_string(VK_NUMPAD0.0 as _) == \"Num0\");\n\n assert!(vk_to_string(VK_F5.0 as _) == \"F5\");\n\n assert!(vk_to_string(0xdf) == \"(223)\");\n\n }\n\n\n\n #[test]\n", "file_path": "key_map/src/lib.rs", "rank": 91, "score": 2.0374813811819705 }, { "content": " ..Default::default()\n\n };\n\n SendMessageW(self.hwnd, LVM_SETITEMW, WPARAM(0), LPARAM(&item as *const _ as _));\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn push(&mut self, text: impl AsRef<str>) {\n\n unsafe {\n\n let text = to_wchar(text.as_ref());\n\n let item = LVITEMW {\n\n iItem: self.size() as _,\n\n iSubItem: 0,\n\n mask: LVIF_TEXT,\n\n pszText: PWSTR(text.as_ptr() as _),\n\n cchTextMax: text.len() as _,\n\n ..Default::default()\n\n };\n\n SendMessageW(self.hwnd, LVM_INSERTITEMW, WPARAM(0), LPARAM(&item as *const _ as _));\n\n }\n", "file_path": "key_config/src/side_menu.rs", "rank": 92, "score": 2.0229488885301024 }, { "content": " // VK_LMENU => \"LAlt\".into(),\n\n // VK_RMENU => \"RAlt\".into(),\n\n VK_OEM_MINUS => \"-\".into(),\n\n VK_OEM_PLUS => \";\".into(),\n\n VK_OEM_COMMA => \",\".into(),\n\n VK_OEM_PERIOD => \".\".into(),\n\n VK_OEM_1 => \":\".into(),\n\n VK_OEM_2 => \"/\".into(),\n\n VK_OEM_3 => \"@\".into(),\n\n VK_OEM_4 => \"[\".into(),\n\n VK_OEM_5 => \"\\\\\".into(),\n\n VK_OEM_6 => \"]\".into(),\n\n VK_OEM_7 => \"^\".into(),\n\n VK_OEM_102 => \"_\".into(),\n\n l => match l.0 {\n\n v @ ZERO..=Z => (v as u8 as char).to_string(),\n\n v @ NUMPAD0..=NUMPAD9 => format!(\"Num{}\", v - VK_NUMPAD0.0),\n\n v @ F1..=F24 => format!(\"F{}\", v - VK_F1.0 + 1),\n\n _ => format!(\"({})\", l.0),\n\n },\n", "file_path": "key_map/src/lib.rs", "rank": 93, "score": 2.0084120248980546 }, { "content": " let file = File::open(path)?;\n\n let data: Value = serde_json::from_reader(BufReader::new(file))?;\n\n let mut m = Vec::new();\n\n items(&mut m, &data).ok_or(std::io::ErrorKind::InvalidData)?;\n\n Ok(Self(m))\n\n }\n\n\n\n #[inline]\n\n pub fn get(&self, key: impl AsRef<str>) -> Option<&Item> {\n\n self.0.iter().find(|(k, _)| k == key.as_ref()).map(|(_, item)| item)\n\n }\n\n\n\n #[inline]\n\n pub fn iter(&self) -> std::slice::Iter<(String, Item)> {\n\n self.0.iter()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "mmaccel/src/mmd_map.rs", "rank": 94, "score": 1.971620387843208 }, { "content": " }\n\n }\n\n WM_ERASEBKGND => {\n\n let mut rc = RECT::default();\n\n GetClientRect(hwnd, &mut rc);\n\n FillRect(\n\n HDC(wparam.0 as _),\n\n &rc,\n\n HBRUSH(GetStockObject(GET_STOCK_OBJECT_FLAGS(COLOR_BTNFACE.0 + 1)).0 as _),\n\n );\n\n LRESULT(1)\n\n }\n\n WM_COMMAND => {\n\n if (wparam.0 & 0xffff) as u32 == IDM_MENU_DETACH {\n\n app.update_keys_to_file(app.popup_menu.category(), app.popup_menu.item(), None);\n\n }\n\n LRESULT(0)\n\n }\n\n WM_KEY_CONFIG_EDIT_APPLY => {\n\n if app.editor.is_visible() {\n", "file_path": "key_config/src/application.rs", "rank": 95, "score": 1.9106260577196204 }, { "content": " .accept_drag_files(true)\n\n .style(\n\n wita::WindowStyle::default()\n\n .has_maximize_box(false)\n\n .has_minimize_box(false),\n\n )\n\n .build()?;\n\n if std::env::args().any(|arg| arg == \"--mmd\") {\n\n use std::os::windows::io::AsRawHandle;\n\n let stdout = std::io::stdout();\n\n let handle = HANDLE(stdout.lock().as_raw_handle() as _);\n\n let p = main_window.raw_handle() as u64;\n\n let mut byte = 0;\n\n unsafe {\n\n WriteFile(\n\n handle,\n\n &p as *const _ as _,\n\n std::mem::size_of::<u64>() as _,\n\n &mut byte,\n\n std::ptr::null_mut(),\n", "file_path": "key_config/src/application.rs", "rank": 96, "score": 1.9030334492358816 }, { "content": " let order = order.as_object().ok_or(Error::InvalidData)?;\n\n let category_order = order\n\n .get(\"categories\")\n\n .and_then(|a| a.as_array())\n\n .ok_or(Error::InvalidData)?;\n\n let item_order = order\n\n .get(\"items\")\n\n .and_then(|a| a.as_object())\n\n .ok_or(Error::InvalidData)?;\n\n let mut table = vec![];\n\n for category in category_order.iter() {\n\n let category = category.as_str().ok_or(Error::InvalidData)?.to_string();\n\n let item = mmd_map\n\n .get(&category)\n\n .and_then(|a| a.as_object())\n\n .ok_or(Error::InvalidData)?;\n\n let item_order = item_order\n\n .get(&category)\n\n .and_then(|a| a.as_array())\n\n .ok_or(Error::InvalidData)?;\n", "file_path": "key_config/src/application.rs", "rank": 97, "score": 1.8764001789489972 }, { "content": " } else {\n\n Self::Io(e)\n\n }\n\n }\n\n\n\n pub fn hresult(e: windows::core::Error, text: impl AsRef<str>) -> Self {\n\n Self::HResult(e, text.as_ref().to_string())\n\n }\n\n\n\n pub fn json_file(e: serde_json::Error, path: impl AsRef<std::path::Path>) -> Self {\n\n Self::JsonFile(e, path.as_ref().to_string_lossy().to_string())\n\n }\n\n}\n\n\n\nimpl From<std::io::Error> for Error {\n\n fn from(src: std::io::Error) -> Error {\n\n Error::Io(src)\n\n }\n\n}\n\n\n\nimpl From<wita::ApiError> for Error {\n\n fn from(src: wita::ApiError) -> Error {\n\n Error::Wita(src)\n\n }\n\n}\n", "file_path": "key_config/src/error.rs", "rank": 98, "score": 1.8466715936043032 }, { "content": " pub fn item(mut self, v: &T, text: impl AsRef<str>) -> Self {\n\n unsafe {\n\n let name = to_wchar(text);\n\n let mut info = MENUITEMINFOW {\n\n cbSize: std::mem::size_of::<MENUITEMINFOW>() as _,\n\n fMask: MIIM_TYPE | MIIM_ID,\n\n fType: MFT_STRING,\n\n dwTypeData: PWSTR(name.as_ptr() as _),\n\n wID: ROOT_ID + self.id,\n\n ..Default::default()\n\n };\n\n InsertMenuItemW(self.menu, self.index, false, &mut info);\n\n self.table\n\n .push((std::mem::discriminant(v), std::mem::discriminant(&MenuItemType::Item)));\n\n self.index += 1;\n\n self.id += 1;\n\n self\n\n }\n\n }\n\n\n", "file_path": "mmaccel/src/menu.rs", "rank": 99, "score": 1.8466715936043032 } ]
Rust
src/processor.rs
jswh/wslexe
964319b2cb2b830c54289138db7c2deeb3b938b9
use std::borrow::Cow; use std::env; use std::io::{self, Write}; use std::path::{Component, Path, Prefix, PrefixComponent}; use std::process; use std::process::{Command, Stdio}; use regex::bytes::Regex; fn get_drive_letter(pc: &PrefixComponent) -> Option<String> { let drive_byte = match pc.kind() { Prefix::VerbatimDisk(d) => Some(d), Prefix::Disk(d) => Some(d), _ => None, }; drive_byte.map(|drive_letter| { String::from_utf8(vec![drive_letter]) .expect(&format!("Invalid drive letter: {}", drive_letter)) .to_lowercase() }) } fn get_prefix_for_drive(drive: &str) -> String { format!("/mnt/{}", drive) } fn translate_path_to_unix(argument: String) -> String { { let (argname, arg) = if argument.starts_with("--") && argument.contains('=') { let parts: Vec<&str> = argument.splitn(2, '=').collect(); (format!("{}=", parts[0]), parts[1]) } else { ("".to_owned(), argument.as_ref()) }; let win_path = Path::new(arg); if win_path.is_absolute() || win_path.exists() { let wsl_path: String = win_path.components().fold(String::new(), |mut acc, c| { match c { Component::Prefix(prefix_comp) => { let d = get_drive_letter(&prefix_comp) .expect(&format!("Cannot handle path {:?}", win_path)); acc.push_str(&get_prefix_for_drive(&d)); } Component::RootDir => {} _ => { let d = c.as_os_str() .to_str() .expect(&format!("Cannot represent path {:?}", win_path)) .to_owned(); if !acc.is_empty() && !acc.ends_with('/') { acc.push('/'); } acc.push_str(&d); } }; acc }); return format!("{}{}", &argname, &wsl_path); } } argument } fn translate_path_to_win(line: &[u8]) -> Cow<[u8]> { lazy_static! { static ref WSLPATH_RE: Regex = Regex::new(r"(?m-u)/mnt/(?P<drive>[A-Za-z])(?P<path>/\S*)") .expect("Failed to compile WSLPATH regex"); } WSLPATH_RE.replace_all(line, &b"${drive}:${path}"[..]) } fn shell_escape(arg: String) -> String { if arg.contains(" ") { return vec![String::from("\""), arg, String::from("\"")].join(""); } arg.replace("\n", "$'\n'"); arg.replace(";", "$';'") } pub fn execute(interactive: bool) { let mut exe_path = env::current_exe().unwrap(); exe_path.pop(); let wslexerc_path = format!("{}\\.wslexerc", exe_path.display()); let mut cmd_args = Vec::new(); let mut wsl_args: Vec<String> = vec![]; let wsl_cmd: String; let exe: String = env::args().next().unwrap(); let path = Path::new(&exe); let file_stem = path.file_stem().unwrap().to_str().unwrap(); wsl_args.push(String::from(file_stem)); wsl_args.extend(env::args().skip(1).map(translate_path_to_unix)); if Path::new(&wslexerc_path).exists() { wsl_cmd = format!( "source {};{}", translate_path_to_unix(wslexerc_path), if interactive { wsl_args.join(" ") }else{ wsl_args.into_iter().map(shell_escape).collect::<Vec<String>>().join(" ")} ); } else { wsl_cmd = wsl_args.join(" "); } let exe_cmd: String; if interactive { exe_cmd = "-ic".to_string(); } else { exe_cmd = "-c".to_string(); } cmd_args.push("bash".to_string()); cmd_args.push(exe_cmd); cmd_args.push(wsl_cmd.clone()); let stdin_mode = if wsl_cmd.ends_with("--version") { Stdio::null() } else { Stdio::inherit() }; let mut wsl_proc_setup = Command::new("wsl.exe"); wsl_proc_setup.args(&cmd_args).stdin(stdin_mode); let status; const TRANSLATED_CMDS: &[&str] = &["rev-parse", "remote"]; let translate_output = env::args() .skip(1) .position(|arg| { TRANSLATED_CMDS .iter() .position(|&tcmd| tcmd == arg) .is_some() }) .is_some(); if translate_output { let wsl_proc = wsl_proc_setup .stdout(Stdio::piped()) .spawn() .expect(&format!("Failed to execute command '{}'", &wsl_cmd)); let output = wsl_proc .wait_with_output() .expect(&format!("Failed to wait for wsl call '{}'", &wsl_cmd)); status = output.status; let output_bytes = output.stdout; let mut stdout = io::stdout(); stdout .write_all(&translate_path_to_win(&output_bytes)) .expect("Failed to write wsl output"); stdout.flush().expect("Failed to flush output"); } else { status = wsl_proc_setup .status() .expect(&format!("Failed to execute command '{}'", &wsl_cmd)); } if let Some(exit_code) = status.code() { process::exit(exit_code); } } #[test] fn win_to_unix_path_trans() { assert_eq!( translate_path_to_unix("d:\\test\\file.txt".to_string()), "/mnt/d/test/file.txt" ); assert_eq!( translate_path_to_unix("C:\\Users\\test\\a space.txt".to_string()), "/mnt/c/Users/test/a space.txt" ); } #[test] fn unix_to_win_path_trans() { assert_eq!( &*translate_path_to_win(b"/mnt/d/some path/a file.md"), b"d:/some path/a file.md" ); assert_eq!( &*translate_path_to_win(b"origin /mnt/c/path/ (fetch)"), b"origin c:/path/ (fetch)" ); let multiline = b"mirror /mnt/c/other/ (fetch)\nmirror /mnt/c/other/ (push)\n"; let multiline_result = b"mirror c:/other/ (fetch)\nmirror c:/other/ (push)\n"; assert_eq!( &*translate_path_to_win(&multiline[..]), &multiline_result[..] ); } #[test] fn no_path_translation() { assert_eq!( &*translate_path_to_win(b"/mnt/other/file.sh"), b"/mnt/other/file.sh" ); } #[test] fn relative_path_translation() { assert_eq!( translate_path_to_unix(".\\src\\main.rs".to_string()), "./src/main.rs" ); } #[test] fn long_argument_path_translation() { assert_eq!( translate_path_to_unix("--file=C:\\some\\path.txt".to_owned()), "--file=/mnt/c/some/path.txt" ); }
use std::borrow::Cow; use std::env; use std::io::{self, Write}; use std::path::{Component, Path, Prefix, PrefixComponent}; use std::process; use std::process::{Command, Stdio}; use regex::bytes::Regex; fn get_drive_letter(pc: &PrefixComponent) -> Option<String> { let drive_byte = match pc.kind() { Prefix::VerbatimDisk(d) => Some(d), Prefix::Disk(d) => Some(d), _ => None, }; drive_byte.map(|drive_letter| { String::from_utf8(vec![drive_letter]) .expect(&format!("Invalid drive letter: {}", drive_letter)) .to_lowercase() }) } fn get_prefix_for_drive(drive: &str) -> String { format!("/mnt/{}", drive) } fn translate_path_to_unix(argument: String) -> String { { let (argname, arg) = if argument.starts_with("--") && argument.contains('=') { let parts: Vec<&str> = argument.splitn(2, '=').collect(); (format!("{}=", parts[0]), parts[1]) } else { ("".to_owned(), argument.as_ref()) }; let win_path = Path::new(arg); if win_path.is_absolute() || win_path.exists() { let wsl_path: String = win_path.components().fold(String::new(), |mut acc, c| { match c { Component::Prefix(prefix_comp) => { let d = get_drive_letter(&prefix_comp) .expect(&format!("Cannot handle path {:?}", win_path)); acc.push_str(&get_prefix_for_drive(&d)); } Component::RootDir => {} _ => { let d = c.as_os_str() .to_str() .expect(&format!("Cannot represent path {:?}", win_path)) .to_owned(); if !acc.is_empty() && !acc.ends_with('/') { acc.push('/'); } acc.push_str(&d); } }; acc }); return format!("{}{}", &argname, &wsl_path); } } argument } f
fn shell_escape(arg: String) -> String { if arg.contains(" ") { return vec![String::from("\""), arg, String::from("\"")].join(""); } arg.replace("\n", "$'\n'"); arg.replace(";", "$';'") } pub fn execute(interactive: bool) { let mut exe_path = env::current_exe().unwrap(); exe_path.pop(); let wslexerc_path = format!("{}\\.wslexerc", exe_path.display()); let mut cmd_args = Vec::new(); let mut wsl_args: Vec<String> = vec![]; let wsl_cmd: String; let exe: String = env::args().next().unwrap(); let path = Path::new(&exe); let file_stem = path.file_stem().unwrap().to_str().unwrap(); wsl_args.push(String::from(file_stem)); wsl_args.extend(env::args().skip(1).map(translate_path_to_unix)); if Path::new(&wslexerc_path).exists() { wsl_cmd = format!( "source {};{}", translate_path_to_unix(wslexerc_path), if interactive { wsl_args.join(" ") }else{ wsl_args.into_iter().map(shell_escape).collect::<Vec<String>>().join(" ")} ); } else { wsl_cmd = wsl_args.join(" "); } let exe_cmd: String; if interactive { exe_cmd = "-ic".to_string(); } else { exe_cmd = "-c".to_string(); } cmd_args.push("bash".to_string()); cmd_args.push(exe_cmd); cmd_args.push(wsl_cmd.clone()); let stdin_mode = if wsl_cmd.ends_with("--version") { Stdio::null() } else { Stdio::inherit() }; let mut wsl_proc_setup = Command::new("wsl.exe"); wsl_proc_setup.args(&cmd_args).stdin(stdin_mode); let status; const TRANSLATED_CMDS: &[&str] = &["rev-parse", "remote"]; let translate_output = env::args() .skip(1) .position(|arg| { TRANSLATED_CMDS .iter() .position(|&tcmd| tcmd == arg) .is_some() }) .is_some(); if translate_output { let wsl_proc = wsl_proc_setup .stdout(Stdio::piped()) .spawn() .expect(&format!("Failed to execute command '{}'", &wsl_cmd)); let output = wsl_proc .wait_with_output() .expect(&format!("Failed to wait for wsl call '{}'", &wsl_cmd)); status = output.status; let output_bytes = output.stdout; let mut stdout = io::stdout(); stdout .write_all(&translate_path_to_win(&output_bytes)) .expect("Failed to write wsl output"); stdout.flush().expect("Failed to flush output"); } else { status = wsl_proc_setup .status() .expect(&format!("Failed to execute command '{}'", &wsl_cmd)); } if let Some(exit_code) = status.code() { process::exit(exit_code); } } #[test] fn win_to_unix_path_trans() { assert_eq!( translate_path_to_unix("d:\\test\\file.txt".to_string()), "/mnt/d/test/file.txt" ); assert_eq!( translate_path_to_unix("C:\\Users\\test\\a space.txt".to_string()), "/mnt/c/Users/test/a space.txt" ); } #[test] fn unix_to_win_path_trans() { assert_eq!( &*translate_path_to_win(b"/mnt/d/some path/a file.md"), b"d:/some path/a file.md" ); assert_eq!( &*translate_path_to_win(b"origin /mnt/c/path/ (fetch)"), b"origin c:/path/ (fetch)" ); let multiline = b"mirror /mnt/c/other/ (fetch)\nmirror /mnt/c/other/ (push)\n"; let multiline_result = b"mirror c:/other/ (fetch)\nmirror c:/other/ (push)\n"; assert_eq!( &*translate_path_to_win(&multiline[..]), &multiline_result[..] ); } #[test] fn no_path_translation() { assert_eq!( &*translate_path_to_win(b"/mnt/other/file.sh"), b"/mnt/other/file.sh" ); } #[test] fn relative_path_translation() { assert_eq!( translate_path_to_unix(".\\src\\main.rs".to_string()), "./src/main.rs" ); } #[test] fn long_argument_path_translation() { assert_eq!( translate_path_to_unix("--file=C:\\some\\path.txt".to_owned()), "--file=/mnt/c/some/path.txt" ); }
n translate_path_to_win(line: &[u8]) -> Cow<[u8]> { lazy_static! { static ref WSLPATH_RE: Regex = Regex::new(r"(?m-u)/mnt/(?P<drive>[A-Za-z])(?P<path>/\S*)") .expect("Failed to compile WSLPATH regex"); } WSLPATH_RE.replace_all(line, &b"${drive}:${path}"[..]) }
function_block-function_prefixed
[ { "content": "fn main() {\n\n processor::execute(false)\n\n}\n", "file_path": "src/main.rs", "rank": 10, "score": 18098.30613938591 }, { "content": "fn main() {\n\n processor::execute(true)\n\n}\n", "file_path": "src/main_i.rs", "rank": 11, "score": 18098.30613938591 }, { "content": "# WSLGit Changelog\n\n\n\n## [0.7.0] - unreleased\n\n\n\n\n\n## [0.6.0] - 2018-04-24\n\n\n\n### Added\n\n\n\n- Allow running bash in non-interactive mode (#16, #23).\n\n\n\n### Fixed\n\n\n\n- Unix paths inside file contents are not being erroneously translated anymore (#19).\n\n- Do not assume valid UTF-8 output from git (#29).\n\n- Fix running `wslgit` without arguments (#26).\n\n- Escape `\\n` newlines in arguments to git (#27).\n\n\n\n### Changed\n\n\n\n- Change to `wsl.exe` to call into the WSL environment.\n\n- Apply path translation only to output of `rev-parse` and `remote`.\n\n\n\n\n\n## [0.5.0] - 2018-01-11\n\n\n\n### Added\n\n\n\n- Return exit code from git subprocess.\n\n\n\n### Fixed\n\n\n\n- Fix superfluous empty `.git` source control providers.\n\n\n\n\n\n## [0.4.0] - 2017-12-18\n\n\n\n### Fixed\n\n\n\n- Compatibility with VS Code 1.19, which now requires proper Windows paths\n\n (with backslashes) and a lowercase drive letter.\n\n\n\n\n\n## [0.3.0] - 2017-11-08\n\n\n\n### Added\n\n\n\n- Add proper license (MIT).\n\n\n\n### Fixed\n\n\n\n- Git waiting for input when called from VS Code to check if `git --version`\n\n works.\n\n\n\n\n\n## [0.2.0] - 2017-07-27\n\n\n\n### Added\n\n\n\n- Properly handle input via stdin (for commit messages).\n\n\n\n\n\n## [0.1.0] - 2017-07-26\n\n\n\n### Added\n\n\n\n- Initial version of `wslgit` with basic functionality.\n\n\n\n\n\n[0.1.0]: #\n\n[0.2.0]: https://github.com/andy-5/wslgit/releases/tag/v0.2.0\n\n[0.3.0]: https://github.com/andy-5/wslgit/releases/tag/v0.3.0\n\n[0.4.0]: https://github.com/andy-5/wslgit/releases/tag/v0.4.0\n\n[0.5.0]: https://github.com/andy-5/wslgit/releases/tag/v0.5.0\n\n[0.6.0]: https://github.com/andy-5/wslgit/releases/tag/v0.6.0\n", "file_path": "CHANGELOG.md", "rank": 17, "score": 3.784729832061534 }, { "content": "## WSLEXE\n\nThe WSL system makes a great convenience of development on Windows.But the integretion to IDEs and code editors is not convenient.\n\n\n\nThe project [wslgit](https://github.com/andy-5/wslgit) makes a dummy exe that trying to receive the arguments and translating all paths from windows type to unix type, and reform these arguments to the real command in wsl .\n\n\n\nThis shows a way to use wsl application in windows enviroment for IDEs and code editors. But the project is made for git only. I made a bit change using the file stem name as the real command. And it works!\n\n\n\n## Usage\n\n\n\n1. download the [wslexe.exe](https://github.com/jswh/wslexe/releases)\n\n2. rename it to the command you want to use, for example pyhon.exe\n\n3. change your ide or editor config to point to the executable file\n\n4. if you have a \".wslexerc\" in the path where the executable file exists, it will be sourced before the real command\n\n\n\n## Compatibility\n\n* python.exe\n\n - [x] vscode\n\n - [x] powershell\n\n* composer.exe\n\n - [x] phpstorm\n\n* php.exe\n\n - [x] phpstorm\n\n* git.exe\n\n - [x] vscode\n\n - [x] powershell\n\n\n\n## Screen shot\n\n#### python.exe for vscode\n\n![show](https://user-images.githubusercontent.com/6405755/41839420-caa53562-7895-11e8-8ff8-576c56d9ba7c.gif)\n", "file_path": "README.md", "rank": 19, "score": 2.743483304081923 }, { "content": "mod processor;\n\n\n\n#[macro_use]\n\nextern crate lazy_static;\n\nextern crate regex;\n\n\n", "file_path": "src/main.rs", "rank": 21, "score": 1.5290085093323809 }, { "content": "mod processor;\n\n\n\n#[macro_use]\n\nextern crate lazy_static;\n\nextern crate regex;\n\n\n", "file_path": "src/main_i.rs", "rank": 22, "score": 1.5290085093323809 }, { "content": "MIT License\n\n\n\nCopyright (c) 2017 Andreas Riffnaller-Schiefer\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n\n", "file_path": "LICENSE.md", "rank": 23, "score": 1.0035888725819 } ]
Rust
packages/server/src/middleware/tests.rs
sogrim/technion-sogrim
6d9f86266252ac0b7348725a295aabca9eba5222
use crate::{config::CONFIG, init_mongodb_client, middleware, resources::user::User}; use actix_rt::test; use actix_web::{ http::StatusCode, test::{self}, web::{self, Bytes}, App, }; use actix_web_lab::middleware::from_fn; use dotenv::dotenv; use mongodb::Client; #[test] async fn test_from_request_no_db_client() { let token_claims = jsonwebtoken_google::test_helper::TokenClaims::new(); let (jwt, parser, _server) = jsonwebtoken_google::test_helper::setup(&token_claims); dotenv().ok(); let app = test::init_service( App::new() .app_data(middleware::auth::JwtDecoder::new_with_parser(parser)) .wrap(from_fn(middleware::auth::authenticate)) .service( web::resource("/").route(web::get().to(|_: User| async { "Shouldn't get here" })), ), ) .await; let resp = test::TestRequest::get() .uri("/") .insert_header(("authorization", jwt)) .send_request(&app) .await; assert!(resp.status().is_server_error()); assert_eq!( Bytes::from("Mongodb client not found in application data"), test::read_body(resp).await ); } #[test] async fn test_from_request_no_auth_mw() { dotenv().ok(); let client = init_mongodb_client!(); let app = test::init_service(App::new().app_data(web::Data::new(client.clone())).service( web::resource("/").route(web::get().to(|_: User| async { "Shouldn't get here" })), )) .await; let resp = test::TestRequest::get() .uri("/") .insert_header(("authorization", "bugo-the-debugo")) .send_request(&app) .await; assert!(resp.status().is_server_error()); assert_eq!( Bytes::from("Middleware error: Sub not found in request extensions"), test::read_body(resp).await ); } #[test] async fn test_auth_mw_no_jwt_decoder() { dotenv().ok(); let client = init_mongodb_client!(); let app = test::init_service( App::new() .app_data(web::Data::new(client.clone())) .wrap(from_fn(middleware::auth::authenticate)) .service(web::resource("/").route(web::get().to(|| async { "Shouldn't get here" }))), ) .await; let resp = test::TestRequest::get() .uri("/") .insert_header(("authorization", "bugo-the-debugo")) .send_request(&app) .await; assert!(resp.status().is_server_error()); assert_eq!( Bytes::from("JwtDecoder not initialized"), test::read_body(resp).await ); } #[test] async fn test_auth_mw_client_errors() { let token_claims = jsonwebtoken_google::test_helper::TokenClaims::new_expired(); let (expired_jwt, parser, _server) = jsonwebtoken_google::test_helper::setup(&token_claims); dotenv().ok(); let app = test::init_service( App::new() .app_data(middleware::auth::JwtDecoder::new_with_parser(parser)) .wrap(from_fn(middleware::auth::authenticate)) .service(web::resource("/").route(web::get().to(|| async { "Shouldn't get here" }))), ) .await; let resp_no_header = test::TestRequest::get().uri("/").send_request(&app).await; assert_eq!(resp_no_header.status(), StatusCode::UNAUTHORIZED); assert_eq!( Bytes::from("No authorization header found"), test::read_body(resp_no_header).await ); let resp_bad_jwt = test::TestRequest::get() .uri("/") .insert_header(("authorization", "bad_jwt")) .send_request(&app) .await; assert_eq!(resp_bad_jwt.status(), StatusCode::UNAUTHORIZED); assert_eq!( Bytes::from("Invalid JWT: Wrong header."), test::read_body(resp_bad_jwt).await ); let resp_jwt_expired = test::TestRequest::get() .uri("/") .insert_header(("authorization", expired_jwt)) .send_request(&app) .await; assert_eq!(resp_jwt_expired.status(), StatusCode::UNAUTHORIZED); assert_eq!( Bytes::from("Invalid JWT: Wrong token format - ExpiredSignature."), test::read_body(resp_jwt_expired).await ); }
use crate::{config::CONFIG, init_mongodb_client, middleware, resources::user::User}; use actix_rt::test; use actix_web::{ http::StatusCode, test::{self}, web::{self, Bytes}, App, }; use actix_web_lab::middleware::from_fn; use dotenv::dotenv; use mongodb::Client; #[test] async fn test_from_request_no_db_client() { let token_claims = jsonwebtoken_google::test_helper::TokenClaims::new(); let (jwt, parser, _server) = jsonwebtoken_google::test_helper::setup(&token_claims); dotenv().ok(); let app = test::init_service( App::new() .app_data(middleware::auth::JwtDecoder::new_with_parser(parser)) .wrap(from_fn(middleware::auth::authenticate)) .service( web::resource("/").route(web::get().to(|_: User| async { "Shouldn't get here" })), ), ) .await; let resp = test::TestRequest::get() .uri("/") .insert_header(("authorization", jwt)) .send_request(&app) .await; assert!(resp.status().is_server_error()); assert_eq!( Bytes::from("Mongodb client not found in application data"), test::read_body(resp).await ); } #[test] async fn test_from_request_no_auth_mw() { dotenv().ok(); let client = init_mongodb_client!(); let app = test::init_service(App::new().app_data(web::Data::new(client.clone())).service( web::resource("/").route(web::get().to(|_: User| async { "Shouldn't get here" })), )) .await; let resp = test::TestRequest::get() .uri("/") .insert_header(("authorization", "bugo-the-debugo")) .send_request(&app) .await; assert!(resp.status().is_server_error()); assert_eq!( Bytes::from("Middleware error: Sub not found in request extensions"), test::read_body(resp).await ); } #[test] async fn test_auth_mw_no_jwt_decoder() { dotenv().ok(); let client = init_mongodb_client!(); let app = test::init_service( App::new() .app_data(web::Data::new(client.clone())) .wrap(from_fn(
t_header(("authorization", "bugo-the-debugo")) .send_request(&app) .await; assert!(resp.status().is_server_error()); assert_eq!( Bytes::from("JwtDecoder not initialized"), test::read_body(resp).await ); } #[test] async fn test_auth_mw_client_errors() { let token_claims = jsonwebtoken_google::test_helper::TokenClaims::new_expired(); let (expired_jwt, parser, _server) = jsonwebtoken_google::test_helper::setup(&token_claims); dotenv().ok(); let app = test::init_service( App::new() .app_data(middleware::auth::JwtDecoder::new_with_parser(parser)) .wrap(from_fn(middleware::auth::authenticate)) .service(web::resource("/").route(web::get().to(|| async { "Shouldn't get here" }))), ) .await; let resp_no_header = test::TestRequest::get().uri("/").send_request(&app).await; assert_eq!(resp_no_header.status(), StatusCode::UNAUTHORIZED); assert_eq!( Bytes::from("No authorization header found"), test::read_body(resp_no_header).await ); let resp_bad_jwt = test::TestRequest::get() .uri("/") .insert_header(("authorization", "bad_jwt")) .send_request(&app) .await; assert_eq!(resp_bad_jwt.status(), StatusCode::UNAUTHORIZED); assert_eq!( Bytes::from("Invalid JWT: Wrong header."), test::read_body(resp_bad_jwt).await ); let resp_jwt_expired = test::TestRequest::get() .uri("/") .insert_header(("authorization", expired_jwt)) .send_request(&app) .await; assert_eq!(resp_jwt_expired.status(), StatusCode::UNAUTHORIZED); assert_eq!( Bytes::from("Invalid JWT: Wrong token format - ExpiredSignature."), test::read_body(resp_jwt_expired).await ); }
middleware::auth::authenticate)) .service(web::resource("/").route(web::get().to(|| async { "Shouldn't get here" }))), ) .await; let resp = test::TestRequest::get() .uri("/") .inser
function_block-random_span
[ { "content": "pub fn parse_copy_paste_data(data: &str) -> Result<Vec<CourseStatus>, AppError> {\n\n // Sanity validation\n\n if !(data.starts_with(\"גיליון ציונים\") && data.contains(\"סוף גיליון ציונים\"))\n\n {\n\n return Err(AppError::Parser(\"Invalid copy paste data\".into()));\n\n }\n\n\n\n let mut courses = HashMap::<String, CourseStatus>::new();\n\n let mut asterisk_courses = Vec::<CourseStatus>::new();\n\n let mut sport_courses = Vec::<CourseStatus>::new();\n\n let mut semester = String::new();\n\n let mut semester_counter: f32 = 0.0;\n\n\n\n for line_ref in data.split_terminator('\\n') {\n\n let line = line_ref.to_string();\n\n\n\n let is_spring = line.contains(\"אביב\");\n\n let is_winter = line.contains(\"חורף\");\n\n let is_summer = line.contains(\"קיץ\");\n\n\n", "file_path": "packages/server/src/core/parser.rs", "rank": 0, "score": 184085.9771746632 }, { "content": "export const getUserState = async (authToken: any): Promise<UserState> => {\n\n const fallback: UserState = {} as UserState;\n\n let data: UserState;\n\n try {\n\n const res = await axios.get(`${API_URL}/students/login`, {\n\n headers: {\n\n authorization: `${authToken}`,\n\n },\n\n });\n\n data = res.data || fallback;\n\n } catch (e) {\n\n data = fallback;\n\n throw e;\n\n }\n\n return data;\n", "file_path": "packages/sogrim-app/src/services/api.ts", "rank": 1, "score": 136152.20384588392 }, { "content": "export const postUserUgData = async (\n\n authToken: any,\n\n ugData: string\n\n): Promise<UserState> => {\n\n // const fallback: UserState = {} as UserState;\n\n let res: UserState;\n\n\n\n res = await axios.post(`${API_URL}/students/courses`, ugData, {\n\n headers: {\n\n authorization: `${authToken}`,\n\n },\n\n });\n\n\n\n return res;\n", "file_path": "packages/sogrim-app/src/services/api.ts", "rank": 2, "score": 133844.7291740746 }, { "content": "fn parse_course_status_pdf_format(line: &str) -> Result<(Course, Option<Grade>), AppError> {\n\n let clean_line = line.replace('*', \"\");\n\n let id = {\n\n let number = clean_line\n\n .split(' ')\n\n .next()\n\n .ok_or_else(|| AppError::Parser(\"Bad Format\".into()))?;\n\n if number.parse::<f32>().is_ok() {\n\n Ok(String::from(number))\n\n } else {\n\n Err(AppError::Parser(\"Bad Format\".into()))\n\n }?\n\n };\n\n\n\n let mut index = 0;\n\n let mut credit = 0.0;\n\n let mut word;\n\n for part in clean_line.split(' ') {\n\n word = part.to_string();\n\n // When a grade is missing, a hyphen (מקף) char is written instead, without any whitespaces between it and the credit.\n", "file_path": "packages/server/src/core/parser.rs", "rank": 3, "score": 133674.00822654742 }, { "content": "export interface LoginRequest {\n\n // TODO\n", "file_path": "packages/sogrim-app/src/services/auth.ts", "rank": 4, "score": 103204.67973333302 }, { "content": "export const getCatalogs = async (authToken: any): Promise<Catalog[]> => {\n\n let fallback: any;\n\n let res: any;\n\n try {\n\n res =\n\n (await axios.get(`${API_URL}/catalogs`, {\n\n headers: {\n\n authorization: `${authToken}`,\n\n },\n\n })) || fallback;\n\n } catch (e) {\n\n res = fallback;\n\n throw e;\n\n }\n\n return res.data;\n", "file_path": "packages/sogrim-app/src/services/api.ts", "rank": 5, "score": 103126.70629389668 }, { "content": "export const getCourseByFilter = async (\n\n authToken: any,\n\n filterName: string,\n\n filter: string\n\n): Promise<Course[]> => {\n\n let fallback: any;\n\n let res: any;\n\n if (!filter) {\n\n return [];\n\n }\n\n try {\n\n res =\n\n (await axios.get(`${API_URL}/students/courses?${filterName}=${filter}`, {\n\n headers: {\n\n authorization: `${authToken}`,\n\n },\n\n })) || fallback;\n\n } catch (e) {\n\n res = fallback;\n\n throw e;\n\n }\n\n return res.data;\n", "file_path": "packages/sogrim-app/src/services/api.ts", "rank": 6, "score": 101358.62913358436 }, { "content": "export const putUserCatalog = async (\n\n authToken: any,\n\n userCatalogId: string\n\n): Promise<UserState> => {\n\n const fallback: UserState = {} as UserState;\n\n let res: UserState;\n\n try {\n\n res =\n\n (await axios.put(\n\n `${API_URL}/students/catalog`,\n\n userCatalogId || ({} as UserState),\n\n {\n\n headers: {\n\n authorization: `${authToken}`,\n\n },\n\n }\n\n )) || fallback;\n\n } catch {\n\n res = fallback;\n\n }\n\n return res;\n", "file_path": "packages/sogrim-app/src/services/api.ts", "rank": 7, "score": 101230.17098480847 }, { "content": "export const putUserSettings = async (\n\n authToken: any,\n\n updatedUserSettings: UserSettings\n\n): Promise<UserSettings> => {\n\n const fallback: UserSettings = {} as UserSettings;\n\n let res: UserSettings;\n\n try {\n\n res =\n\n (await axios.put(`${API_URL}/students/settings`, updatedUserSettings, {\n\n headers: {\n\n authorization: `${authToken}`,\n\n },\n\n })) || fallback;\n\n } catch (e) {\n\n res = fallback;\n\n throw e;\n\n }\n\n return res;\n", "file_path": "packages/sogrim-app/src/services/api.ts", "rank": 8, "score": 101230.17098480847 }, { "content": "export const putUserState = async (\n\n authToken: any,\n\n updatedUserState: UserDetails\n\n): Promise<UserDetails> => {\n\n const fallback: UserDetails = {} as UserDetails;\n\n let res: UserDetails;\n\n try {\n\n res =\n\n (await axios.put(`${API_URL}/students/details`, updatedUserState, {\n\n headers: {\n\n authorization: `${authToken}`,\n\n },\n\n })) || fallback;\n\n } catch (e) {\n\n res = fallback;\n\n throw e;\n\n }\n\n return res;\n", "file_path": "packages/sogrim-app/src/services/api.ts", "rank": 9, "score": 101230.17098480847 }, { "content": "export const getComputeEndGame = async (authToken: any): Promise<UserState> => {\n\n const fallback: UserState = {} as UserState;\n\n let res: UserState;\n\n try {\n\n res =\n\n (await axios.get(`${API_URL}/students/degree-status`, {\n\n headers: {\n\n authorization: `${authToken}`,\n\n },\n\n })) || fallback;\n\n } catch (e) {\n\n res = fallback;\n\n throw e;\n\n }\n\n return res;\n", "file_path": "packages/sogrim-app/src/services/api.ts", "rank": 10, "score": 99650.15640243195 }, { "content": "fn set_grades_for_uncompleted_courses(\n\n courses: &mut Vec<CourseStatus>,\n\n asterisk_courses: Vec<CourseStatus>,\n\n) {\n\n // The canditate course statuses are those with uncomplete (לא השלים) grades.\n\n // For each uncompleted course status, we iterate the asterisk list in reverse to find\n\n // the closest (most chronologically advanced) course status with a grade (anything other than NotComplete (לא השלים)).\n\n // This course status will replace the old one.\n\n let uncompleted_courses = courses\n\n .iter_mut()\n\n .filter(|c| c.grade == Some(Grade::NotComplete))\n\n .collect::<Vec<_>>();\n\n for uncompleted_course in uncompleted_courses {\n\n for asterisk_course in asterisk_courses.iter().rev() {\n\n if let Some(grade) = &asterisk_course.grade {\n\n if uncompleted_course.course.id == asterisk_course.course.id\n\n && grade != &Grade::NotComplete\n\n {\n\n *uncompleted_course = asterisk_course.clone();\n\n break;\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "packages/server/src/core/parser.rs", "rank": 11, "score": 96241.80634538172 }, { "content": "fn get_sgs_courses_assignment(\n\n sgs: &[SpecializationGroup],\n\n groups_indices: &[usize],\n\n courses: &[CourseId],\n\n best_match: &mut HashMap<CourseId, usize>,\n\n) -> Option<HashMap<CourseId, usize>> {\n\n let mut optional_sgs_for_course = HashMap::<CourseId, Vec<usize>>::new();\n\n for course_id in courses {\n\n let mut relevant_groups_for_course = Vec::new();\n\n for sg_index in groups_indices {\n\n if sgs[*sg_index].course_list.contains(course_id) {\n\n relevant_groups_for_course.push(*sg_index);\n\n }\n\n }\n\n if !relevant_groups_for_course.is_empty() {\n\n // only this subset specialization groups consist course_id\n\n optional_sgs_for_course.insert(course_id.clone(), relevant_groups_for_course);\n\n }\n\n }\n\n\n", "file_path": "packages/server/src/core/bank_rule/specialization_groups.rs", "rank": 12, "score": 92343.70373696952 }, { "content": "fn get_complete_sgs_indices(\n\n sgs: &[SpecializationGroup],\n\n course_id_to_sg_index: &HashMap<CourseId, usize>,\n\n) -> Vec<usize> {\n\n let groups_indices = get_groups_indices(course_id_to_sg_index);\n\n let mut complete_sgs_indices = Vec::new();\n\n for sg_index in groups_indices {\n\n // check there are enough courses in this specialization group\n\n if (course_id_to_sg_index\n\n .values()\n\n .filter(|&&group| group == sg_index)\n\n .count())\n\n < sgs[sg_index].courses_sum\n\n {\n\n // There are not enough courses in this sg to complete the requirement\n\n continue;\n\n }\n\n // check if the user completed the mandatory courses in sg\n\n if let Some(mandatory) = &sgs[sg_index].mandatory {\n\n let mut complete_mandatory = true;\n", "file_path": "packages/server/src/core/bank_rule/specialization_groups.rs", "rank": 13, "score": 92343.70373696952 }, { "content": "const UserAppComp: React.FC = () => {\n\n const [mode] = useState<typeof LIGHT_MODE_THEME | typeof DARK_MODE_THEME>(\n\n LIGHT_MODE_THEME\n\n );\n\n const theme = useMemo(() => getAppTheme(mode), [mode]);\n\n\n\n const { userAuthToken } = useAuth();\n\n const { data, isLoading, isError, error } = useUserState(userAuthToken);\n\n const {\n\n dataStore: { updateStoreUserDetails, updateStoreUserSettings },\n\n uiStore: { computeUserRegistrationState, userRegistrationState },\n\n } = useStore();\n\n\n\n useEffect(() => {\n\n if (isError) {\n\n if ((error as any).response.status === 401) {\n\n window.location.reload();\n\n }\n\n }\n\n if (!isLoading && data) {\n\n updateStoreUserDetails(data.details);\n\n updateStoreUserSettings(data.settings);\n\n }\n\n }, [\n\n data,\n\n updateStoreUserDetails,\n\n updateStoreUserSettings,\n\n isLoading,\n\n userRegistrationState,\n\n computeUserRegistrationState,\n\n isError,\n\n error,\n\n ]);\n\n\n\n return (\n\n <ThemeProvider theme={theme}>\n\n <CssBaseline />\n\n <Layout />\n\n </ThemeProvider>\n\n );\n", "file_path": "packages/sogrim-app/src/components/App/UserApp.tsx", "rank": 14, "score": 89579.71113792025 }, { "content": "pub fn create_degree_status() -> DegreeStatus {\n\n DegreeStatus {\n\n course_statuses: vec![\n\n CourseStatus {\n\n course: Course {\n\n id: \"104031\".to_string(),\n\n credit: 5.5,\n\n name: \"infi1m\".to_string(),\n\n },\n\n state: Some(CourseState::Complete),\n\n grade: Some(Grade::Numeric(85)),\n\n ..Default::default()\n\n },\n\n CourseStatus {\n\n course: Course {\n\n id: \"104166\".to_string(),\n\n credit: 5.5,\n\n name: \"Algebra alef\".to_string(),\n\n },\n\n state: Some(CourseState::NotComplete),\n", "file_path": "packages/server/src/core/tests.rs", "rank": 15, "score": 85301.9331168291 }, { "content": "fn contains_course_number(str: &str) -> bool {\n\n for word in str.split_whitespace() {\n\n let course_number = word.parse::<u32>();\n\n match course_number {\n\n Ok(number) if 10000 < number && number < 999999 => return true,\n\n Ok(_) => continue,\n\n Err(_) => continue,\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "packages/server/src/core/parser.rs", "rank": 16, "score": 84669.88306951753 }, { "content": "export type Catalog = {\n\n name: string;\n\n total_credit: number;\n\n description: string;\n\n _id: {\n\n $oid: string;\n\n };\n\n};\n\n\n\nexport type Course = {\n\n credit: number;\n\n name: string;\n\n _id: string;\n\n};\n\n\n\nexport type CourseState = \"הושלם\" | \"לא הושלם\" | \"לא רלוונטי\" | \"בתהליך\";\n\nexport type CourseGradeOptions =\n\n | \"עבר\"\n\n | \"נכשל\"\n\n | \"פטור ללא ניקוד\"\n\n | \"פטור עם ניקוד\"\n\n | \"לא השלים\";\n\n\n\nexport type CourseStatus = {\n\n course: Course;\n\n grade?: string;\n\n semester: string;\n\n state: CourseState;\n\n type?: string;\n\n modified: boolean;\n\n specialization_group_name?: string;\n\n additional_msg?: string;\n\n};\n\n\n\nexport const ACCUMULATE_COURSES = \"accumulate courses\";\n\n\n\nexport type CourseBankReq = {\n\n bank_rule_name: string;\n\n course_bank_name: string;\n\n credit_completed: number;\n\n credit_requirement: number;\n\n course_completed: number;\n\n course_requirement: number;\n\n message?: string;\n\n completed?: boolean;\n\n type: string;\n\n};\n\n\n\nexport type DegreeStatus = {\n\n course_bank_requirements: CourseBankReq[];\n\n course_statuses: CourseStatus[];\n\n overflow_msgs: string[];\n\n total_credit: number;\n\n};\n\n\n\nexport type UserSettings = {\n\n compute_in_progress: boolean;\n\n};\n\n\n\nexport type UserDetails = {\n\n degree_status: DegreeStatus;\n\n catalog?: Catalog;\n\n modified: boolean;\n\n};\n\n\n\nexport type UserState = {\n\n _id: string;\n\n details: UserDetails;\n\n settings: UserSettings;\n\n};\n", "file_path": "packages/sogrim-app/src/types/data-types.ts", "rank": 17, "score": 82432.00123044028 }, { "content": "#[macro_export]\n\nmacro_rules! impl_from_request {\n\n (resource=$resource:ty, getter=$get_fn:ident) => {\n\n impl FromRequest for $resource {\n\n type Error = AppError;\n\n type Future = Pin<Box<dyn Future<Output = Result<Self, Self::Error>>>>;\n\n\n\n fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {\n\n let req = req.clone();\n\n Box::pin(async move {\n\n let client = match req.app_data::<Data<mongodb::Client>>() {\n\n Some(client) => client,\n\n None => {\n\n return Err(AppError::InternalServer(\n\n \"Mongodb client not found in application data\".into(),\n\n ))\n\n }\n\n };\n\n match req.extensions().get::<Sub>() {\n\n Some(key) => db::services::$get_fn(key, client).await,\n", "file_path": "packages/server/src/middleware/from_request.rs", "rank": 18, "score": 80958.16341043764 }, { "content": " None => Err(AppError::Middleware(\n\n \"Sub not found in request extensions\".into(),\n\n )),\n\n }\n\n })\n\n }\n\n\n\n fn extract(req: &HttpRequest) -> Self::Future {\n\n Self::from_request(req, &mut Payload::None)\n\n }\n\n }\n\n };\n\n}\n", "file_path": "packages/server/src/middleware/from_request.rs", "rank": 19, "score": 80948.22967351323 }, { "content": "export class AuthService {\n\n async login(loginRequest: LoginRequest) {\n\n const response = await fetch(`${API_URL}/auth/login`, {\n\n method: \"POST\",\n\n headers: {\n\n \"Content-Type\": \"application/json\",\n\n },\n\n body: JSON.stringify(loginRequest),\n\n });\n\n\n\n const parsedResponse = await response.json();\n\n\n\n if (!response.ok) {\n\n throw new Error(parsedResponse);\n\n }\n\n\n\n return parsedResponse;\n\n }\n", "file_path": "packages/sogrim-app/src/services/auth.ts", "rank": 28, "score": 76931.47802746392 }, { "content": "export class DataStore {\n\n public userDetails: UserDetails = {} as UserDetails;\n\n public userSettings: UserSettings = {} as UserSettings;\n\n public userBankNames: string[] = [];\n\n\n\n constructor(public readonly rootStore: RootStore) {\n\n makeAutoObservable(this, { rootStore: false });\n\n }\n\n\n\n updateStoreUserDetails = (newUserDitails: UserDetails) => {\n\n this.userDetails = newUserDitails;\n\n };\n\n\n\n updateStoreUserSettings = (newUserSettings: UserSettings) => {\n\n this.userSettings = newUserSettings;\n\n };\n\n\n\n get modifiedStatus() {\n\n return (\n\n this.userDetails?.degree_status?.course_statuses?.length > 0 &&\n\n this.userDetails?.modified\n\n );\n\n }\n\n\n\n getAllUserSemesters = (courseList: CourseStatus[]): string[] => {\n\n const allSemestersSet = new Set<string>();\n\n courseList.forEach((course) =>\n\n course.semester ? allSemestersSet.add(course.semester) : null\n\n );\n\n const allSemesters = Array.from(allSemestersSet);\n\n allSemesters.sort((first, second) => {\n\n let firstSplited = first.split(\"_\");\n\n let secondSplited = second.split(\"_\");\n\n return Number(firstSplited[1]) >= Number(secondSplited[1]) ? 1 : -1;\n\n });\n\n return allSemesters;\n\n };\n\n\n\n getUserBankNames = () => {\n\n if (this.userBankNames.length === 0) {\n\n this.generateUserBanksNames();\n\n }\n\n return this.userBankNames;\n\n };\n\n\n\n getUserGPA = (): number => {\n\n const courseList = this.userDetails?.degree_status.course_statuses ?? [];\n\n let sum = 0,\n\n credit = 0;\n\n courseList.forEach((course) => {\n\n const courserGrade = course.grade ? Number(course.grade) : null;\n\n if (courserGrade === 0 || !!courserGrade) {\n\n sum += courserGrade * course.course.credit;\n\n credit += course.course.credit;\n\n }\n\n });\n\n if (credit === 0) {\n\n return 0;\n\n }\n\n const avg = sum / credit;\n\n return Math.round((avg + Number.EPSILON) * 100) / 100;\n\n };\n\n\n\n getNumberOfBankComplete = (): number => {\n\n const bankList =\n\n this.userDetails?.degree_status?.course_bank_requirements ?? [];\n\n let count = 0;\n\n bankList.forEach((bank) => {\n\n if (bank.completed) {\n\n count++;\n\n }\n\n });\n\n return count;\n\n };\n\n\n\n private displayContent = (content: string | undefined) => {\n\n if (!content) {\n\n return \"-\";\n\n }\n\n return content.toString();\n\n };\n\n\n\n private generateUserBanksNames = () => {\n\n const userBanksNamesList: string[] = [];\n\n this.userDetails?.degree_status?.course_bank_requirements?.forEach(\n\n (bankReq) => {\n\n userBanksNamesList.push(bankReq.course_bank_name);\n\n }\n\n );\n\n this.userBankNames = userBanksNamesList;\n\n };\n\n\n\n generateRowsForSemester = (\n\n semester: string | null,\n\n courseList: CourseStatus[],\n\n semester0: boolean = false\n\n ) => {\n\n if (!courseList || courseList.length === 0) {\n\n return [];\n\n }\n\n const allSemesterCourses = new Set<CourseStatus>();\n\n courseList.forEach((course) => {\n\n if (\n\n // Normal Semester\n\n (!semester0 && course.semester === semester) ||\n\n // Semester 0\n\n (semester0 &&\n\n course.semester === semester &&\n\n (course.grade === \"פטור ללא ניקוד\" ||\n\n course.grade === \"פטור עם ניקוד\"))\n\n ) {\n\n allSemesterCourses.add(course);\n\n }\n\n });\n\n const rows: RowData[] = [];\n\n allSemesterCourses.forEach((course) =>\n\n rows.push(\n\n createData(\n\n course.course.name,\n\n course.course._id,\n\n course.course.credit,\n\n course.semester,\n\n course.state,\n\n this.displayContent(course.grade),\n\n this.displayContent(course.type),\n\n course.additional_msg\n\n )\n\n )\n\n );\n\n return rows;\n\n };\n\n\n\n generateRowsForBank = (bank: string, courseList: CourseStatus[]) => {\n\n const allSemesterCourses = new Set<CourseStatus>();\n\n courseList?.forEach((course) => {\n\n if (course.type === bank) {\n\n allSemesterCourses.add(course);\n\n }\n\n });\n\n const rows: RowData[] = [];\n\n allSemesterCourses?.forEach((course) =>\n\n rows.push(\n\n createData(\n\n course.course.name,\n\n course.course._id,\n\n course.course.credit,\n\n course.semester,\n\n course.state,\n\n this.displayContent(course.grade),\n\n this.displayContent(course.type),\n\n course.specialization_group_name,\n\n course.additional_msg\n\n )\n\n )\n\n );\n\n\n\n return rows;\n\n };\n\n\n\n updateCourseInUserDetails = (rowData: RowData, semester: string) => {\n\n const courseList = this.userDetails?.degree_status.course_statuses ?? [];\n\n\n\n const updateCourseRow: CourseStatus = {\n\n course: {\n\n _id: rowData.courseNumber,\n\n credit: +rowData.credit,\n\n name: rowData.name,\n\n },\n\n state: rowData.state as CourseState,\n\n type: rowData.type,\n\n grade: rowData.grade,\n\n semester: semester,\n\n modified: true,\n\n };\n\n\n\n const updatedCourseStatus: CourseStatus[] = courseList.map((courseStatus) =>\n\n courseStatus.course._id === rowData.courseNumber &&\n\n courseStatus.semester === rowData.semester\n\n ? updateCourseRow\n\n : courseStatus\n\n );\n\n\n\n this.userDetails.degree_status.course_statuses = updatedCourseStatus;\n\n this.userDetails.modified = true;\n\n\n\n return this.userDetails;\n\n };\n\n\n\n deleteCourseInUserDetails = (rowData: RowData, semester: string) => {\n\n const courseList = this.userDetails?.degree_status.course_statuses ?? [];\n\n const idx = courseList.findIndex(\n\n (courseStatus) =>\n\n courseStatus.course._id === rowData.courseNumber &&\n\n courseStatus.semester === rowData.semester\n\n );\n\n const newCourseList = [...courseList];\n\n newCourseList.splice(idx, 1);\n\n\n\n this.userDetails.degree_status.course_statuses = newCourseList;\n\n this.userDetails.modified = true;\n\n\n\n return this.userDetails;\n\n };\n\n\n\n insertCourseInUserDetails = (rowData: RowData, semester: string) => {\n\n const courseList = this.userDetails?.degree_status.course_statuses ?? [];\n\n\n\n const newCourse: CourseStatus = {\n\n course: {\n\n _id: rowData.courseNumber,\n\n credit: +rowData.credit,\n\n name: rowData.name,\n\n },\n\n state: rowData.state as CourseState,\n\n type: rowData.type,\n\n grade: rowData.grade,\n\n semester: semester,\n\n modified: true,\n\n };\n\n\n\n courseList.push(newCourse);\n\n this.userDetails.degree_status.course_statuses = courseList;\n\n this.userDetails.modified = true;\n\n\n\n return this.userDetails;\n\n };\n\n\n\n deleteSemesterInUserDetails = (semester: string) => {\n\n const courseList = this.userDetails?.degree_status.course_statuses ?? [];\n\n const newCourseList = [...courseList];\n\n\n\n courseList.forEach((course) => {\n\n if (course.semester === semester) {\n\n const idx = newCourseList.findIndex(\n\n (courseToRemove) => courseToRemove.course._id === course.course._id\n\n );\n\n newCourseList.splice(idx, 1);\n\n }\n\n });\n\n this.userDetails.degree_status.course_statuses = newCourseList;\n\n this.userDetails.modified = true;\n\n\n\n return this.userDetails;\n\n };\n\n\n\n updateIrrelevantCourse = (\n\n course: RowData,\n\n action: \"לא רלוונטי\" | \"לא הושלם\"\n\n ): UserDetails => {\n\n const courseList = this.userDetails?.degree_status.course_statuses ?? [];\n\n\n\n courseList.forEach((courseListItem, idx) => {\n\n if (\n\n courseListItem.course._id === course.courseNumber &&\n\n courseListItem.semester === course.semester\n\n ) {\n\n courseListItem.state = action;\n\n courseListItem.modified = true;\n\n }\n\n });\n\n\n\n this.userDetails.degree_status.course_statuses = courseList;\n\n this.userDetails.modified = true;\n\n\n\n return this.userDetails;\n\n };\n\n\n\n updateComputeInProgressInUserSettings = (\n\n computeInProgress: boolean\n\n ): UserSettings => {\n\n this.userSettings.compute_in_progress = computeInProgress;\n\n this.userDetails.modified = true;\n\n return this.userSettings;\n\n };\n", "file_path": "packages/sogrim-app/src/stores/DataStore.ts", "rank": 29, "score": 76204.79646183223 }, { "content": " async login(loginRequest: LoginRequest) {\n\n const response = await fetch(`${API_URL}/auth/login`, {\n\n method: \"POST\",\n\n headers: {\n\n \"Content-Type\": \"application/json\",\n\n },\n\n body: JSON.stringify(loginRequest),\n\n });\n\n\n\n const parsedResponse = await response.json();\n\n\n\n if (!response.ok) {\n\n throw new Error(parsedResponse);\n\n }\n\n\n\n return parsedResponse;\n", "file_path": "packages/sogrim-app/src/services/auth.ts", "rank": 30, "score": 75890.57860178663 }, { "content": "export const getAppTheme = (\n\n mode: typeof LIGHT_MODE_THEME | typeof DARK_MODE_THEME\n\n) => {\n\n let theme = createTheme({\n\n palette: {\n\n mode,\n\n primary: {\n\n main: \"#24333c\",\n\n light: \"#24333c\",\n\n },\n\n secondary: {\n\n main: \"#d66563\",\n\n light: \"#f0aeae\",\n\n dark: \"#853f3e\",\n\n },\n\n info: {\n\n main: \"#743ca5\",\n\n light: \"#743ca5\",\n\n dark: \"#743ca5\",\n\n },\n\n },\n\n typography: {\n\n fontFamily: \"Assistant\",\n\n },\n\n direction: \"rtl\",\n\n });\n\n theme = responsiveFontSizes(theme);\n\n return theme;\n", "file_path": "packages/sogrim-app/src/themes/theme.ts", "rank": 31, "score": 75362.46227861488 }, { "content": " constructor(public readonly rootStore: RootStore) {\n\n makeAutoObservable(this, { rootStore: false });\n", "file_path": "packages/sogrim-app/src/stores/DataStore.ts", "rank": 32, "score": 75189.69498580018 }, { "content": "const ErrorToastComp: React.FC<ErrorToastProps> = ({ msg }) => {\n\n const {\n\n uiStore: { setErrorMsg },\n\n } = useStore();\n\n return (\n\n <>\n\n <Snackbar\n\n autoHideDuration={1000}\n\n anchorOrigin={{ vertical: \"bottom\", horizontal: \"right\" }}\n\n open={msg !== \"\"}\n\n >\n\n <Alert\n\n icon={false}\n\n sx={{\n\n width: \"100%\",\n\n }}\n\n severity=\"error\"\n\n onClose={() => setErrorMsg(\"\")}\n\n >\n\n {msg}\n\n </Alert>\n\n </Snackbar>\n\n </>\n\n );\n", "file_path": "packages/sogrim-app/src/components/Toasts/ErrorToast.tsx", "rank": 33, "score": 74250.17458003212 }, { "content": "interface ErrorToastProps {\n\n msg: string;\n", "file_path": "packages/sogrim-app/src/components/Toasts/ErrorToast.tsx", "rank": 34, "score": 74250.17458003212 }, { "content": " get modifiedStatus() {\n\n return (\n\n this.userDetails?.degree_status?.course_statuses?.length > 0 &&\n\n this.userDetails?.modified\n\n );\n", "file_path": "packages/sogrim-app/src/stores/DataStore.ts", "rank": 35, "score": 74202.5894686355 }, { "content": "export const RemoveUserDetails: React.FC<RemoveUserDetailsProps> = ({\n\n handleClose,\n\n}) => {\n\n const { userAuthToken, logout } = useAuth();\n\n const { mutate } = useUpdateUserState(userAuthToken);\n\n\n\n const handleSend = () => {\n\n mutate(emptyUserDetails);\n\n handleClose();\n\n setTimeout(() => {\n\n logout();\n\n }, 300);\n\n };\n\n\n\n return (\n\n <>\n\n <DialogTitle>מחיקת פרטי משתמש</DialogTitle>\n\n <DialogContent>\n\n <DialogContentText>\n\n האם ברוצנך לאפס את פרטי המשתמש? לאחר אישור, העמוד יתרענן ותחזור לתהליך\n\n בחירת הקטלוג וטעינת הקורסים.\n\n </DialogContentText>\n\n </DialogContent>\n\n\n\n <DialogActions>\n\n <Button onClick={handleSend}>כן אני בטוח, מחק</Button>\n\n <Button onClick={handleClose}>בטל</Button>\n\n </DialogActions>\n\n </>\n\n );\n", "file_path": "packages/sogrim-app/src/components/Header/Menu/RemoveUserDetails.tsx", "rank": 36, "score": 72166.54954499003 }, { "content": "export interface RemoveUserDetailsProps {\n\n handleClose: () => void;\n", "file_path": "packages/sogrim-app/src/components/Header/Menu/RemoveUserDetails.tsx", "rank": 37, "score": 71258.00524874705 }, { "content": "fn get_groups_indices(course_id_to_sg_index: &HashMap<CourseId, usize>) -> Vec<usize> {\n\n let mut uniques = HashSet::new();\n\n let mut indices = course_id_to_sg_index\n\n .clone()\n\n .into_values()\n\n .collect::<Vec<_>>();\n\n indices.retain(|e| uniques.insert(*e));\n\n indices\n\n}\n\n\n", "file_path": "packages/server/src/core/bank_rule/specialization_groups.rs", "rank": 38, "score": 69214.19475272154 }, { "content": "const NissoDummyUser: User = {\n\n id: 'afgfd-4234-dfs-43242',\n\n displayName: 'Nisso Ohana',\n\n selectedCatalog: catalogExmaple,\n\n userCourses: userCoursesExample, \n", "file_path": "packages/docs/sogrim-app-mocks.ts", "rank": 39, "score": 67736.20893711565 }, { "content": "const userCoursesExample: UserCourse[] = [\n\n course1,\n\n course2,\n\n course3,\n\n course4 \n", "file_path": "packages/docs/sogrim-app-mocks.ts", "rank": 40, "score": 67728.95940477766 }, { "content": "export const StoreProvider: React.FC<StoreProviderProps> = ({\n\n store,\n\n children,\n", "file_path": "packages/sogrim-app/src/hooks/useStore.tsx", "rank": 41, "score": 66759.9500799905 }, { "content": "export const AuthProvider: React.FC<AuthProviderProps> = ({\n\n authStore,\n\n children,\n\n}) => (\n\n <AuthContext.Provider value={authStore}> {children}</AuthContext.Provider>\n", "file_path": "packages/sogrim-app/src/hooks/useAuth.tsx", "rank": 42, "score": 66759.9500799905 }, { "content": "export const ACCUMULATE_COURSES = \"accumulate courses\";\n", "file_path": "packages/sogrim-app/src/types/data-types.ts", "rank": 43, "score": 66683.47838061312 }, { "content": "export const API_URL =\n\n process.env.NODE_ENV === \"development\"\n\n ? process.env.REACT_APP_API_URL_DEV\n", "file_path": "packages/sogrim-app/src/services/api-url.ts", "rank": 44, "score": 66436.5962725089 }, { "content": "export async function callWithFallback<R>(\n\n call: Promise<R>,\n\n fallback: any\n\n): Promise<R> {\n\n let result: R;\n\n try {\n\n result = (await call) || fallback;\n\n } catch {\n\n result = fallback;\n\n }\n\n\n\n return result;\n", "file_path": "packages/sogrim-app/src/services/api-utils.ts", "rank": 45, "score": 66436.5962725089 }, { "content": "interface AuthProviderProps {\n\n authStore: AuthStore;\n", "file_path": "packages/sogrim-app/src/hooks/useAuth.tsx", "rank": 46, "score": 65634.66301543836 }, { "content": "interface StoreProviderProps {\n\n store: RootStore;\n", "file_path": "packages/sogrim-app/src/hooks/useStore.tsx", "rank": 47, "score": 65634.66301543836 }, { "content": " get userDisplyName(): string {\n\n return this.userDisplay?.given_name;\n", "file_path": "packages/sogrim-app/src/stores/UiStore.ts", "rank": 48, "score": 63311.400037580184 }, { "content": " get userAuthToken() {\n\n if (this.googleSession === GoogleClinetSession.DONE) {\n\n return this.userCredentialResponse.credential;\n\n }\n\n return null;\n", "file_path": "packages/sogrim-app/src/stores/AuthStore.ts", "rank": 49, "score": 63311.400037580184 }, { "content": "export interface RowData {\n\n name: string;\n\n courseNumber: string;\n\n credit: string | number;\n\n state: string;\n\n type?: string;\n\n grade?: string;\n\n semester: string;\n\n sg_name?: string;\n\n msg?: string;\n", "file_path": "packages/sogrim-app/src/components/Pages/SemestersPage/SemesterTabsConsts.ts", "rank": 50, "score": 62403.89257254295 }, { "content": "import { API_URL } from \"./api-url\";\n\n\n\nexport interface LoginRequest {\n\n // TODO\n\n}\n\n\n\nexport class AuthService {\n\n async login(loginRequest: LoginRequest) {\n\n const response = await fetch(`${API_URL}/auth/login`, {\n\n method: \"POST\",\n\n headers: {\n\n \"Content-Type\": \"application/json\",\n\n },\n\n body: JSON.stringify(loginRequest),\n\n });\n\n\n\n const parsedResponse = await response.json();\n\n\n\n if (!response.ok) {\n\n throw new Error(parsedResponse);\n\n }\n\n\n\n return parsedResponse;\n\n }\n\n}\n", "file_path": "packages/sogrim-app/src/services/auth.ts", "rank": 51, "score": 61191.08054641654 }, { "content": "import axios from \"axios\";\n\nimport {\n\n UserDetails,\n\n UserState,\n\n Catalog,\n\n Course,\n\n UserSettings,\n\n} from \"../types/data-types\";\n\nimport { API_URL } from \"./api-url\";\n\n\n\nexport const getCatalogs = async (authToken: any): Promise<Catalog[]> => {\n\n let fallback: any;\n\n let res: any;\n\n try {\n\n res =\n\n (await axios.get(`${API_URL}/catalogs`, {\n\n headers: {\n\n authorization: `${authToken}`,\n\n },\n\n })) || fallback;\n\n } catch (e) {\n\n res = fallback;\n\n throw e;\n\n }\n\n return res.data;\n\n};\n\n\n\nexport const getCourseByFilter = async (\n\n authToken: any,\n\n filterName: string,\n\n filter: string\n\n): Promise<Course[]> => {\n\n let fallback: any;\n\n let res: any;\n\n if (!filter) {\n\n return [];\n\n }\n\n try {\n\n res =\n\n (await axios.get(`${API_URL}/students/courses?${filterName}=${filter}`, {\n\n headers: {\n\n authorization: `${authToken}`,\n\n },\n\n })) || fallback;\n\n } catch (e) {\n\n res = fallback;\n\n throw e;\n\n }\n\n return res.data;\n\n};\n\n\n\nexport const putUserCatalog = async (\n\n authToken: any,\n\n userCatalogId: string\n\n): Promise<UserState> => {\n\n const fallback: UserState = {} as UserState;\n\n let res: UserState;\n\n try {\n\n res =\n\n (await axios.put(\n\n `${API_URL}/students/catalog`,\n\n userCatalogId || ({} as UserState),\n\n {\n\n headers: {\n\n authorization: `${authToken}`,\n\n },\n\n }\n\n )) || fallback;\n\n } catch {\n\n res = fallback;\n\n }\n\n return res;\n\n};\n\n\n\nexport const postUserUgData = async (\n\n authToken: any,\n\n ugData: string\n\n): Promise<UserState> => {\n\n // const fallback: UserState = {} as UserState;\n\n let res: UserState;\n\n\n\n res = await axios.post(`${API_URL}/students/courses`, ugData, {\n\n headers: {\n\n authorization: `${authToken}`,\n\n },\n\n });\n\n\n\n return res;\n\n};\n\n\n\nexport const getUserState = async (authToken: any): Promise<UserState> => {\n\n const fallback: UserState = {} as UserState;\n\n let data: UserState;\n\n try {\n\n const res = await axios.get(`${API_URL}/students/login`, {\n\n headers: {\n\n authorization: `${authToken}`,\n\n },\n\n });\n\n data = res.data || fallback;\n\n } catch (e) {\n\n data = fallback;\n\n throw e;\n\n }\n\n return data;\n\n};\n\n\n\nexport const putUserState = async (\n\n authToken: any,\n\n updatedUserState: UserDetails\n\n): Promise<UserDetails> => {\n\n const fallback: UserDetails = {} as UserDetails;\n\n let res: UserDetails;\n\n try {\n\n res =\n\n (await axios.put(`${API_URL}/students/details`, updatedUserState, {\n\n headers: {\n\n authorization: `${authToken}`,\n\n },\n\n })) || fallback;\n\n } catch (e) {\n\n res = fallback;\n\n throw e;\n\n }\n\n return res;\n\n};\n\n\n\nexport const getComputeEndGame = async (authToken: any): Promise<UserState> => {\n\n const fallback: UserState = {} as UserState;\n\n let res: UserState;\n\n try {\n\n res =\n\n (await axios.get(`${API_URL}/students/degree-status`, {\n\n headers: {\n\n authorization: `${authToken}`,\n\n },\n\n })) || fallback;\n\n } catch (e) {\n\n res = fallback;\n\n throw e;\n\n }\n\n return res;\n\n};\n\n\n\nexport const putUserSettings = async (\n\n authToken: any,\n\n updatedUserSettings: UserSettings\n\n): Promise<UserSettings> => {\n\n const fallback: UserSettings = {} as UserSettings;\n\n let res: UserSettings;\n\n try {\n\n res =\n\n (await axios.put(`${API_URL}/students/settings`, updatedUserSettings, {\n\n headers: {\n\n authorization: `${authToken}`,\n\n },\n\n })) || fallback;\n\n } catch (e) {\n\n res = fallback;\n\n throw e;\n\n }\n\n return res;\n\n};\n", "file_path": "packages/sogrim-app/src/services/api.ts", "rank": 52, "score": 61191.08054641654 }, { "content": "export function getComparator<Key extends keyof any>(\n\n order: Order,\n\n orderBy: Key\n\n): (\n\n a: { [key in Key]: number | string },\n\n b: { [key in Key]: number | string }\n\n) => number {\n\n return order === \"desc\"\n\n ? (a, b) => descendingComparator(a, b, orderBy)\n\n : (a, b) => -descendingComparator(a, b, orderBy);\n", "file_path": "packages/sogrim-app/src/components/Pages/SemestersPage/SemesterTable/SemesterTableUtils.ts", "rank": 53, "score": 60475.242519088715 }, { "content": "import { makeAutoObservable } from \"mobx\";\n\nimport { createData } from \"../components/Pages/SemestersPage/SemesterTable/SemesterTableUtils\";\n\nimport { RowData } from \"../components/Pages/SemestersPage/SemesterTabsConsts\";\n\nimport {\n\n CourseStatus,\n\n UserDetails,\n\n CourseState,\n\n UserSettings,\n\n} from \"../types/data-types\";\n\nimport { RootStore } from \"./RootStore\";\n\n\n\nexport class DataStore {\n\n public userDetails: UserDetails = {} as UserDetails;\n\n public userSettings: UserSettings = {} as UserSettings;\n\n public userBankNames: string[] = [];\n\n\n\n constructor(public readonly rootStore: RootStore) {\n\n makeAutoObservable(this, { rootStore: false });\n\n }\n\n\n\n updateStoreUserDetails = (newUserDitails: UserDetails) => {\n\n this.userDetails = newUserDitails;\n\n };\n\n\n\n updateStoreUserSettings = (newUserSettings: UserSettings) => {\n\n this.userSettings = newUserSettings;\n\n };\n\n\n\n get modifiedStatus() {\n\n return (\n\n this.userDetails?.degree_status?.course_statuses?.length > 0 &&\n\n this.userDetails?.modified\n\n );\n\n }\n\n\n\n getAllUserSemesters = (courseList: CourseStatus[]): string[] => {\n\n const allSemestersSet = new Set<string>();\n\n courseList.forEach((course) =>\n\n course.semester ? allSemestersSet.add(course.semester) : null\n\n );\n\n const allSemesters = Array.from(allSemestersSet);\n\n allSemesters.sort((first, second) => {\n\n let firstSplited = first.split(\"_\");\n\n let secondSplited = second.split(\"_\");\n\n return Number(firstSplited[1]) >= Number(secondSplited[1]) ? 1 : -1;\n\n });\n\n return allSemesters;\n\n };\n\n\n\n getUserBankNames = () => {\n\n if (this.userBankNames.length === 0) {\n\n this.generateUserBanksNames();\n\n }\n\n return this.userBankNames;\n\n };\n\n\n\n getUserGPA = (): number => {\n\n const courseList = this.userDetails?.degree_status.course_statuses ?? [];\n\n let sum = 0,\n\n credit = 0;\n\n courseList.forEach((course) => {\n\n const courserGrade = course.grade ? Number(course.grade) : null;\n\n if (courserGrade === 0 || !!courserGrade) {\n\n sum += courserGrade * course.course.credit;\n\n credit += course.course.credit;\n\n }\n\n });\n\n if (credit === 0) {\n\n return 0;\n\n }\n\n const avg = sum / credit;\n\n return Math.round((avg + Number.EPSILON) * 100) / 100;\n\n };\n\n\n\n getNumberOfBankComplete = (): number => {\n\n const bankList =\n\n this.userDetails?.degree_status?.course_bank_requirements ?? [];\n\n let count = 0;\n\n bankList.forEach((bank) => {\n\n if (bank.completed) {\n\n count++;\n\n }\n\n });\n\n return count;\n\n };\n\n\n\n private displayContent = (content: string | undefined) => {\n\n if (!content) {\n\n return \"-\";\n\n }\n\n return content.toString();\n\n };\n\n\n\n private generateUserBanksNames = () => {\n\n const userBanksNamesList: string[] = [];\n\n this.userDetails?.degree_status?.course_bank_requirements?.forEach(\n\n (bankReq) => {\n\n userBanksNamesList.push(bankReq.course_bank_name);\n\n }\n\n );\n\n this.userBankNames = userBanksNamesList;\n\n };\n\n\n\n generateRowsForSemester = (\n\n semester: string | null,\n\n courseList: CourseStatus[],\n\n semester0: boolean = false\n\n ) => {\n\n if (!courseList || courseList.length === 0) {\n\n return [];\n\n }\n\n const allSemesterCourses = new Set<CourseStatus>();\n\n courseList.forEach((course) => {\n\n if (\n\n // Normal Semester\n\n (!semester0 && course.semester === semester) ||\n\n // Semester 0\n\n (semester0 &&\n\n course.semester === semester &&\n\n (course.grade === \"פטור ללא ניקוד\" ||\n\n course.grade === \"פטור עם ניקוד\"))\n\n ) {\n\n allSemesterCourses.add(course);\n\n }\n\n });\n\n const rows: RowData[] = [];\n\n allSemesterCourses.forEach((course) =>\n\n rows.push(\n\n createData(\n\n course.course.name,\n\n course.course._id,\n\n course.course.credit,\n\n course.semester,\n\n course.state,\n\n this.displayContent(course.grade),\n\n this.displayContent(course.type),\n\n course.additional_msg\n\n )\n\n )\n\n );\n\n return rows;\n\n };\n\n\n\n generateRowsForBank = (bank: string, courseList: CourseStatus[]) => {\n\n const allSemesterCourses = new Set<CourseStatus>();\n\n courseList?.forEach((course) => {\n\n if (course.type === bank) {\n\n allSemesterCourses.add(course);\n\n }\n\n });\n\n const rows: RowData[] = [];\n\n allSemesterCourses?.forEach((course) =>\n\n rows.push(\n\n createData(\n\n course.course.name,\n\n course.course._id,\n\n course.course.credit,\n\n course.semester,\n\n course.state,\n\n this.displayContent(course.grade),\n\n this.displayContent(course.type),\n\n course.specialization_group_name,\n\n course.additional_msg\n\n )\n\n )\n\n );\n\n\n\n return rows;\n\n };\n\n\n\n updateCourseInUserDetails = (rowData: RowData, semester: string) => {\n\n const courseList = this.userDetails?.degree_status.course_statuses ?? [];\n\n\n\n const updateCourseRow: CourseStatus = {\n\n course: {\n\n _id: rowData.courseNumber,\n\n credit: +rowData.credit,\n\n name: rowData.name,\n\n },\n\n state: rowData.state as CourseState,\n\n type: rowData.type,\n\n grade: rowData.grade,\n\n semester: semester,\n\n modified: true,\n\n };\n\n\n\n const updatedCourseStatus: CourseStatus[] = courseList.map((courseStatus) =>\n\n courseStatus.course._id === rowData.courseNumber &&\n\n courseStatus.semester === rowData.semester\n\n ? updateCourseRow\n\n : courseStatus\n\n );\n\n\n\n this.userDetails.degree_status.course_statuses = updatedCourseStatus;\n\n this.userDetails.modified = true;\n\n\n\n return this.userDetails;\n\n };\n\n\n\n deleteCourseInUserDetails = (rowData: RowData, semester: string) => {\n\n const courseList = this.userDetails?.degree_status.course_statuses ?? [];\n\n const idx = courseList.findIndex(\n\n (courseStatus) =>\n\n courseStatus.course._id === rowData.courseNumber &&\n\n courseStatus.semester === rowData.semester\n\n );\n\n const newCourseList = [...courseList];\n\n newCourseList.splice(idx, 1);\n\n\n\n this.userDetails.degree_status.course_statuses = newCourseList;\n\n this.userDetails.modified = true;\n\n\n\n return this.userDetails;\n\n };\n\n\n\n insertCourseInUserDetails = (rowData: RowData, semester: string) => {\n\n const courseList = this.userDetails?.degree_status.course_statuses ?? [];\n\n\n\n const newCourse: CourseStatus = {\n\n course: {\n\n _id: rowData.courseNumber,\n\n credit: +rowData.credit,\n\n name: rowData.name,\n\n },\n\n state: rowData.state as CourseState,\n\n type: rowData.type,\n\n grade: rowData.grade,\n\n semester: semester,\n\n modified: true,\n\n };\n\n\n\n courseList.push(newCourse);\n\n this.userDetails.degree_status.course_statuses = courseList;\n\n this.userDetails.modified = true;\n\n\n\n return this.userDetails;\n\n };\n\n\n\n deleteSemesterInUserDetails = (semester: string) => {\n\n const courseList = this.userDetails?.degree_status.course_statuses ?? [];\n\n const newCourseList = [...courseList];\n\n\n\n courseList.forEach((course) => {\n\n if (course.semester === semester) {\n\n const idx = newCourseList.findIndex(\n\n (courseToRemove) => courseToRemove.course._id === course.course._id\n\n );\n\n newCourseList.splice(idx, 1);\n\n }\n\n });\n\n this.userDetails.degree_status.course_statuses = newCourseList;\n\n this.userDetails.modified = true;\n\n\n\n return this.userDetails;\n\n };\n\n\n\n updateIrrelevantCourse = (\n\n course: RowData,\n\n action: \"לא רלוונטי\" | \"לא הושלם\"\n\n ): UserDetails => {\n\n const courseList = this.userDetails?.degree_status.course_statuses ?? [];\n\n\n\n courseList.forEach((courseListItem, idx) => {\n\n if (\n\n courseListItem.course._id === course.courseNumber &&\n\n courseListItem.semester === course.semester\n\n ) {\n\n courseListItem.state = action;\n\n courseListItem.modified = true;\n\n }\n\n });\n\n\n\n this.userDetails.degree_status.course_statuses = courseList;\n\n this.userDetails.modified = true;\n\n\n\n return this.userDetails;\n\n };\n\n\n\n updateComputeInProgressInUserSettings = (\n\n computeInProgress: boolean\n\n ): UserSettings => {\n\n this.userSettings.compute_in_progress = computeInProgress;\n\n this.userDetails.modified = true;\n\n return this.userSettings;\n\n };\n\n}\n", "file_path": "packages/sogrim-app/src/stores/DataStore.ts", "rank": 54, "score": 60463.68537200728 }, { "content": "export const createData = (\n\n name: string,\n\n courseNumber: string,\n\n credit: number,\n\n semester: string,\n\n state: string,\n\n grade?: string,\n\n type?: string,\n\n sg_name?: string,\n\n msg?: string\n\n): RowData => {\n\n return {\n\n name,\n\n courseNumber,\n\n credit,\n\n grade,\n\n type,\n\n state,\n\n semester,\n\n sg_name,\n\n msg,\n\n };\n", "file_path": "packages/sogrim-app/src/components/Pages/SemestersPage/SemesterTable/SemesterTableUtils.ts", "rank": 55, "score": 60463.68537200728 }, { "content": "export const courseFromUserValidations = (\n\n course: RowData,\n\n semesterRows: RowData[]\n\n): courseFromUserValidationsValue => {\n\n if (!validCourseNumber(course.courseNumber)) {\n\n return {\n\n error: true,\n\n newRowData: emptyRow,\n\n msg: \"מספר הקורס שהכנסת לא תקין. מספר קורס מכיל 6 ספרות בלבד.\",\n\n };\n\n }\n\n if (!validCourseCredit(course.credit)) {\n\n return {\n\n error: true,\n\n newRowData: emptyRow,\n\n msg: \"נק״ז חייב להיות מספר גדול שווה מאפס, ובקפיצות של 0.5\",\n\n };\n\n }\n\n if (!validGrade(course.grade)) {\n\n return {\n\n error: true,\n\n newRowData: emptyRow,\n\n msg: \"ציון חייב להיות מספר בין 0 ל-100 או אחת מהאופציות הלא מספריות (עבר, נכשל, לא השלים)\",\n\n };\n\n }\n\n\n\n let newGrade = course.grade === \"\" ? undefined : course.grade;\n\n let newState = newGrade ? \"לא הושלם\" : \"בתהליך\"; // The hebrew flips the conditions\n\n if (\n\n course.grade &&\n\n (course.grade === \"עבר\" ||\n\n course.grade === \"פטור ללא ניקוד\" ||\n\n course.grade === \"פטור עם ניקוד\" ||\n\n +course.grade >= 55)\n\n ) {\n\n newState = \"הושלם\";\n\n }\n\n const fixedNewRowData: RowData = {\n\n name: course.name,\n\n courseNumber: course.courseNumber,\n\n semester: course.semester,\n\n credit: course.credit,\n\n state: newState,\n\n type: course.type,\n\n grade: newGrade,\n\n };\n\n\n\n return {\n\n error: false,\n\n newRowData: fixedNewRowData,\n\n msg: \"\",\n\n };\n", "file_path": "packages/sogrim-app/src/components/Pages/SemestersPage/SemesterTable/CourseValidator.ts", "rank": 56, "score": 60358.766095247665 }, { "content": "export const courseFromUserValidations = (\n\n course: RowData,\n\n semesterRows: RowData[],\n\n newFlag: boolean = false\n\n): courseFromUserValidationsValue => {\n\n if (!validCourseNumber(course.courseNumber, semesterRows, newFlag)) {\n\n return {\n\n error: true,\n\n newRowData: emptyRow,\n\n msg: \" מספר הקורס שהוזן אינו תקין. מס׳ קורס חייב להכיל 6 ספרות בלבד, וכן אי אפשר לקחת פעמיים קורס באותו הסמסטר.\",\n\n };\n\n }\n\n if (!validCourseCredit(course.credit)) {\n\n return {\n\n error: true,\n\n newRowData: emptyRow,\n\n msg: \"נק״ז חייב להיות מספר גדול שווה מאפס, ובקפיצות של 0.5\",\n\n };\n\n }\n\n if (!validGrade(course.grade)) {\n\n return {\n\n error: true,\n\n newRowData: emptyRow,\n\n msg: \"ציון חייב להיות מספר בין 0 ל-100 או אחת מהאופציות הלא מספריות (עבר, נכשל, לא השלים)\",\n\n };\n\n }\n\n\n\n let newType =\n\n course.type === \"\" || course.type === \"-\" ? undefined : course.type;\n\n let newGrade =\n\n course.grade === \"\" || course.grade === \"-\" ? undefined : course.grade;\n\n let newState = newGrade ? \"לא הושלם\" : \"בתהליך\"; // The hebrew flips the conditions\n\n if (\n\n course.grade &&\n\n (course.grade === \"עבר\" ||\n\n course.grade === \"פטור ללא ניקוד\" ||\n\n course.grade === \"פטור עם ניקוד\" ||\n\n +course.grade >= 55)\n\n ) {\n\n newState = \"הושלם\";\n\n }\n\n const fixedNewRowData: RowData = {\n\n name: course.name,\n\n courseNumber: course.courseNumber,\n\n semester: course.semester,\n\n credit: course.credit,\n\n state: newState,\n\n type: newType,\n\n grade: newGrade,\n\n };\n\n\n\n return {\n\n error: false,\n\n newRowData: fixedNewRowData,\n\n msg: \"\",\n\n };\n", "file_path": "packages/sogrim-app/src/components/Pages/SemestersPage/SemesterGrid/course-validator.ts", "rank": 57, "score": 60358.766095247665 }, { "content": "export async function callWithFallback<R>(\n\n call: Promise<R>,\n\n fallback: any\n\n): Promise<R> {\n\n let result: R;\n\n try {\n\n result = (await call) || fallback;\n\n } catch {\n\n result = fallback;\n\n }\n\n\n\n return result;\n\n}\n", "file_path": "packages/sogrim-app/src/services/api-utils.ts", "rank": 58, "score": 60239.83079106912 }, { "content": "export const API_URL =\n\n process.env.NODE_ENV === \"development\"\n\n ? process.env.REACT_APP_API_URL_DEV\n\n : process.env.REACT_APP_API_URL_REL;\n", "file_path": "packages/sogrim-app/src/services/api-url.ts", "rank": 59, "score": 60239.83079106912 }, { "content": "// jest-dom adds custom jest matchers for asserting on DOM nodes.\n\n// allows you to do things like:\n\n// expect(element).toHaveTextContent(/react/i)\n\n// learn more: https://github.com/testing-library/jest-dom\n\nimport \"@testing-library/jest-dom\";\n", "file_path": "packages/sogrim-app/src/setupTests.ts", "rank": 60, "score": 60187.154585809556 }, { "content": "export interface courseFromUserValidationsValue {\n\n error: boolean;\n\n newRowData: RowData;\n\n msg: string;\n", "file_path": "packages/sogrim-app/src/components/Pages/SemestersPage/SemesterTable/CourseValidator.ts", "rank": 61, "score": 59434.81842218227 }, { "content": "export interface courseFromUserValidationsValue {\n\n error: boolean;\n\n newRowData: RowData;\n\n msg: string;\n", "file_path": "packages/sogrim-app/src/components/Pages/SemestersPage/SemesterGrid/course-validator.ts", "rank": 62, "score": 59434.81842218227 }, { "content": "// generates all subsets of size specialization_groups.groups_number and checks if one of them is fulfilled\n\nfn generate_sgs_subsets(\n\n sgs: &[SpecializationGroup],\n\n required_number_of_groups: usize,\n\n sg_index: usize,\n\n groups_indices: &mut Vec<usize>,\n\n courses: &[CourseId],\n\n best_match: &mut HashMap<CourseId, usize>,\n\n) -> Option<HashMap<CourseId, usize>> {\n\n if groups_indices.len() == required_number_of_groups {\n\n return get_sgs_courses_assignment(sgs, groups_indices, courses, best_match);\n\n }\n\n\n\n if sg_index >= sgs.len() {\n\n return None;\n\n }\n\n\n\n // current group is included\n\n groups_indices.push(sg_index);\n\n if let Some(valid_assignment) = generate_sgs_subsets(\n\n sgs,\n", "file_path": "packages/server/src/core/bank_rule/specialization_groups.rs", "rank": 63, "score": 57439.32736992248 }, { "content": "fn run_exhaustive_search(\n\n sgs: &SpecializationGroups,\n\n courses: Vec<CourseId>, // list of all courses the user completed in specialization groups bank\n\n) -> HashMap<CourseId, usize> {\n\n let mut best_match = HashMap::new();\n\n generate_sgs_subsets(\n\n &sgs.groups_list,\n\n sgs.groups_number,\n\n 0,\n\n &mut Vec::new(),\n\n &courses,\n\n &mut best_match,\n\n )\n\n .or(Some(best_match))\n\n .unwrap() // unwraping is safe since the line above always returns Some(_)\n\n}\n\n\n\nimpl<'a> BankRuleHandler<'a> {\n\n pub fn specialization_group(\n\n mut self,\n", "file_path": "packages/server/src/core/bank_rule/specialization_groups.rs", "rank": 64, "score": 57439.32736992248 }, { "content": "// This function is looking for a valid assignment for the courses which fulfill the sgs requirements\n\n// If an assignment is found it returns it, None otherwise.\n\nfn find_valid_assignment_for_courses(\n\n sgs: &[SpecializationGroup],\n\n groups_indices: &[usize],\n\n optional_sgs_for_course: &HashMap<CourseId, Vec<usize>>, // list of all optional sgs for each course\n\n current_best_match: &mut HashMap<CourseId, usize>, // the best match of sgs\n\n course_id_to_sg_index: &mut HashMap<CourseId, usize>,\n\n course_index: usize, // course_index-th element in optional_sgs_for_course\n\n) -> Option<HashMap<CourseId, usize>> {\n\n if course_index >= optional_sgs_for_course.len() {\n\n let complete_sgs_indices = get_complete_sgs_indices(sgs, course_id_to_sg_index);\n\n if complete_sgs_indices.len() >= groups_indices.len() {\n\n return Some(course_id_to_sg_index.clone());\n\n }\n\n let complete_sgs_for_current_best_match = get_complete_sgs_indices(sgs, current_best_match);\n\n if complete_sgs_indices.len() > complete_sgs_for_current_best_match.len() {\n\n current_best_match.clear();\n\n current_best_match.extend(course_id_to_sg_index.to_owned());\n\n }\n\n return None;\n\n }\n", "file_path": "packages/server/src/core/bank_rule/specialization_groups.rs", "rank": 65, "score": 56815.566868762995 }, { "content": "pub fn set_order(\n\n course_banks: &[CourseBank],\n\n credit_overflow_rules: &[CreditOverflow],\n\n) -> Vec<CourseBank> {\n\n let mut names_to_indices = HashMap::new();\n\n let mut indices_to_names = HashMap::new();\n\n let mut g = Graph::<String, ()>::new();\n\n for course_bank in course_banks {\n\n let node_idx = g.add_node(course_bank.name.clone());\n\n names_to_indices.insert(course_bank.name.clone(), node_idx);\n\n indices_to_names.insert(node_idx, course_bank.name.clone());\n\n }\n\n for credit_rule in credit_overflow_rules {\n\n g.add_edge(\n\n names_to_indices[&credit_rule.from],\n\n names_to_indices[&credit_rule.to],\n\n (),\n\n );\n\n }\n\n let order = toposort(&g, None).unwrap();\n", "file_path": "packages/server/src/core/toposort.rs", "rank": 66, "score": 55352.19300292432 }, { "content": "pub fn init_env_logger() {\n\n env_logger::Builder::from_env(env_logger::Env::new().default_filter_or(\"info\"))\n\n .format_timestamp(None)\n\n .init();\n\n}\n", "file_path": "packages/server/src/logger.rs", "rank": 67, "score": 55352.19300292432 }, { "content": "pub fn cannot_find_course() -> String {\n\n \"שגיאה - קורס לא נמצא\".to_string()\n\n}\n", "file_path": "packages/server/src/core/messages.rs", "rank": 68, "score": 51863.00069911193 }, { "content": "pub fn init_actix_logger() -> Logger {\n\n Logger::new(\n\n format!(\n\n \"{} | {} | {} seconds\",\n\n \"%r\".yellow(),\n\n \"%s\".bold().magenta(),\n\n \"%T\".cyan()\n\n )\n\n .as_str(),\n\n )\n\n .log_target(\"sogrim_server\")\n\n}\n", "file_path": "packages/server/src/logger.rs", "rank": 69, "score": 51863.00069911193 }, { "content": "pub fn cors() -> actix_cors::Cors {\n\n let cors = Cors::default()\n\n .allowed_methods(vec![\"GET\", \"POST\", \"PUT\", \"DELETE\"])\n\n .allowed_headers(vec![header::AUTHORIZATION, header::CONTENT_TYPE]);\n\n if CONFIG.profile == \"debug\" {\n\n cors.allowed_origin_fn(|origin, _req_head| {\n\n origin.as_bytes().starts_with(b\"http://localhost\")\n\n })\n\n } else {\n\n cors.allowed_origin(\"https://sogrim.org\")\n\n .allowed_origin(\"https://students.sogrim.org\")\n\n }\n\n}\n", "file_path": "packages/server/src/cors.rs", "rank": 70, "score": 49843.96030481332 }, { "content": "import {Catalog, CourseState, SemesterTerm, User, UserCourse} from './sogrim-app-types';\n\n\n\n\n\nconst catalogExmaple: Catalog = {\n\n displayName: '2018-2019 תלת שנתי',\n\n id: '1234-ss-aa-1234',\n\n maslul: '3 years',\n\n yearOfPublish: '2018-2019', \n\n}\n\n/*\n\nGet All Catalogs Example:\n\nShould return ***ALL** valid catalogs.\n\n*/\n\nconst catalogsInit: Catalog[] = [\n\n catalogExmaple,\n\n {\n\n displayName: '2019-2020 תלת שנתי',\n\n id: '12das4-ss-aa-1234',\n\n maslul: '4 yers',\n\n yearOfPublish: '2019-2020', \n\n },\n\n {\n\n displayName: '2019-2020 ביו-אינפורמטיקה',\n\n id: '12das4-s321-asdaa-1234',\n\n maslul: '4 years bioinfo',\n\n yearOfPublish: '2019-2020', \n\n }\n\n]\n\n\n\nconst course1: UserCourse = {\n\n id: '103006', // real course number (this is not the real one for infi...)\n\n displayName: 'חשבון אינפיטיסימלי 1מ׳',\n\n points: 5.5,\n\n courseType: 'CSMandatory', // course types comes as string from server.\n\n TakenInSemester: {\n\n yeaer: '2018-2019',\n\n term: SemesterTerm.Fall,\n\n },\n\n grade: 56, // optional\n\n status: CourseState.Completed,\n\n editedByUser: false, // Means that data comes from Parsing gilion zionim... \n\n placeHolder: false, // Means that a real course. \n\n}\n\n\n\nconst course2: UserCourse = {\n\n id: '263263',\n\n displayName: 'פרוייקט בבינה מלאכותית',\n\n points: 3,\n\n courseType: 'CSProject', // course types comes as string from server.\n\n TakenInSemester: { // user can set futer semeter - for planning\n\n yeaer: '2022-2023',\n\n term: SemesterTerm.Spring,\n\n },\n\n status: CourseState.Planning, // \n\n editedByUser: true, // Means that the user insert this course and \n\n placeHolder: false, // Means that a real course. \n\n}\n\n\n\n// דוגמא די מורכבת על איך אני אמור לקבל ״מידע חסר״ עבור שרשראות מסוימות. נדבר על זה בזום.\n\nconst course3: UserCourse = {\n\n id: 'cs-sience-mandatory-000000', // special id for placeholer for each type and category!\n\n displayName: 'שרשרת מדעית',\n\n points: 8,\n\n courseType: 'CSSience', // course types comes as string from server.\n\n TakenInSemester: { // with the recomandded semster - if placeholders.\n\n yeaer: '2021-2022',\n\n term: SemesterTerm.Spring,\n\n },\n\n status: CourseState.UnCompleted, // Not done yet. will show the user that he neet to do this...\n\n editedByUser: false, \n\n placeHolder: true, // Means that the server send it to the clinet, for indicate that user have uncomplete courses.\n\n extraDetails: { // נדבר על זה בזום. זה אובייקט גנרי עבור גמישות של שפה משותפת.\n\n text: 'שרשרת מדעית זה חשוב, צריך ככה וככה וככה וככה וכו׳ וכו׳',\n\n extraDetailsType: 'CSSienceExtraDetails',\n\n payLoadJson: {} as JSON, // יאפשר לנו גמישות בשליחת מידע עבור כל מיני שרשראות ואופציות בחירה מעניינות. שפה משותפת \n\n }\n\n}\n\n\n\nconst course4: UserCourse = {\n\n id: '236512', // special id for placeholer for each type and category!\n\n displayName: 'תורת הקומפליציה',\n\n points: 3,\n\n courseType: 'CSMandatory', // course types comes as string from server.\n\n TakenInSemester: { // with the recomandded semster - if placeholders.\n\n yeaer: '2021-2022',\n\n term: SemesterTerm.Fall,\n\n },\n\n status: CourseState.UnCompleted, // Not done yet. will show the user that he neet to do this...\n\n editedByUser: false, \n\n placeHolder: false, \n\n}\n\n\n\n// חשוב רק להבהיר - שבכל קריאה לשרת, ירדו רשימת כלל הקורסים - אלו שבוצעו, ואלו שלא בוצעו. את הפליטור אני עושה בצד הלקוח.\n\n// כלומר, סכום הנקודות של כל הקורסים באובייקט זה צריך להיות עבור כל הקטלוג.\n\n// נדבר על זה בזום!\n\nconst userCoursesExample: UserCourse[] = [\n\n course1,\n\n course2,\n\n course3,\n\n course4 \n\n]\n\n\n\n/*\n\nGet User State Example 1:\n\n*/\n\nconst NissoDummyUser: User = {\n\n id: 'afgfd-4234-dfs-43242',\n\n displayName: 'Nisso Ohana',\n\n selectedCatalog: catalogExmaple,\n\n userCourses: userCoursesExample, \n\n}\n\n\n", "file_path": "packages/docs/sogrim-app-mocks.ts", "rank": 71, "score": 47506.14265422645 }, { "content": "export enum CourseState {\n\n Completed = \"Completed\",\n\n UnCompleted = \"UnCompleted\",\n\n Planning = \"Planing\", \n\n}\n\n\n\nexport enum SemesterTerm {\n\n Fall = 'Fall',\n\n Spring = 'Spring',\n\n Summer = 'Summer',\n\n}\n\n\n\nexport type Catalog = {\n\n id: string;\n\n displayName: string;\n\n maslul: string;\n\n yearOfPublish: string; \n\n}\n\n\n\nexport type Semester = {\n\n yeaer: string;\n\n term: SemesterTerm;\n\n}\n\n\n\nexport type extraDetails = {\n\n text: string;\n\n extraDetailsType: string;\n\n payLoadJson?: JSON;\n\n}\n\nexport type UserCourse = {\n\n id: string;\n\n displayName: string;\n\n points: number;\n\n grade?: number\n\n TakenInSemester: Semester;\n\n status: CourseState; \n\n courseType: string; \n\n editedByUser: boolean;\n\n placeHolder: boolean; \n\n extraDetails?: extraDetails;\n\n}\n\n\n\nexport type User = {\n\n id: string;\n\n displayName: string;\n\n avater?: any;\n\n selectedCatalog: Catalog;\n\n userCourses: UserCourse[]; \n\n\n", "file_path": "packages/docs/sogrim-app-types.ts", "rank": 72, "score": 47506.14265422645 }, { "content": "const AppComp: React.FC = () => {\n\n const [mode] = useState<typeof LIGHT_MODE_THEME | typeof DARK_MODE_THEME>(\n\n LIGHT_MODE_THEME\n\n );\n\n const theme = useMemo(() => getAppTheme(mode), [mode]);\n\n\n\n const matches = useMediaQuery(theme.breakpoints.up(\"sm\"));\n\n\n\n const {\n\n uiStore: { setUserDisplay, goToMainPage },\n\n } = useStore();\n\n const { isAuthenticated, googleSession, userAuthToken } = useAuth();\n\n\n\n useEffect(() => {\n\n if (!isMobile || !matches) {\n\n if (googleSession === GoogleClientSession.DONE) {\n\n goToMainPage();\n\n if (userAuthToken) {\n\n setUserDisplay(jwtDecode(userAuthToken));\n\n }\n\n }\n\n }\n\n // TODO check\n\n // eslint-disable-next-line react-hooks/exhaustive-deps\n\n }, [isAuthenticated, googleSession, userAuthToken, setUserDisplay]);\n\n\n\n // Create rtl cache\n\n const cacheRtl = createCache({\n\n key: \"muirtl\",\n\n stylisPlugins: [rtlPlugin],\n\n });\n\n\n\n return (\n\n <ThemeProvider theme={theme}>\n\n <CacheProvider value={cacheRtl}>\n\n {isMobile || !matches ? (\n\n <MobilePage />\n\n ) : (\n\n <>\n\n <GoogleAuth />\n\n {isAuthenticated ? <UserApp /> : <AnonymousApp />}\n\n <Footer />\n\n </>\n\n )}\n\n </CacheProvider>\n\n </ThemeProvider>\n\n );\n", "file_path": "packages/sogrim-app/src/components/App/App.tsx", "rank": 73, "score": 46826.22903423005 }, { "content": "pub fn common_replacements_msg(name: &str) -> String {\n\n format!(\n\n \"הנחנו כי קורס זה מחליף את הקורס {} בעקבות החלפות נפוצות. שימו לב כי נדרש אישור מהרכזות בשביל החלפה זו\",\n\n name\n\n )\n\n}\n\n\n", "file_path": "packages/server/src/core/messages.rs", "rank": 74, "score": 46365.61681682117 }, { "content": "pub fn catalog_replacements_msg(name: &str) -> String {\n\n format!(\"קורס זה מחליף את הקורס {}\", name)\n\n}\n\n\n", "file_path": "packages/server/src/core/messages.rs", "rank": 75, "score": 46365.61681682117 }, { "content": "pub fn credit_leftovers_msg(credit: f32) -> String {\n\n if credit == ZERO {\n\n \"אין לך נקודות עודפות\".to_string()\n\n } else if credit == SINGLE {\n\n \"יש לך נקודה עודפת אחת\".to_string()\n\n } else if credit == HALF {\n\n \"יש לך חצי נקודה עודפת\".to_string()\n\n } else {\n\n format!(\"יש לך {} נקודות עודפות\", credit)\n\n }\n\n}\n\n\n", "file_path": "packages/server/src/core/messages.rs", "rank": 76, "score": 46365.61681682117 }, { "content": "pub fn completed_chain_msg(chain: &[String]) -> String {\n\n let mut msg = \"השלמת את השרשרת: \".to_string();\n\n for course in chain {\n\n if course == chain.last().unwrap() {\n\n msg += course;\n\n } else {\n\n msg += &format!(\"{}, \", course);\n\n }\n\n }\n\n msg\n\n}\n\n\n", "file_path": "packages/server/src/core/messages.rs", "rank": 77, "score": 46365.61681682117 }, { "content": "const AnonymousAppComp: React.FC = () => {\n\n const [mode] = useState<typeof LIGHT_MODE_THEME | typeof DARK_MODE_THEME>(\n\n LIGHT_MODE_THEME\n\n );\n\n const theme = useMemo(() => getAppTheme(mode), [mode]);\n\n\n\n const {\n\n uiStore: { currentPage },\n\n } = useStore();\n\n return (\n\n <ThemeProvider theme={theme}>\n\n <BannerAnonymous />\n\n {currentPage === PageState.FAQ ? (\n\n <FAQPage />\n\n ) : (\n\n <Box sx={{ m: 2, display: \"flex\", justifyContent: \"center\" }}>\n\n <LandingPageSvg />\n\n </Box>\n\n )}\n\n </ThemeProvider>\n\n );\n", "file_path": "packages/sogrim-app/src/components/App/AnonymousApp.tsx", "rank": 78, "score": 46242.042216421076 }, { "content": "export const AppTitle: React.FC = () => {\n\n return (\n\n <Typography variant=\"h4\" noWrap sx={sxAppTitle}>\n\n {\"סוגרים\"}\n\n </Typography>\n\n );\n", "file_path": "packages/sogrim-app/src/components/Header/AppTitle/AppTitle.tsx", "rank": 79, "score": 45955.380997486536 }, { "content": "const AppPagesComp: React.FC = () => {\n\n const {\n\n uiStore: { userRegistrationState, computeUserRegistrationState },\n\n } = useStore();\n\n\n\n const { userAuthToken } = useAuth();\n\n const { data, isLoading, refetch } = useUserState(userAuthToken);\n\n\n\n React.useEffect(() => {\n\n const refreshStepper = async () => {\n\n if (data && !isLoading) {\n\n const { data: newData } = await refetch();\n\n if (newData) {\n\n computeUserRegistrationState(newData.details);\n\n }\n\n }\n\n };\n\n refreshStepper();\n\n }, [\n\n computeUserRegistrationState,\n\n data,\n\n isLoading,\n\n refetch,\n\n userRegistrationState,\n\n ]);\n\n\n\n return (\n\n <Box sx={sxPages}>\n\n {userRegistrationState === UserRegistrationState.Loading ? (\n\n <CircularProgress />\n\n ) : userRegistrationState === UserRegistrationState.Ready ? (\n\n <PagesTabs />\n\n ) : (\n\n <Box sx={{ flexDirection: \"column\" }}>\n\n <AppStepper />\n\n <Disclaimer />\n\n </Box>\n\n )}\n\n </Box>\n\n );\n", "file_path": "packages/sogrim-app/src/components/AppPages/AppPages.tsx", "rank": 80, "score": 45955.380997486536 }, { "content": "const AppStepperComp: React.FC = () => {\n\n const [coursesModalOpen, coursesModalsetOpen] = React.useState(false);\n\n const [catalogsModalOpen, catalogsModalsetOpen] = React.useState(false);\n\n const [triggerCompute, setTriggerCompute] = React.useState(false);\n\n const [skipLoading, setSkipLoading] = React.useState(false);\n\n\n\n const [activeStep, setActiveStep] = React.useState<number>(0);\n\n const {\n\n uiStore: { computeUserRegistrationState, setErrorMsg, errorMsg },\n\n } = useStore();\n\n\n\n const { userAuthToken } = useAuth();\n\n const { data, isLoading, refetch } = useUserState(userAuthToken);\n\n const {\n\n data: tcData,\n\n isLoading: tcIsLoading,\n\n isError: tcIsError,\n\n } = useComputeEndGame(userAuthToken, triggerCompute);\n\n\n\n React.useEffect(() => {\n\n let unmounted = false;\n\n const refreshStepper = async () => {\n\n if (\n\n !unmounted &&\n\n data &&\n\n !isLoading &&\n\n (!coursesModalOpen || !catalogsModalOpen)\n\n ) {\n\n const { data: newData } = await refetch();\n\n if (newData) {\n\n const rs = computeUserRegistrationState(newData.details);\n\n setActiveStep(rs);\n\n }\n\n }\n\n };\n\n if (!unmounted) {\n\n refreshStepper();\n\n }\n\n return () => {\n\n unmounted = true;\n\n };\n\n }, [\n\n coursesModalOpen,\n\n catalogsModalOpen,\n\n data,\n\n setActiveStep,\n\n computeUserRegistrationState,\n\n refetch,\n\n isLoading,\n\n ]);\n\n\n\n React.useEffect(() => {\n\n let unmounted = false;\n\n\n\n if (!unmounted && tcIsError) {\n\n // TODO: error state\n\n } else if (!unmounted && tcData && !tcIsLoading) {\n\n setTriggerCompute(false);\n\n }\n\n return () => {\n\n unmounted = true;\n\n };\n\n }, [tcData, tcIsLoading, tcIsError]);\n\n\n\n const coursesHandleClickOpen = () => {\n\n coursesModalsetOpen(true);\n\n };\n\n\n\n const coursesHandleClose = () => {\n\n coursesModalsetOpen(false);\n\n };\n\n\n\n const coursesHandleSkip = () => {\n\n coursesModalsetOpen(false);\n\n setActiveStep(3);\n\n setTriggerCompute(true);\n\n setSkipLoading(true);\n\n };\n\n\n\n const catalogsHandleClickOpen = () => {\n\n catalogsModalsetOpen(true);\n\n };\n\n\n\n const catalogsHandleClose = () => {\n\n catalogsModalsetOpen(false);\n\n };\n\n\n\n const handleTriggerCompute = () => {\n\n setTriggerCompute(true);\n\n };\n\n\n\n const handleOnClick = async (index: number) => {\n\n if (index === 0) {\n\n catalogsHandleClickOpen();\n\n } else if (index === 1) {\n\n coursesHandleClickOpen();\n\n } else if (index === 2) {\n\n handleTriggerCompute();\n\n }\n\n };\n\n\n\n const handleBack = () => {\n\n setActiveStep((prevActiveStep) => prevActiveStep - 1);\n\n };\n\n\n\n const handleError = (msg: string) => {\n\n setErrorMsg(msg);\n\n };\n\n\n\n return (\n\n <>\n\n {!skipLoading ? (\n\n <Box sx={{ minWidth: 400, marginTop: \"20px\" }}>\n\n <Stepper activeStep={activeStep} orientation=\"vertical\">\n\n {steps.map((step, index) => (\n\n <Step key={step.label}>\n\n <StepLabel\n\n color=\"white\"\n\n optional={\n\n index === 2 ? (\n\n <Typography color=\"white\" variant=\"caption\">\n\n Last step\n\n </Typography>\n\n ) : null\n\n }\n\n >\n\n <Typography variant=\"h4\">{step.label}</Typography>\n\n </StepLabel>\n\n <StepContent>\n\n <Typography color=\"white\">{step.description}</Typography>\n\n <Box sx={{ mb: 2 }}>\n\n <Box>\n\n <Button\n\n variant=\"contained\"\n\n onClick={() => handleOnClick(index)}\n\n sx={{ mt: 1, msScrollLimitXMin: 1 }}\n\n >\n\n {step.label}\n\n </Button>\n\n <Button\n\n disabled={index === 0}\n\n onClick={handleBack}\n\n sx={{ mt: 1, ml: 1 }}\n\n >\n\n חזור\n\n </Button>\n\n </Box>\n\n </Box>\n\n </StepContent>\n\n </Step>\n\n ))}\n\n </Stepper>\n\n <FormModal\n\n dialogContent={\n\n <ImportGilion\n\n handleSkip={coursesHandleSkip}\n\n handleClose={coursesHandleClose}\n\n handleError={handleError}\n\n />\n\n }\n\n handleClose={coursesHandleClose}\n\n open={coursesModalOpen}\n\n />\n\n <FormModal\n\n dialogContent={<SelectCatalog handleClose={catalogsHandleClose} />}\n\n handleClose={catalogsHandleClose}\n\n open={catalogsModalOpen}\n\n />\n\n <ErrorToast msg={errorMsg} />\n\n </Box>\n\n ) : (\n\n <CircularProgress />\n\n )}\n\n </>\n\n );\n", "file_path": "packages/sogrim-app/src/components/Banner/AppStepper.tsx", "rank": 81, "score": 43784.11887980904 }, { "content": "pub fn credit_overflow_detailed_msg(from: &str, to: &str) -> String {\n\n format!(\"הנקודות שבוצעו ב{} נספרות תחת {}\", from, to)\n\n}\n\n\n", "file_path": "packages/server/src/core/messages.rs", "rank": 82, "score": 43599.222991248185 }, { "content": "pub fn completed_specialization_groups_msg(groups: &[String], needed: usize) -> String {\n\n let mut msg = if groups.len() == ZERO as usize {\n\n \"לא השלמת אף קבוצת התמחות\".to_string()\n\n } else if groups.len() == SINGLE as usize {\n\n format!(\"השלמת קבוצת התמחות אחת (מתוך {}): \", needed)\n\n } else {\n\n format!(\"השלמת {} (מתוך {}) קבוצות התמחות: \", groups.len(), needed)\n\n };\n\n for group in groups {\n\n if group == groups.last().unwrap() {\n\n msg += group;\n\n } else {\n\n msg += &format!(\"{}, \", group);\n\n }\n\n }\n\n msg\n\n}\n\n\n", "file_path": "packages/server/src/core/messages.rs", "rank": 83, "score": 42448.758320508925 }, { "content": "pub fn missing_credit_msg(overflow: f32, from: &str, to: &str) -> String {\n\n if overflow == SINGLE {\n\n format!(\n\n \"סך הנקודות של הקורסים שלקחת ב{} נמוך מהדרישה המקורית, לכן נקודה אחת התווספה לדרישה של {}\",\n\n from, to\n\n )\n\n } else {\n\n format!(\n\n \"סך הנקודות של הקורסים שלקחת ב{} נמוך מהדרישה המקורית, לכן {} נקודות התווספו לדרישה של {}\",\n\n from, overflow, to\n\n )\n\n }\n\n}\n\n\n", "file_path": "packages/server/src/core/messages.rs", "rank": 84, "score": 41559.8063354655 }, { "content": "pub fn courses_overflow_msg(overflow: f32, from: &str, to: &str) -> String {\n\n if overflow == SINGLE {\n\n format!(\n\n \"ביצעת יותר קורסים ממה שנדרש ב{}, הקורס העודף נספר תחת הדרישה {}\",\n\n from, to\n\n )\n\n } else {\n\n format!(\n\n \"ביצעת יותר קורסים ממה שנדרש ב{}, {} הקורסים העודפים נספרים תחת הדרישה {}\",\n\n from, overflow, to\n\n )\n\n }\n\n}\n\n\n", "file_path": "packages/server/src/core/messages.rs", "rank": 85, "score": 41559.8063354655 }, { "content": "pub fn credit_overflow_msg(overflow: f32, from: &str, to: &str) -> String {\n\n if overflow == SINGLE {\n\n format!(\"נקודה אחת עברה מ{} ל{}\", from, to)\n\n } else if overflow == HALF {\n\n format!(\"חצי נקודה עברה מ{} ל{}\", from, to)\n\n } else {\n\n format!(\"עברו {} נקודות מ{} ל{}\", overflow, from, to)\n\n }\n\n}\n\n\n", "file_path": "packages/server/src/core/messages.rs", "rank": 86, "score": 41559.8063354655 }, { "content": "use actix_web::{HttpResponse, ResponseError};\n\nuse colored::Colorize;\n\nuse derive_more::Display;\n\n\n\n#[derive(Debug, Display)]\n\npub enum AppError {\n\n BadRequest(String), // 400\n\n Bson(String), // 400\n\n Parser(String), // 400\n\n NotFound(String), // 404\n\n InternalServer(String), // 500\n\n Middleware(String), // 500\n\n MongoDriver(String), // 500\n\n}\n\n\n\nimpl ResponseError for AppError {\n\n fn error_response(&self) -> HttpResponse {\n\n let error;\n\n let resp = match self {\n\n AppError::BadRequest(e) => {\n", "file_path": "packages/server/src/error.rs", "rank": 87, "score": 41343.18636465923 }, { "content": " error = e.to_owned();\n\n HttpResponse::BadRequest().body(error.clone())\n\n }\n\n AppError::Bson(e) => {\n\n error = format!(\"Bson error: {}\", e);\n\n HttpResponse::BadRequest().body(error.clone())\n\n }\n\n AppError::Parser(e) => {\n\n error = format!(\"Parser error: {}\", e);\n\n HttpResponse::BadRequest().body(error.clone())\n\n }\n\n AppError::NotFound(e) => {\n\n error = format!(\"{} not found\", e);\n\n HttpResponse::NotFound().body(error.clone())\n\n }\n\n AppError::InternalServer(e) => {\n\n error = e.to_owned();\n\n HttpResponse::InternalServerError().body(error.clone())\n\n }\n\n AppError::Middleware(e) => {\n", "file_path": "packages/server/src/error.rs", "rank": 88, "score": 41341.18780076477 }, { "content": " error = format!(\"Middleware error: {}\", e);\n\n HttpResponse::InternalServerError().body(error.clone())\n\n }\n\n AppError::MongoDriver(e) => {\n\n error = format!(\"MongoDB driver error: {}\", e);\n\n HttpResponse::InternalServerError().body(error.clone())\n\n }\n\n };\n\n log::error!(\"{}\", error.bold().red());\n\n resp\n\n }\n\n}\n", "file_path": "packages/server/src/error.rs", "rank": 89, "score": 41334.575514094846 }, { "content": "pub fn vec_to_map(vec: Vec<Course>) -> HashMap<CourseId, Course> {\n\n HashMap::from_iter(\n\n vec.clone()\n\n .iter()\n\n .map(|course| course.id.clone())\n\n .zip(vec.into_iter()),\n\n )\n\n}\n", "file_path": "packages/server/src/resources/course.rs", "rank": 90, "score": 41109.68548504874 }, { "content": "use crate::{\n\n error::AppError,\n\n resources::course::{Course, CourseStatus, Grade},\n\n};\n\nuse std::collections::HashMap;\n\n\n", "file_path": "packages/server/src/core/parser.rs", "rank": 91, "score": 40468.59915355863 }, { "content": " .split(' ')\n\n .last()\n\n .ok_or_else(|| AppError::Parser(\"Bad Format\".into()))?\n\n .trim();\n\n\n\n let grade = match grade_str as &str {\n\n \"ניקוד\" => {\n\n if clean_line.contains(\"ללא\") {\n\n Some(Grade::ExemptionWithoutCredit)\n\n } else {\n\n Some(Grade::ExemptionWithCredit)\n\n }\n\n }\n\n \"עבר\" => Some(Grade::Binary(true)),\n\n \"נכשל\" => Some(Grade::Binary(false)), //TODO כתוב נכשל או שכתוב לא עבר?\n\n \"השלים\" if clean_line.contains(\"לא השלים\") => Some(Grade::NotComplete),\n\n \"השלים(מ)\" if clean_line.contains(\"לא השלים\") => Some(Grade::NotComplete),\n\n _ => grade_str.parse::<u8>().ok().map(Grade::Numeric),\n\n };\n\n Ok((Course { id, credit, name }, grade))\n\n}\n", "file_path": "packages/server/src/core/parser.rs", "rank": 92, "score": 40463.18273741512 }, { "content": " // The previous grade for this course is the valid one.\n\n // Nevertheless, the previous grade will appear with an asterisk (*) in the grades pdf.\n\n // Thus, to make sure we don't ignore these cases, we have to save a list of every asterisk-marked course,\n\n // and then search this list for courses who fall in this particular case, and fix their grade.\n\n asterisk_courses.push(course_status);\n\n } else {\n\n *courses\n\n .entry(course_status.course.id.clone())\n\n .or_insert(course_status) = course_status.clone();\n\n }\n\n }\n\n let mut vec_courses = courses.into_values().collect::<Vec<_>>();\n\n\n\n // Fix the grades for said courses\n\n set_grades_for_uncompleted_courses(&mut vec_courses, asterisk_courses.clone());\n\n\n\n vec_courses.append(&mut sport_courses);\n\n\n\n if vec_courses.is_empty() {\n\n return Err(AppError::Parser(\"Invalid copy paste data\".into()));\n\n }\n\n Ok(vec_courses)\n\n}\n\n\n", "file_path": "packages/server/src/core/parser.rs", "rank": 93, "score": 40463.08744320764 }, { "content": " // This means that the credit part is no longer parsable as f32, and therefore the hyphen must be manually removed.\n\n // This won't create a problem later in the code since 'word' only lives in the for-loop scope.\n\n if word.contains('-') && word.contains('.') {\n\n word = word.replace('-', \"\").trim().to_string();\n\n }\n\n if word.parse::<f32>().is_ok() && word.contains('.') {\n\n credit = word\n\n .chars()\n\n .rev()\n\n .collect::<String>()\n\n .parse::<f32>()\n\n .unwrap();\n\n break;\n\n }\n\n index += 1;\n\n }\n\n\n\n let name = clean_line.split_whitespace().collect::<Vec<&str>>()[1..index].join(\" \");\n\n\n\n let grade_str = clean_line\n", "file_path": "packages/server/src/core/parser.rs", "rank": 94, "score": 40458.47666612274 }, { "content": " semester = if is_spring || is_summer || is_winter {\n\n semester_counter += if is_summer || semester_counter.fract() != 0.0 {\n\n 0.5\n\n } else {\n\n 1.0\n\n };\n\n\n\n let semester_term = if is_spring {\n\n \"אביב\"\n\n } else if is_summer {\n\n \"קיץ\"\n\n } else {\n\n \"חורף\"\n\n };\n\n\n\n format!(\"{}_{}\", semester_term, semester_counter)\n\n } else {\n\n semester\n\n };\n\n\n", "file_path": "packages/server/src/core/parser.rs", "rank": 95, "score": 40458.47666612274 }, { "content": " if !contains_course_number(&line) {\n\n continue;\n\n }\n\n\n\n let (course, grade) = parse_course_status_pdf_format(&line)?;\n\n\n\n let mut course_status = CourseStatus {\n\n course,\n\n semester: (!semester.is_empty()).then(|| semester.clone()),\n\n grade: grade.clone(),\n\n ..Default::default()\n\n };\n\n course_status.set_state();\n\n if course_status.is_sport() {\n\n sport_courses.push(course_status);\n\n continue;\n\n }\n\n if line.contains('*') {\n\n // If a student decides to retake a course for which he already had a grade,\n\n // and then ends up not receiving a grade (לא השלים) for that course,\n", "file_path": "packages/server/src/core/parser.rs", "rank": 96, "score": 40458.47666612274 }, { "content": "pub struct JwtDecoder {\n\n parser: Parser,\n\n}\n\n\n\nimpl JwtDecoder {\n\n // Set up a jwt parser with actual google client id\n\n pub fn new() -> Self {\n\n JwtDecoder {\n\n parser: Parser::new(CONFIG.client_id),\n\n }\n\n }\n\n // Decode the jwt and return id info (sub wrapper)\n\n pub async fn decode(&self, token: &str) -> Result<IdInfo, ParserError> {\n\n Ok(self.parser.parse::<IdInfo>(token).await?)\n\n }\n\n // Set up a debug jwt parser for testing\n\n #[cfg(test)]\n\n pub fn new_with_parser(parser: Parser) -> Self {\n\n JwtDecoder { parser }\n\n }\n", "file_path": "packages/server/src/middleware/auth.rs", "rank": 97, "score": 40387.876796429206 }, { "content": "extern crate jsonwebtoken_google;\n\n\n\nuse crate::config::CONFIG;\n\nuse actix_web::{\n\n body::MessageBody,\n\n dev::{ServiceRequest, ServiceResponse},\n\n http::header,\n\n Error, HttpMessage, HttpResponse,\n\n};\n\nuse actix_web_lab::middleware::Next;\n\nuse jsonwebtoken_google::{Parser, ParserError};\n\nuse serde::Deserialize;\n\n\n\npub type Sub = String;\n\n#[derive(Default, Debug, Deserialize)]\n\npub struct IdInfo {\n\n // Identifier of the user, guaranteed to be unique by Google.\n\n pub sub: Sub,\n\n}\n\n\n", "file_path": "packages/server/src/middleware/auth.rs", "rank": 98, "score": 40387.59617779506 }, { "content": " let (request, payload) = req.into_parts();\n\n let auth_header = match request.headers().get(header::AUTHORIZATION) {\n\n Some(header) => header,\n\n None => return_401_with_reason!(request, \"No authorization header found\"),\n\n };\n\n\n\n let jwt = match auth_header.to_str() {\n\n Ok(jwt) => jwt,\n\n Err(_) => return_401_with_reason!(request, \"Invalid authorization header\"),\n\n };\n\n\n\n let decoder = match request.app_data::<JwtDecoder>() {\n\n Some(decoder) => decoder,\n\n None => {\n\n return Ok(ServiceResponse::new(\n\n request,\n\n HttpResponse::InternalServerError().body(\"JwtDecoder not initialized\"),\n\n ));\n\n }\n\n };\n", "file_path": "packages/server/src/middleware/auth.rs", "rank": 99, "score": 40385.96893674811 } ]
Rust
nonebot_rs/src/matcher/matchers/action.rs
abrahum/nonebot-rs
8c090098a10f574637fcad16fd74357e875bd43e
use super::{Matchers, MatchersBTreeMap, MatchersHashMap}; use crate::event::{MessageEvent, MetaEvent, NoticeEvent, RequestEvent}; use crate::matcher::{action::MatchersAction, Matcher}; use std::collections::{BTreeMap, HashMap}; use tokio::sync::broadcast; impl Matchers { pub fn new( message: Option<MatchersBTreeMap<MessageEvent>>, notice: Option<MatchersBTreeMap<NoticeEvent>>, request: Option<MatchersBTreeMap<RequestEvent>>, meta: Option<MatchersBTreeMap<MetaEvent>>, ) -> Matchers { let (sender, _) = broadcast::channel(32); Matchers { message: unoptionb(&message), notice: unoptionb(&notice), request: unoptionb(&request), meta: unoptionb(&meta), bot_getter: None, action_sender: sender, config: HashMap::new(), } } pub fn new_empty() -> Matchers { Matchers::new(None, None, None, None) } pub fn get(&mut self, m: &Matchers) { self.message = m.message.clone(); self.notice = m.notice.clone(); self.request = m.request.clone(); self.meta = m.meta.clone(); } pub async fn run_on_connect(&self, bot: crate::bot::Bot, disconnect: bool) { async fn run_on_connect_<E>( matcherb: &MatchersBTreeMap<E>, bot: crate::bot::Bot, disconnect: bool, ) where E: Clone, { for (_, matcherh) in matcherb { for (_, matcher) in matcherh { let built_matcher = matcher.build(bot.clone()); let handler = built_matcher.get_handler(); let lock_handler = handler.read().await; if disconnect { lock_handler.on_bot_disconnect(matcher.clone()); } else { lock_handler.on_bot_connect(matcher.clone()); } } } } run_on_connect_(&self.message, bot.clone(), disconnect).await; run_on_connect_(&self.notice, bot.clone(), disconnect).await; run_on_connect_(&self.request, bot.clone(), disconnect).await; run_on_connect_(&self.meta, bot.clone(), disconnect).await; } pub async fn load_all_matcher_config(&self) { async fn f<E>( matcherb: &MatchersBTreeMap<E>, config: &HashMap<String, HashMap<String, toml::Value>>, ) where E: Clone, { for (_, matcherh) in matcherb { for (matcher_name, matcher) in matcherh { if let Some(data) = config.get(&matcher_name.to_lowercase()) { let handler = matcher.get_handler(); let mut lock_handler = handler.write().await; lock_handler.load_config(data.clone()); } } } } f(&self.message, &self.config).await; f(&self.notice, &self.config).await; f(&self.request, &self.config).await; f(&self.meta, &self.config).await; } #[doc(hidden)] fn add_matcher<E>( matcherb: &mut MatchersBTreeMap<E>, mut matcher: Matcher<E>, action_sender: broadcast::Sender<MatchersAction>, ) where E: Clone, { matcher.set_action_sender(action_sender); match matcherb.get_mut(&matcher.priority) { Some(h) => { h.insert(matcher.name.clone(), matcher); } None => { let mut hashmap: MatchersHashMap<E> = HashMap::new(); hashmap.insert(matcher.name.clone(), matcher.clone()); matcherb.insert(matcher.priority, hashmap); } } } pub fn add_message_matcher(&mut self, matcher: Matcher<MessageEvent>) -> &mut Self { Matchers::add_matcher(&mut self.message, matcher, self.action_sender.clone()); self } pub fn add_message_matchers(&mut self, matchers: Vec<Matcher<MessageEvent>>) -> &mut Self { for m in matchers { self.add_message_matcher(m); } self } pub fn add_notice_matcher(&mut self, matcher: Matcher<NoticeEvent>) -> &mut Self { Matchers::add_matcher(&mut self.notice, matcher, self.action_sender.clone()); self } pub fn add_request_matcher(&mut self, matcher: Matcher<RequestEvent>) -> &mut Self { Matchers::add_matcher(&mut self.request, matcher, self.action_sender.clone()); self } pub fn add_meta_matcher(&mut self, matcher: Matcher<MetaEvent>) -> &mut Self { Matchers::add_matcher(&mut self.meta, matcher, self.action_sender.clone()); self } pub fn remove_matcher(&mut self, name: &str) { fn remove_matcher_<E>(matcherb: &mut MatchersBTreeMap<E>, name: &str) where E: Clone, { for (_, matcherh) in matcherb.iter_mut() { if let Some(_) = matcherh.remove(name) { return; } } } remove_matcher_(&mut self.message, name); remove_matcher_(&mut self.notice, name); remove_matcher_(&mut self.request, name); remove_matcher_(&mut self.meta, name); } pub fn disable_matcher(&mut self, name: &str, disable: bool) { fn disable_matcher_<E>(matcherb: &mut MatchersBTreeMap<E>, name: &str, disable: bool) where E: Clone, { for (_, matcherh) in matcherb.iter_mut() { if let Some(matcher) = matcherh.get_mut(name) { matcher.set_disable(disable); } } } disable_matcher_(&mut self.message, name, disable); disable_matcher_(&mut self.notice, name, disable); disable_matcher_(&mut self.request, name, disable); disable_matcher_(&mut self.meta, name, disable); } } #[doc(hidden)] fn unoptionb<K, D>(input: &Option<BTreeMap<K, D>>) -> BTreeMap<K, D> where K: Clone + std::cmp::Ord, D: Clone, { match input { Some(t) => t.clone(), None => BTreeMap::new(), } }
use super::{Matchers, MatchersBTreeMap, MatchersHashMap}; use crate::event::{MessageEvent, MetaEvent, NoticeEvent, RequestEvent}; use crate::matcher::{action::MatchersAction, Matcher}; use std::collections::{BTreeMap, HashMap}; use tokio::sync::broadcast; impl Matchers { pub fn new( message: Option<MatchersBTreeMap<MessageEvent>>, notice: Option<MatchersBTreeMap<NoticeEvent>>, request: Option<MatchersBTreeMap<RequestEvent>>, meta: Option<MatchersBTreeMap<MetaEvent>>, ) -> Matchers { let (sender, _) = broadcast::channel(32); Matchers { message: unoptionb(&message), notice: unoptionb(&notice), request: unoptionb(&request), meta: unoptionb(&meta), bot_getter: None, action_sender: sender, config: HashMap::new(), } } pub fn new_empty() -> Matchers { Matchers::new(None, None, None, None) } pub fn get(&mut self, m: &Matchers) { self.message = m.message.clone(); self.notice = m.notice.clone(); self.request = m.request.clone(); self.meta = m.meta.clone(); } pub async fn run_on_connect(&self, bot: crate::bot::Bot, disconnect: bool) { async fn run_on_connect_<E>( matcherb: &MatchersBTreeMap<E>, bot: crate::bot::Bot, disconnect: bool, ) where E: Clone, { for (_, matcherh) in matcherb { for (_, matcher) in matcherh { let built_matcher = matcher.build(bot.clone()); let handler = built_matcher.get_handler(); let lock_handler = handler.read().await; if disconnect { lock_handler.on_bot_disconnect(matcher.clone()); } else { lock_handler.on_bot_connect(matcher.clone()); } } } } run_on_connect_(&self.message, bot.clone(), disconnect).await; run_on_connect_(&self.notice, bot.clone(), disconnect).await; run_on_connect_(&self.request, bot.clone(), disconnect).await; run_on_connect_(&self.meta, bot.clone(), disconnect).await; } pub async fn load_all_matcher_config(&self) { async fn f<E>( matcherb: &MatchersBTreeMap<E>, config: &HashMap<String, HashMap<String, toml::Value>>, ) where E: Clone, { for (_, matcherh) in matcherb { for (matcher_name, matcher) in matcherh { if let Some(data) = config.get(&matcher_name.to_lowercase()) { let handler = matcher.get_handler(); let mut lock_handler = handler.write(
} self } pub fn add_notice_matcher(&mut self, matcher: Matcher<NoticeEvent>) -> &mut Self { Matchers::add_matcher(&mut self.notice, matcher, self.action_sender.clone()); self } pub fn add_request_matcher(&mut self, matcher: Matcher<RequestEvent>) -> &mut Self { Matchers::add_matcher(&mut self.request, matcher, self.action_sender.clone()); self } pub fn add_meta_matcher(&mut self, matcher: Matcher<MetaEvent>) -> &mut Self { Matchers::add_matcher(&mut self.meta, matcher, self.action_sender.clone()); self } pub fn remove_matcher(&mut self, name: &str) { fn remove_matcher_<E>(matcherb: &mut MatchersBTreeMap<E>, name: &str) where E: Clone, { for (_, matcherh) in matcherb.iter_mut() { if let Some(_) = matcherh.remove(name) { return; } } } remove_matcher_(&mut self.message, name); remove_matcher_(&mut self.notice, name); remove_matcher_(&mut self.request, name); remove_matcher_(&mut self.meta, name); } pub fn disable_matcher(&mut self, name: &str, disable: bool) { fn disable_matcher_<E>(matcherb: &mut MatchersBTreeMap<E>, name: &str, disable: bool) where E: Clone, { for (_, matcherh) in matcherb.iter_mut() { if let Some(matcher) = matcherh.get_mut(name) { matcher.set_disable(disable); } } } disable_matcher_(&mut self.message, name, disable); disable_matcher_(&mut self.notice, name, disable); disable_matcher_(&mut self.request, name, disable); disable_matcher_(&mut self.meta, name, disable); } } #[doc(hidden)] fn unoptionb<K, D>(input: &Option<BTreeMap<K, D>>) -> BTreeMap<K, D> where K: Clone + std::cmp::Ord, D: Clone, { match input { Some(t) => t.clone(), None => BTreeMap::new(), } }
).await; lock_handler.load_config(data.clone()); } } } } f(&self.message, &self.config).await; f(&self.notice, &self.config).await; f(&self.request, &self.config).await; f(&self.meta, &self.config).await; } #[doc(hidden)] fn add_matcher<E>( matcherb: &mut MatchersBTreeMap<E>, mut matcher: Matcher<E>, action_sender: broadcast::Sender<MatchersAction>, ) where E: Clone, { matcher.set_action_sender(action_sender); match matcherb.get_mut(&matcher.priority) { Some(h) => { h.insert(matcher.name.clone(), matcher); } None => { let mut hashmap: MatchersHashMap<E> = HashMap::new(); hashmap.insert(matcher.name.clone(), matcher.clone()); matcherb.insert(matcher.priority, hashmap); } } } pub fn add_message_matcher(&mut self, matcher: Matcher<MessageEvent>) -> &mut Self { Matchers::add_matcher(&mut self.message, matcher, self.action_sender.clone()); self } pub fn add_message_matchers(&mut self, matchers: Vec<Matcher<MessageEvent>>) -> &mut Self { for m in matchers { self.add_message_matcher(m);
random
[ { "content": "#[doc(hidden)]\n\nfn command_start_(event: &mut MessageEvent, config: BotConfig) -> bool {\n\n let raw_message = remove_space(&event.get_raw_message());\n\n let command_starts = config.command_starts;\n\n if command_starts.is_empty() {\n\n return true;\n\n }\n\n for sc in &command_starts {\n\n if raw_message.starts_with(sc) {\n\n let new_raw_message = remove_space(&raw_message[sc.len()..]);\n\n event.set_raw_message(new_raw_message);\n\n return true;\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "nonebot_rs/src/builtin/prematchers.rs", "rank": 0, "score": 191379.1875848636 }, { "content": "fn log_matcherb<E>(matcherb: &MatchersBTreeMap<E>)\n\nwhere\n\n E: Clone,\n\n{\n\n if matcherb.is_empty() {\n\n return;\n\n }\n\n for (_, matcherh) in matcherb {\n\n for (name, _) in matcherh {\n\n event!(Level::INFO, \"Matcher {} is Loaded\", name.blue());\n\n }\n\n }\n\n}\n", "file_path": "nonebot_rs/src/matcher/matchers/mod.rs", "rank": 1, "score": 187877.6927655816 }, { "content": "pub fn bot_status(config: Option<&Value>) -> Matcher<MessageEvent> {\n\n let mut status = Status { test: None };\n\n if let Some(test) = config\n\n .and_then(|config| config.get(\"test\"))\n\n .and_then(|test| test.as_str())\n\n {\n\n status.test = Some(test.to_string())\n\n }\n\n Matcher::new(\"BotStatus\", status).add_rule(rules::is_superuser())\n\n}\n\n\n", "file_path": "nonebot_rs/src/builtin/bot_status.rs", "rank": 2, "score": 185582.06601770903 }, { "content": "#[async_trait]\n\npub trait Handler<E>\n\nwhere\n\n E: Clone,\n\n{\n\n /// 新 Bot 连接时,调用该函数\n\n fn on_bot_connect(&self, _: Matcher<E>) {}\n\n /// Bot 断开连接时,调用该函数\n\n fn on_bot_disconnect(&self, _: Matcher<E>) {}\n\n /// timeout drop 函数\n\n fn timeout_drop(&self, _: &Matcher<E>) {}\n\n /// 匹配函数\n\n fn match_(&self, event: &mut E) -> bool;\n\n /// 处理函数\n\n async fn handle(&self, event: E, matcher: Matcher<E>);\n\n /// Load config\n\n #[allow(unused_variables)]\n\n fn load_config(&mut self, config: HashMap<String, toml::Value>) {}\n\n}\n\n\n\nimpl<E> Matcher<E>\n", "file_path": "nonebot_rs/src/matcher/mod.rs", "rank": 3, "score": 162079.1109494112 }, { "content": "/// 判定是否为指定 Bot\n\npub fn is_bot<E>(bot_id: String) -> Rule<E>\n\nwhere\n\n E: SelfId,\n\n{\n\n let is_bot = move |event: &E, _: &BotConfig| -> bool {\n\n let self_id = event.get_self_id();\n\n if bot_id == self_id {\n\n return true;\n\n }\n\n false\n\n };\n\n Arc::new(is_bot)\n\n}\n\n\n", "file_path": "nonebot_rs/src/builtin/rules.rs", "rank": 4, "score": 151709.2244756657 }, { "content": "pub fn r6s() -> Vec<Matcher<MessageEvent>> {\n\n let mut headers = HeaderMap::new();\n\n headers.insert(\"Host\", \"www.r6s.cn\".parse().unwrap());\n\n headers.insert(\"referer\", \"https://www.r6s.cn\".parse().unwrap());\n\n headers.insert(\"user-agent\", \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.116 Safari/537.36\".parse().unwrap());\n\n headers.insert(\"x-requested-with\", \"XMLHttpRequest\".parse().unwrap());\n\n let client = Arc::new(utils::R6sClient {\n\n client: Client::new(),\n\n headers: headers,\n\n });\n\n vec![\n\n Matcher::new(\n\n \"r6s\",\n\n base::R6s {\n\n client: client.clone(),\n\n },\n\n )\n\n .add_pre_matcher(to_me())\n\n .add_pre_matcher(command_start())\n\n .set_priority(3),\n", "file_path": "nbrs_matcher_r6s/src/lib.rs", "rank": 5, "score": 144655.72180258913 }, { "content": "/// 单次复读 Matcher\n\npub fn echo() -> Matcher<MessageEvent> {\n\n Matcher::new(\"Echo\", Echo {})\n\n .add_pre_matcher(prematchers::to_me())\n\n .add_pre_matcher(prematchers::command_start())\n\n}\n\n\n\n#[doc(hidden)]\n\n#[derive(Clone)]\n\npub struct Echo2 {\n\n max_times: i64, // negative for infinite (all most)\n\n}\n\n\n\n#[doc(hidden)]\n\n#[async_trait]\n\nimpl Handler<MessageEvent> for Echo2 {\n\n on_command!(MessageEvent, \"echo mode\", \"Echo Mode\");\n\n async fn handle(&self, _: MessageEvent, matcher: Matcher<MessageEvent>) {\n\n // echo whatever you say until exit\n\n let mut max_times = self.max_times;\n\n matcher\n", "file_path": "nonebot_rs/src/builtin/echo.rs", "rank": 6, "score": 141917.95962268542 }, { "content": "/// 无限复读 Matcher\n\npub fn echo2() -> Matcher<MessageEvent> {\n\n Matcher::new(\"Echo2\", Echo2 { max_times: 0 })\n\n .add_pre_matcher(prematchers::to_me())\n\n .add_pre_matcher(prematchers::command_start())\n\n}\n", "file_path": "nonebot_rs/src/builtin/echo.rs", "rank": 7, "score": 141917.95962268542 }, { "content": "/// rcnb!!!\n\npub fn rcnb() -> Matcher<MessageEvent> {\n\n Matcher::new(\"Rcnb\", Rcnb {})\n\n .add_pre_matcher(prematchers::to_me())\n\n .add_pre_matcher(prematchers::command_start())\n\n}\n", "file_path": "nonebot_rs/src/builtin/rcnb.rs", "rank": 8, "score": 141914.02532245853 }, { "content": "/// 判定 sender 是否为 superuser\n\npub fn is_superuser<E>() -> Rule<E>\n\nwhere\n\n E: UserId,\n\n{\n\n let is_superuser = |event: &E, config: &BotConfig| -> bool {\n\n let user_id = event.get_user_id();\n\n for superuser in &config.superusers {\n\n if &user_id == superuser {\n\n return true;\n\n }\n\n }\n\n false\n\n };\n\n Arc::new(is_superuser)\n\n}\n\n\n", "file_path": "nonebot_rs/src/builtin/rules.rs", "rank": 9, "score": 132682.48445601246 }, { "content": "/// 构建 timeout 为 30s 的临时 Matcher<MessageEvent>\n\npub fn build_temp_message_event_matcher<H>(\n\n event: &MessageEvent,\n\n handler: H,\n\n) -> Matcher<MessageEvent>\n\nwhere\n\n H: Handler<MessageEvent> + Send + Sync + 'static,\n\n{\n\n use crate::event::UserId;\n\n let mut m = Matcher::new(\n\n &format!(\n\n \"{}-{}-{}\",\n\n event.get_self_id(),\n\n event.get_user_id(),\n\n event.get_time()\n\n ),\n\n handler,\n\n )\n\n .add_rule(crate::builtin::rules::is_user(event.get_user_id()))\n\n .add_rule(crate::builtin::rules::is_bot(event.get_self_id()));\n\n if let MessageEvent::Group(g) = event {\n\n m.add_rule(crate::builtin::rules::in_group(g.group_id.clone()));\n\n } else {\n\n m.add_rule(crate::builtin::rules::is_private_message_event());\n\n }\n\n m.set_priority(0)\n\n .set_temp(true)\n\n .set_timeout(timestamp() + 30)\n\n}\n", "file_path": "nonebot_rs/src/matcher/mod.rs", "rank": 10, "score": 127306.05657596732 }, { "content": "pub fn init(debug: bool, trace: Option<bool>) {\n\n if debug {\n\n std::env::set_var(\"RUST_LOG\", \"debug\");\n\n } else {\n\n std::env::set_var(\"RUST_LOG\", \"info\");\n\n }\n\n if let Some(b) = trace {\n\n if b {\n\n std::env::set_var(\"RUST_LOG\", \"trace\");\n\n }\n\n }\n\n tracing_subscriber::fmt::init();\n\n}\n", "file_path": "nonebot_rs/src/log.rs", "rank": 11, "score": 125523.28749790866 }, { "content": "/// 判定 sender 是否为指定 user\n\npub fn is_user<E>(user_id: String) -> Rule<E>\n\nwhere\n\n E: UserId,\n\n{\n\n let is_user = move |event: &E, _: &BotConfig| -> bool {\n\n let id = event.get_user_id();\n\n if id == user_id {\n\n return true;\n\n }\n\n false\n\n };\n\n Arc::new(is_user)\n\n}\n\n\n", "file_path": "nonebot_rs/src/builtin/rules.rs", "rank": 12, "score": 120425.28718876376 }, { "content": "/// 判定消息是否提及 bot(私聊,at,昵称)\n\npub fn to_me() -> Arc<PreMatcher<MessageEvent>> {\n\n let to_me = |e: &mut MessageEvent, config: BotConfig| -> bool {\n\n match e {\n\n MessageEvent::Private(_) => true,\n\n MessageEvent::Group(g) => {\n\n let bot_id = g.self_id.to_string();\n\n let raw_message = remove_space(&g.raw_message);\n\n for name in config.nicknames {\n\n if raw_message.starts_with(&name) {\n\n g.raw_message = remove_space(&raw_message[name.len()..]);\n\n return true;\n\n }\n\n }\n\n for message in &g.message {\n\n match message {\n\n Message::At { qq: qq_id } => {\n\n if qq_id == &bot_id {\n\n g.raw_message = remove_space(\n\n &raw_message.replace(&format!(\"[CQ:at,qq={}]\", g.self_id), \"\"),\n\n );\n", "file_path": "nonebot_rs/src/builtin/prematchers.rs", "rank": 13, "score": 117368.33228435856 }, { "content": "pub fn get(event: MessageEvent) -> Option<String> {\n\n let msg = event.get_raw_message();\n\n if !msg.is_empty() {\n\n return Some(msg.to_string());\n\n }\n\n let data = load(&event.get_self_id());\n\n data.get(&event.get_user_id()).and_then(|x| Some(x.clone()))\n\n}\n\n\n", "file_path": "nbrs_matcher_r6s/src/utils.rs", "rank": 14, "score": 112629.88415432266 }, { "content": "pub fn load(bot_id: &str) -> UserNicknameMap {\n\n let path = check_dir().join(format!(\"{}.json\", bot_id));\n\n if path.exists() {\n\n let data = fs::read_to_string(path).unwrap();\n\n let data: UserNicknameMap = serde_json::from_str(&data).unwrap();\n\n data\n\n } else {\n\n let data = HashMap::new();\n\n data\n\n }\n\n}\n\n\n", "file_path": "nbrs_matcher_r6s/src/utils.rs", "rank": 15, "score": 111349.4894244922 }, { "content": "/// 判定消息是否符合命令起始符\n\npub fn command_start() -> Arc<PreMatcher<MessageEvent>> {\n\n Arc::new(command_start_)\n\n}\n", "file_path": "nonebot_rs/src/builtin/prematchers.rs", "rank": 16, "score": 111278.96043829067 }, { "content": "/// Meta Event Logger\n\npub fn meta_logger(event: &MetaEvent) {\n\n if &event.meta_event_type == \"heartbeat\" {\n\n event!(Level::TRACE, \"Recive HeartBeat\")\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Logger;\n\n\n\nimpl Logger {\n\n async fn event_recv(self, mut event_receiver: crate::EventReceiver) {\n\n while let Ok(event) = event_receiver.recv().await {\n\n match &event {\n\n Event::Message(m) => message_logger(m),\n\n Event::Meta(m) => meta_logger(m),\n\n _ => {}\n\n }\n\n }\n\n }\n\n}\n", "file_path": "nonebot_rs/src/logger.rs", "rank": 17, "score": 109912.38403520646 }, { "content": "/// Message Event Logger\n\npub fn message_logger(event: &MessageEvent) {\n\n match &event {\n\n MessageEvent::Private(p) => {\n\n let mut user_id = p.user_id.to_string();\n\n while user_id.len() < 10 {\n\n user_id.insert(0, ' ');\n\n }\n\n event!(\n\n Level::INFO,\n\n \"{} [{}] -> {} from {}({})\",\n\n user_id.green(),\n\n p.self_id.to_string().red(),\n\n p.raw_message,\n\n p.sender.nickname.to_string().blue(),\n\n p.user_id.to_string().green(),\n\n )\n\n }\n\n MessageEvent::Group(g) => {\n\n let mut group_id = g.group_id.to_string();\n\n while group_id.len() < 10 {\n", "file_path": "nonebot_rs/src/logger.rs", "rank": 18, "score": 109196.96466877006 }, { "content": "pub fn dump(bot_id: &str, data: UserNicknameMap) {\n\n let path = check_dir().join(format!(\"{}.json\", bot_id));\n\n let data_str = serde_json::to_string(&data).unwrap();\n\n fs::write(path, &data_str).unwrap();\n\n}\n\n\n", "file_path": "nbrs_matcher_r6s/src/utils.rs", "rank": 19, "score": 107010.75648186884 }, { "content": "/// 判定 event 是否为私聊消息事件\n\npub fn is_private_message_event() -> Rule<MessageEvent> {\n\n let is_private_message_event = |event: &MessageEvent, _: &BotConfig| -> bool {\n\n match event {\n\n MessageEvent::Group(_) => false,\n\n MessageEvent::Private(_) => true,\n\n }\n\n };\n\n Arc::new(is_private_message_event)\n\n}\n", "file_path": "nonebot_rs/src/builtin/rules.rs", "rank": 20, "score": 104243.62551318262 }, { "content": "pub fn set(bot_id: &str, user_id: String, nickname: String) {\n\n let mut data = load(bot_id);\n\n data.insert(user_id, nickname);\n\n dump(bot_id, data);\n\n}\n", "file_path": "nbrs_matcher_r6s/src/utils.rs", "rank": 21, "score": 101777.3155034462 }, { "content": "pub fn format_stat(data: &Value) -> String {\n\n let kd = format_division(\"kills\", \"deaths\", data);\n\n let wl = format_division(\"won\", \"lost\", data);\n\n let timeplayed: f64 = data.get(\"timePlayed\").unwrap().as_f64().unwrap() as f64 / 3600.0;\n\n format!(\n\n \"KD:{}\\n胜负比:{}\\n总场数:{}\\n游戏时常:{:.2}\",\n\n kd,\n\n wl,\n\n data.get(\"played\").unwrap(),\n\n timeplayed\n\n )\n\n}\n\n\n", "file_path": "nbrs_matcher_r6s/src/utils.rs", "rank": 22, "score": 94222.5142899568 }, { "content": "/// 判定 event 是否来自指定 group\n\npub fn in_group(group_id: String) -> Rule<MessageEvent> {\n\n let in_group = move |event: &MessageEvent, _: &BotConfig| -> bool {\n\n if let MessageEvent::Group(g) = event {\n\n if g.group_id == group_id {\n\n return true;\n\n }\n\n }\n\n false\n\n };\n\n Arc::new(in_group)\n\n}\n\n\n", "file_path": "nonebot_rs/src/builtin/rules.rs", "rank": 23, "score": 89017.0047638486 }, { "content": "#[allow(dead_code)]\n\npub fn timestamp() -> i64 {\n\n let time = Local::now();\n\n time.timestamp()\n\n}\n\n\n\nuse serde::Deserializer;\n\n\n", "file_path": "nonebot_rs/src/utils.rs", "rank": 24, "score": 83648.0956908749 }, { "content": "pub fn format_division(word1: &str, word2: &str, data: &Value) -> String {\n\n let w1 = data.get(word1).unwrap().as_i64().unwrap();\n\n let w2 = data.get(word2).unwrap().as_i64().unwrap();\n\n if w2 != 0 {\n\n format!(\"{:.2}\", w1 as f64 / w2 as f64)\n\n } else {\n\n format!(\"{}/{}\", w1, w2)\n\n }\n\n}\n\n\n", "file_path": "nbrs_matcher_r6s/src/utils.rs", "rank": 25, "score": 80334.32542463006 }, { "content": "fn log_load_matchers(matchers: &crate::Matchers) {\n\n log_matcherb(&matchers.message);\n\n log_matcherb(&matchers.notice);\n\n log_matcherb(&matchers.request);\n\n log_matcherb(&matchers.meta);\n\n}\n\n\n", "file_path": "nonebot_rs/src/matcher/matchers/mod.rs", "rank": 26, "score": 75879.56961055391 }, { "content": "#[allow(dead_code)]\n\npub fn remove_space(s: &str) -> String {\n\n let mut rstring = String::from(s);\n\n let mut chars = s.chars();\n\n while chars.next() == Some(' ') {\n\n rstring.remove(0);\n\n }\n\n rstring\n\n}\n\n\n\nuse chrono::Local;\n\n\n", "file_path": "nonebot_rs/src/utils.rs", "rank": 27, "score": 75621.20639564423 }, { "content": "/// `get_self_id()` trait\n\npub trait SelfId {\n\n fn get_self_id(&self) -> String;\n\n}\n\n\n\nimpl SelfId for MessageEvent {\n\n fn get_self_id(&self) -> String {\n\n match self {\n\n MessageEvent::Private(p) => p.self_id.clone(),\n\n MessageEvent::Group(g) => g.self_id.clone(),\n\n }\n\n }\n\n}\n\n\n\nimpl SelfId for RequestEvent {\n\n fn get_self_id(&self) -> String {\n\n self.self_id.clone()\n\n }\n\n}\n\n\n\nimpl SelfId for NoticeEvent {\n", "file_path": "nonebot_rs/src/event.rs", "rank": 28, "score": 72305.74759358932 }, { "content": "#[allow(dead_code)]\n\npub fn clock(nb: &nonebot_rs::Nonebot) -> Job {\n\n let bot_getter = nb.bot_getter.clone();\n\n Job::new(\"1 * * * * *\", move |_, _| {\n\n let bots = bot_getter.borrow().clone();\n\n for (_, bot) in bots {\n\n let bot = bot.clone();\n\n tokio::spawn(send_a_msg(bot));\n\n }\n\n })\n\n .unwrap()\n\n}\n\n\n\n// Just for test\n\n#[allow(dead_code)]\n\nasync fn send_a_msg(bot: nonebot_rs::Bot) {\n\n for superuser in &bot.config.superusers {\n\n bot.send_private_msg(\n\n superuser,\n\n vec![Message::text(\"One minute passed.\".to_string())],\n\n )\n\n .await;\n\n }\n\n}\n", "file_path": "nbrs_no4/src/clock.rs", "rank": 29, "score": 70487.95587517653 }, { "content": "#[doc(hidden)]\n\npub fn resp_logger(resp: &crate::api_resp::ApiResp) {\n\n if &resp.status == \"ok\" {\n\n event!(Level::DEBUG, \"{} success\", resp.echo);\n\n } else {\n\n event!(Level::INFO, \"{} failed\", resp.echo);\n\n }\n\n}\n", "file_path": "nonebot_rs/src/builtin/mod.rs", "rank": 30, "score": 66024.75256479396 }, { "content": "fn check_dir() -> PathBuf {\n\n let r6s_path = PathBuf::from(CACHE_DATE_PATH).join(R6S_DIR_NAME);\n\n if !r6s_path.exists() {\n\n fs::create_dir_all(&r6s_path).unwrap();\n\n }\n\n r6s_path\n\n}\n\n\n", "file_path": "nbrs_matcher_r6s/src/utils.rs", "rank": 31, "score": 63113.027179119876 }, { "content": "\n\n // timeout 后调用,通知接受端 Timeout\n\n fn timeout_drop(&self, matcher: &Matcher<MessageEvent>) {\n\n let sender = matcher.bot.clone().unwrap().api_sender;\n\n tokio::spawn(async move { sender.send(ApiChannelItem::TimeOut).await.unwrap() });\n\n }\n\n }\n\n\n\n // 搭建临时通道接受 MessageEvent\n\n let (sender, mut receiver) = tokio::sync::mpsc::channel::<ApiChannelItem>(4);\n\n let event = self.event.clone().unwrap();\n\n // 根据提供的 event Handler 构建仅指向当先通话的 Temp Matcher\n\n let mut m = build_temp_message_event_matcher(&event, Temp);\n\n // 使用临时通道构建专用 Bot\n\n let bot = crate::bot::Bot::new(\n\n \"Temp\".to_string(),\n\n crate::config::BotConfig::default(),\n\n sender,\n\n self.bot.clone().unwrap().action_sender.clone(),\n\n self.bot.clone().unwrap().api_resp_watcher.clone(),\n", "file_path": "nonebot_rs/src/matcher/message_event_matcher.rs", "rank": 32, "score": 61873.70034222343 }, { "content": " return Some(crate::utils::remove_space(raw_message));\n\n }\n\n }\n\n\n\n /// 临时 Matcher 的 Handler struct\n\n struct Temp;\n\n\n\n #[async_trait]\n\n impl Handler<MessageEvent> for Temp {\n\n crate::on_match_all!();\n\n async fn handle(&self, event: MessageEvent, matcher: Matcher<MessageEvent>) {\n\n matcher\n\n .bot\n\n .clone()\n\n .unwrap()\n\n .api_sender\n\n .send(ApiChannelItem::MessageEvent(event))\n\n .await\n\n .unwrap();\n\n }\n", "file_path": "nonebot_rs/src/matcher/message_event_matcher.rs", "rank": 33, "score": 61869.50593409426 }, { "content": "use super::{build_temp_message_event_matcher, Handler, Matcher};\n\nuse crate::event::MessageEvent;\n\nuse crate::ApiChannelItem;\n\nuse async_trait::async_trait;\n\nuse colored::*;\n\nuse tracing::{event, Level};\n\n\n\nimpl Matcher<MessageEvent> {\n\n /// 发送纯文本消息\n\n pub async fn send_text(&self, msg: &str) {\n\n let msg = crate::message::Message::Text {\n\n text: msg.to_string(),\n\n };\n\n self.send(vec![msg]).await;\n\n }\n\n\n\n /// 设置临时 Matcher<MessageEvent>\n\n pub async fn set_temp_message_event_matcher<H>(&self, event: &MessageEvent, handler: H)\n\n where\n\n H: Handler<MessageEvent> + Send + Sync + 'static,\n", "file_path": "nonebot_rs/src/matcher/message_event_matcher.rs", "rank": 34, "score": 61868.63881977954 }, { "content": " {\n\n self.set_message_matcher(build_temp_message_event_matcher(event, handler))\n\n .await;\n\n }\n\n\n\n /// 请求消息内容\n\n ///\n\n /// 传入 event raw_message 若不为空则直接返回该消息文本(传入 None 表示必须请求)\n\n ///\n\n /// 传入 msg 为发送给用户的请求文本信息(传入 None 表示不向用户发送请求信息)\n\n ///\n\n /// 重新请求消息为空将返回 None\n\n pub async fn request_message(\n\n &self,\n\n event: Option<&MessageEvent>,\n\n msg: Option<&str>,\n\n ) -> Option<String> {\n\n if let Some(event) = event {\n\n let raw_message = event.get_raw_message();\n\n if !raw_message.is_empty() {\n", "file_path": "nonebot_rs/src/matcher/message_event_matcher.rs", "rank": 35, "score": 61866.535362603085 }, { "content": " /// 发送 Vec<Message> 消息\n\n pub async fn send(&self, msg: Vec<crate::message::Message>) {\n\n if let (Some(bot), Some(event)) = (&self.bot, &self.event) {\n\n bot.send_by_message_event(&event, msg).await;\n\n } else {\n\n event!(\n\n Level::ERROR,\n\n \"{}\",\n\n \"Sending msg with unbuilt matcher!\".red()\n\n );\n\n }\n\n }\n\n}\n", "file_path": "nonebot_rs/src/matcher/message_event_matcher.rs", "rank": 36, "score": 61861.95182881184 }, { "content": " );\n\n // 绑定专用 Bot\n\n m.bot = Some(bot);\n\n self.set_message_matcher(m).await;\n\n\n\n // Temp Matcher 已就绪,发送提示信息\n\n if let Some(msg) = msg {\n\n self.send_text(msg).await;\n\n }\n\n\n\n // 等待接收 MessageEvent\n\n while let Some(data) = receiver.recv().await {\n\n match data {\n\n ApiChannelItem::MessageEvent(event) => {\n\n let msg = crate::utils::remove_space(event.get_raw_message());\n\n if msg.is_empty() {\n\n return None;\n\n } else {\n\n return Some(msg);\n\n }\n", "file_path": "nonebot_rs/src/matcher/message_event_matcher.rs", "rank": 37, "score": 61859.25243071478 }, { "content": " }\n\n ApiChannelItem::TimeOut => {\n\n event!(Level::DEBUG, \"Temp Matcher TimeOut\");\n\n return None;\n\n }\n\n // 中转 temp Matcher 的 Remove Action\n\n // ApiChannelItem::Action(action) => self.set(action).await,\n\n _ => {\n\n event!(\n\n Level::WARN,\n\n \"{}\",\n\n \"Temp Matcher接受端接收到错误Api或Action消息\".bright_red()\n\n );\n\n } // 忽视 event 该 receiver 永不应该收到 event\n\n }\n\n }\n\n\n\n None\n\n }\n\n\n", "file_path": "nonebot_rs/src/matcher/message_event_matcher.rs", "rank": 38, "score": 61853.119908934714 }, { "content": "pub fn id_deserializer<'de, D>(d: D) -> Result<String, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n d.deserialize_any(JsonIdVisitor)\n\n}\n\n\n", "file_path": "nonebot_rs/src/utils.rs", "rank": 39, "score": 61241.076761114746 }, { "content": "fn rank(mmr: i64) -> String {\n\n let f = |a: &str, b: &str| format!(\"{}{}\", a, b);\n\n\n\n let head = [\"紫铜\", \"黄铜\", \"白银\", \"黄金\", \"白金\", \"钻石\", \"冠军\"];\n\n let feet1 = [\"V\", \"IV\", \"III\", \"II\", \"I\"];\n\n let feet2 = [\"III\", \"II\", \"I\"];\n\n if mmr < 2600 {\n\n let mmrd = (mmr / 100) - 11;\n\n if mmrd < 5 {\n\n return f(head[0], feet1[mmrd as usize]);\n\n } else if mmrd < 10 {\n\n return f(head[1], feet1[(mmrd - 5) as usize]);\n\n } else {\n\n return f(head[2], feet1[(mmrd - 10) as usize]);\n\n }\n\n } else if mmr < 4400 {\n\n let mmrd = (mmr / 200) - 13;\n\n if mmrd < 3 {\n\n return f(head[3], feet2[mmrd as usize]);\n\n } else {\n\n return f(head[4], feet2[((mmrd - 3) / 2) as usize]);\n\n }\n\n } else if mmr < 5000 {\n\n return head[5].to_string();\n\n } else {\n\n return head[6].to_string();\n\n }\n\n}\n", "file_path": "nbrs_matcher_r6s/src/pro.rs", "rank": 40, "score": 61088.90999322606 }, { "content": "fn format_time(time: i64) -> String {\n\n fn f(time: i64, div: i64, mut rs: String, s: &str) -> (String, i64) {\n\n let (time, remain) = (time / div, time % div);\n\n rs.insert_str(0, &format!(\"{}{}\", remain, s));\n\n (rs, time)\n\n }\n\n\n\n let rs = String::new();\n\n let (rs, time) = f(time, 60, rs, \"秒\");\n\n if time == 0 {\n\n return rs;\n\n }\n\n let (rs, time) = f(time, 60, rs, \"分\");\n\n if time == 0 {\n\n return rs;\n\n }\n\n let (mut rs, time) = f(time, 24, rs, \"时\");\n\n if time == 0 {\n\n return rs;\n\n }\n\n rs.insert_str(0, &format!(\"{}{}\", time, \"天\"));\n\n rs\n\n}\n", "file_path": "nonebot_rs/src/builtin/bot_status.rs", "rank": 41, "score": 58693.26435701559 }, { "content": "pub fn option_id_deserializer<'de, D>(d: D) -> Result<Option<String>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n d.deserialize_option(OptionJsonIdVisitor)\n\n}\n", "file_path": "nonebot_rs/src/utils.rs", "rank": 42, "score": 57800.465497681085 }, { "content": "fn format_plays(id: &str, data: &Value) -> String {\n\n let f = |data: &Value| {\n\n let update_at = data.get(\"update_at\").unwrap();\n\n let date = format!(\n\n \"{}.{}.{} {}:{}\",\n\n update_at.get(\"year\").unwrap().as_i64().unwrap() + 1900,\n\n update_at.get(\"month\").unwrap().as_i64().unwrap() + 1,\n\n update_at.get(\"date\").unwrap().as_i64().unwrap(),\n\n update_at.get(\"hours\").unwrap(),\n\n update_at.get(\"minutes\").unwrap()\n\n );\n\n let kd = if data.get(\"deaths\").unwrap() == 0 {\n\n \"-\".to_string()\n\n } else {\n\n format_division(\"kills\", \"deaths\", &data)\n\n };\n\n format!(\n\n \"\\n\\n{}\\n胜负比:{}/{}\\nKD:{}\",\n\n date,\n\n data.get(\"won\").unwrap(),\n", "file_path": "nbrs_matcher_r6s/src/plays.rs", "rank": 43, "score": 54541.760598200955 }, { "content": "fn format_base(id: &str, data: Value) -> String {\n\n format!(\n\n \"{}\\n等级:{}\\n\\n综合数据:\\n{}\",\n\n id,\n\n data.get(\"Basicstat\")\n\n .unwrap()\n\n .get(0)\n\n .unwrap()\n\n .get(\"level\")\n\n .unwrap(),\n\n format_stat(data.get(\"StatGeneral\").unwrap().get(0).unwrap())\n\n )\n\n}\n", "file_path": "nbrs_matcher_r6s/src/base.rs", "rank": 44, "score": 54541.760598200955 }, { "content": "fn format_pro(id: &str, data: Value) -> String {\n\n let casual_mmr = data.get(\"Casualstat\").unwrap().get(\"mmr\").unwrap();\n\n let casual = format! {\n\n \"{}\\n\\n休闲数据:\\n{}\\n隐藏MMR:{}\\n隐藏Rank:{}\",\n\n id,\n\n format_stat(data.get(\"StatCR\").unwrap().get(0).unwrap()),\n\n casual_mmr,\n\n rank(casual_mmr.as_f64().unwrap() as i64),\n\n };\n\n if let Some(rank_data) = data.get(\"StatCR\").unwrap().get(1) {\n\n let rank_mmr = data\n\n .get(\"Basicstat\")\n\n .unwrap()\n\n .get(0)\n\n .unwrap()\n\n .get(\"mmr\")\n\n .unwrap();\n\n return format!(\n\n \"{}\\n\\n排位数据:\\n{}\\n排位MMR:{}\\n排位Rank:{}\",\n\n casual,\n\n format_stat(rank_data),\n\n rank_mmr,\n\n rank(rank_mmr.as_f64().unwrap() as i64),\n\n );\n\n }\n\n casual\n\n}\n\n\n", "file_path": "nbrs_matcher_r6s/src/pro.rs", "rank": 45, "score": 54541.760598200955 }, { "content": "fn main() {\n\n let mut nb = nonebot_rs::Nonebot::new();\n\n\n\n let mut matchers = nonebot_rs::Matchers::new_empty();\n\n matchers\n\n .add_message_matcher(nonebot_rs::builtin::rcnb::rcnb())\n\n .add_message_matcher(nonebot_rs::builtin::echo::echo2());\n\n nb.add_plugin(matchers);\n\n\n\n let lua = nbrs_lua::LuaPlugin::new();\n\n nb.add_plugin(lua);\n\n\n\n let mut scheduler = nonebot_rs::Scheduler::new();\n\n scheduler.add_job(clock::clock(&nb));\n\n nb.add_plugin(scheduler);\n\n\n\n nb.run()\n\n}\n", "file_path": "nbrs_no4/src/main.rs", "rank": 47, "score": 45044.676942011385 }, { "content": "/// `get_user_id()` trait\n\npub trait UserId {\n\n fn get_user_id(&self) -> String;\n\n}\n\n\n\nimpl UserId for MessageEvent {\n\n fn get_user_id(&self) -> String {\n\n match self {\n\n MessageEvent::Private(p) => p.user_id.to_string(),\n\n MessageEvent::Group(g) => g.user_id.to_string(),\n\n }\n\n }\n\n}\n\n\n\nimpl UserId for NoticeEvent {\n\n fn get_user_id(&self) -> String {\n\n self.user_id.clone()\n\n }\n\n}\n\n\n\nimpl UserId for RequestEvent {\n\n fn get_user_id(&self) -> String {\n\n self.user_id.clone()\n\n }\n\n}\n\n\n", "file_path": "nonebot_rs/src/event.rs", "rank": 48, "score": 44740.89363077804 }, { "content": "#[test]\n\nfn de_test() {\n\n let test_str = \"{\\\"group_id\\\":101,\\\"message_id\\\":111,\\\"notice_type\\\":\\\"group_recall\\\",\\\"operator_id\\\":11,\\\"post_type\\\":\\\"notice\\\",\\\"self_id\\\":11,\\\"time\\\":1631193409,\\\"user_id\\\":11}\\n\";\n\n let _meta: Event = serde_json::from_str(test_str).unwrap();\n\n}\n\n\n\n/// 元事件状态字段\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct Status {\n\n /// 是否在线,None 表示无法查询\n\n pub online: Option<bool>,\n\n /// 运行状态是否符合预期\n\n pub good: bool,\n\n}\n\n\n", "file_path": "nonebot_rs/src/event.rs", "rank": 49, "score": 43774.54452676252 }, { "content": "fn run_lua_script(\n\n script_name: &str,\n\n script_path: &str,\n\n event: &MessageEvent,\n\n bot: &nonebot_rs::Bot,\n\n) {\n\n let path = std::path::PathBuf::from(&script_path);\n\n match std::fs::read_to_string(&path) {\n\n Ok(s) => {\n\n let bot = bot.clone();\n\n let event = event.clone();\n\n let lua = Lua::new();\n\n lua.globals()\n\n .set(\"Message\", event.get_raw_message())\n\n .unwrap();\n\n lua.load(&s).exec().unwrap();\n\n let r_msg = lua.globals().get(\"Rmessage\");\n\n match r_msg {\n\n Ok(r_msg) => {\n\n event!(\n", "file_path": "nbrs_lua/src/lib.rs", "rank": 50, "score": 42609.908986707924 }, { "content": "#[async_trait]\n\npub trait Plugin: std::fmt::Debug {\n\n /// Plugin 启动函数,在 nb 启动时调用一次,不应当阻塞\n\n fn run(&self, event_receiver: crate::EventReceiver, bot_getter: crate::BotGetter);\n\n /// Plugin Name 用于注册 Plugin 时标识唯一性\n\n fn plugin_name(&self) -> &'static str;\n\n /// Load config\n\n #[allow(unused_variables)]\n\n async fn load_config(&mut self, config: toml::Value);\n\n}\n", "file_path": "nonebot_rs/src/plugin.rs", "rank": 51, "score": 39195.27354045527 }, { "content": "#[derive(Clone, Debug)]\n\npub struct Matchers {\n\n /// MessageEvent 对应 MatcherBTreeMap\n\n pub message: MatchersBTreeMap<MessageEvent>,\n\n /// NoticeEvent 对应 MatcherBTreeMap\n\n pub notice: MatchersBTreeMap<NoticeEvent>,\n\n /// RequestEvent 对应 MatcherBTreeMap\n\n pub request: MatchersBTreeMap<RequestEvent>,\n\n /// MetaEvent 对应 MatcherBTreeMap\n\n pub meta: MatchersBTreeMap<MetaEvent>,\n\n /// Bot Watch channel Receiver\n\n bot_getter: Option<crate::BotGetter>,\n\n /// Matchers Action Sender\n\n action_sender: ActionSender,\n\n /// Config\n\n config: HashMap<String, HashMap<String, toml::Value>>,\n\n}\n\n\n\nimpl Matchers {\n\n async fn handle_events(&mut self, event: Event, bot: &crate::bot::Bot) {\n", "file_path": "nonebot_rs/src/matcher/matchers/mod.rs", "rank": 59, "score": 35897.08310078329 }, { "content": " ._handler_event(matcherh, event.clone(), bot.clone())\n\n .await\n\n {\n\n break;\n\n };\n\n }\n\n }\n\n\n\n #[doc(hidden)]\n\n async fn _handler_event<E>(\n\n &mut self,\n\n matcherh: &mut MatchersHashMap<E>,\n\n e: E,\n\n bot: crate::bot::Bot,\n\n ) -> bool\n\n where\n\n E: Clone + Send + 'static + std::fmt::Debug + SelfId,\n\n {\n\n event!(Level::TRACE, \"handling event_ {:?}\", e);\n\n // 每级 Matcher 匹配,返回是否 block\n", "file_path": "nonebot_rs/src/matcher/matchers/mod.rs", "rank": 61, "score": 35895.78299522214 }, { "content": " crate::event::NbEvent::BotDisconnect { bot } => {\n\n self.run_on_connect(bot, true).await;\n\n }\n\n },\n\n }\n\n }\n\n\n\n /// 接收按类型分发后的 Event 逐级匹配 Matcher\n\n async fn handle_event<E>(\n\n &mut self,\n\n mut matcherb: MatchersBTreeMap<E>,\n\n event: E,\n\n bot: crate::bot::Bot,\n\n ) where\n\n E: Clone + Send + 'static + std::fmt::Debug + SelfId,\n\n {\n\n event!(Level::TRACE, \"handling event {:?}\", event);\n\n // 根据不同 Event 类型,逐级匹配,判定是否 Block\n\n for (_, matcherh) in matcherb.iter_mut() {\n\n if self\n", "file_path": "nonebot_rs/src/matcher/matchers/mod.rs", "rank": 63, "score": 35892.37513397403 }, { "content": "use crate::event::{Event, MessageEvent, MetaEvent, NoticeEvent, RequestEvent, SelfId};\n\nuse crate::matcher::Matcher;\n\nuse async_trait::async_trait;\n\nuse colored::*;\n\nuse std::collections::{BTreeMap, HashMap};\n\nuse tokio::sync::broadcast;\n\nuse tracing::{event, Level};\n\n\n\nmod action;\n\n\n\n/// 按 `priority` 依序存储 `MatchersHashMap`\n\npub type MatchersBTreeMap<E> = BTreeMap<i8, MatchersHashMap<E>>;\n\n/// 使用唯一名字存储 `Matcher`\n\npub type MatchersHashMap<E> = HashMap<String, Matcher<E>>;\n\n/// Matchers Action Sender\n\npub type ActionSender = broadcast::Sender<super::action::MatchersAction>;\n\n\n\npub const PLUGIN_NAME: &'static str = \"Matcher\";\n\n\n\n/// 根据 `Event` 类型分类存储对应的 `Matcher`\n", "file_path": "nonebot_rs/src/matcher/matchers/mod.rs", "rank": 64, "score": 35891.88130844283 }, { "content": " match event {\n\n Event::Message(e) => {\n\n self.handle_event(self.message.clone(), e, bot.clone())\n\n .await;\n\n }\n\n Event::Notice(e) => {\n\n self.handle_event(self.notice.clone(), e, bot.clone()).await;\n\n }\n\n Event::Request(e) => {\n\n self.handle_event(self.request.clone(), e, bot.clone())\n\n .await;\n\n }\n\n Event::Meta(e) => {\n\n self.handle_event(self.meta.clone(), e, bot.clone()).await;\n\n }\n\n Event::Nonebot(e) => match e {\n\n crate::event::NbEvent::BotConnect { bot } => {\n\n log_load_matchers(&self);\n\n self.run_on_connect(bot, false).await;\n\n }\n", "file_path": "nonebot_rs/src/matcher/matchers/mod.rs", "rank": 65, "score": 35886.89122510232 }, { "content": "\n\n async fn event_recv(mut self, mut event_receiver: crate::EventReceiver) {\n\n let mut receiver = self.action_sender.subscribe();\n\n while let Ok(event) = event_receiver.recv().await {\n\n\n\n match receiver.try_recv() {\n\n Ok(action) => self.handle_action(action),\n\n Err(_) => {}\n\n }\n\n\n\n let bots = self.bot_getter.clone().unwrap().borrow().clone();\n\n if let Some(bot) = bots.get(&event.get_self_id()) {\n\n self.handle_events(event, bot).await;\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl crate::Plugin for Matchers {\n", "file_path": "nonebot_rs/src/matcher/matchers/mod.rs", "rank": 66, "score": 35885.76734221516 }, { "content": " let mut get_block = false;\n\n let config = bot.config.clone();\n\n for (name, matcher) in matcherh.iter_mut() {\n\n let matched = matcher\n\n .build(bot.clone())\n\n .match_(e.clone(), config.clone(), self)\n\n .await;\n\n if matched {\n\n event!(Level::INFO, \"Matched {}\", name.blue());\n\n if matcher.is_block() {\n\n get_block = true;\n\n }\n\n if matcher.is_temp() {\n\n event!(Level::INFO, \"Remove matched temp matcher {}\", name.blue());\n\n self.remove_matcher(name);\n\n }\n\n }\n\n }\n\n get_block\n\n }\n", "file_path": "nonebot_rs/src/matcher/matchers/mod.rs", "rank": 67, "score": 35884.27168641026 }, { "content": " fn run(&self, event_receiver: crate::EventReceiver, bot_getter: crate::BotGetter) {\n\n let mut m = self.clone();\n\n m.bot_getter = Some(bot_getter.clone());\n\n tokio::spawn(m.event_recv(event_receiver));\n\n }\n\n\n\n fn plugin_name(&self) -> &'static str {\n\n PLUGIN_NAME\n\n }\n\n\n\n async fn load_config(&mut self, config: toml::Value) {\n\n let config: HashMap<String, HashMap<String, toml::Value>> =\n\n config.try_into().expect(\"Matchers get error config\");\n\n self.config = config;\n\n self.load_all_matcher_config().await;\n\n event!(Level::INFO, \"Loaded Matchers config: {:?}\", self.config);\n\n }\n\n}\n\n\n", "file_path": "nonebot_rs/src/matcher/matchers/mod.rs", "rank": 68, "score": 35883.21196051224 }, { "content": " superusers: vec![],\n\n nicknames: vec![],\n\n command_starts: vec![\"/\".to_string()],\n\n },\n\n bots: None,\n\n config: Config::default(),\n\n ws_server: Some(WebSocketServerConfig {\n\n host: std::net::Ipv4Addr::new(127, 0, 0, 1),\n\n port: 8088,\n\n access_token: String::default(),\n\n }),\n\n }\n\n }\n\n}\n\n\n\nimpl NbConfig {\n\n /// 从配置文件读取配置\n\n pub fn load() -> Self {\n\n use colored::*;\n\n let mut config: NbConfig;\n", "file_path": "nonebot_rs/src/config.rs", "rank": 69, "score": 31241.52888550357 }, { "content": "\n\nimpl Default for BotConfig {\n\n fn default() -> Self {\n\n BotConfig {\n\n bot_id: String::new(),\n\n superusers: vec![],\n\n nicknames: vec![],\n\n command_starts: vec![],\n\n access_token: String::default(),\n\n ws_server: String::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl Default for NbConfig {\n\n fn default() -> Self {\n\n NbConfig {\n\n global: GlobalConfig {\n\n debug: true,\n\n trace: None,\n", "file_path": "nonebot_rs/src/config.rs", "rank": 70, "score": 31238.145370825125 }, { "content": "use crate::log::{colored::*, event, Level};\n\nuse config::Config;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::collections::HashMap;\n\n\n\n/// nbrs 配置文件名\n\npub static CONFIG_PATH: &str = \"Nonebotrs.toml\";\n\n\n\n/// nbrs 配置项结构体\n\n#[derive(Serialize, Deserialize, Clone)]\n\npub struct NbConfig {\n\n /// 全局配置\n\n pub global: GlobalConfig,\n\n /// bot 配置\n\n pub bots: Option<HashMap<String, BotConfig>>,\n\n /// 反向 WS 服务器设置\n\n pub ws_server: Option<WebSocketServerConfig>,\n\n #[serde(skip)]\n\n config: Config, // save the full config\n\n}\n", "file_path": "nonebot_rs/src/config.rs", "rank": 71, "score": 31235.957724836968 }, { "content": " pub fn gen_bot_config(&self, bot_id: &str) -> BotConfig {\n\n let mut rbotconfig = BotConfig {\n\n bot_id: bot_id.to_string(),\n\n superusers: self.global.superusers.clone(),\n\n nicknames: self.global.nicknames.clone(),\n\n command_starts: self.global.command_starts.clone(),\n\n access_token: String::default(),\n\n ws_server: String::default(),\n\n };\n\n\n\n if let Some(server_config) = &self.ws_server {\n\n rbotconfig.access_token = server_config.access_token.clone();\n\n }\n\n\n\n if let Some(bots_config) = &self.bots {\n\n if let Some(bot_config) = bots_config.get(bot_id) {\n\n if !bot_config.superusers.is_empty() {\n\n rbotconfig.superusers = bot_config.superusers.clone();\n\n }\n\n if !bot_config.nicknames.is_empty() {\n", "file_path": "nonebot_rs/src/config.rs", "rank": 72, "score": 31234.929282790647 }, { "content": " rbotconfig.nicknames = bot_config.nicknames.clone();\n\n }\n\n if !bot_config.command_starts.is_empty() {\n\n rbotconfig.command_starts = bot_config.command_starts.clone();\n\n }\n\n if !bot_config.access_token.is_empty() {\n\n rbotconfig.access_token = bot_config.access_token.clone();\n\n }\n\n }\n\n }\n\n rbotconfig\n\n }\n\n\n\n pub fn gen_access_token(&self) -> AccessToken {\n\n let mut at = AccessToken {\n\n global: if let Some(ws_server_config) = &self.ws_server {\n\n ws_server_config.access_token.clone()\n\n } else {\n\n String::default()\n\n },\n", "file_path": "nonebot_rs/src/config.rs", "rank": 73, "score": 31234.770384457202 }, { "content": "\n\nimpl std::fmt::Debug for NbConfig {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_struct(\"NbConfig\")\n\n .field(\"Global\", &self.global)\n\n .field(\"Bots\", &self.bots)\n\n .finish()\n\n }\n\n}\n\n\n\n/// 反向 WS 服务器设置\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct WebSocketServerConfig {\n\n /// Host\n\n pub host: std::net::Ipv4Addr,\n\n /// Port\n\n pub port: u16,\n\n /// Onebot authorization\n\n #[serde(alias = \"access-token\")]\n\n #[serde(default)]\n", "file_path": "nonebot_rs/src/config.rs", "rank": 74, "score": 31234.46133271156 }, { "content": " access_token: String,\n\n}\n\n\n\n/// nbrs 全局配置\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct GlobalConfig {\n\n /// Debug 模式\n\n pub debug: bool,\n\n /// Trace 模式\n\n pub trace: Option<bool>,\n\n /// 全局管理员账号设置\n\n pub superusers: Vec<String>,\n\n /// 全局昵称设置\n\n pub nicknames: Vec<String>,\n\n /// 全局命令起始符设置\n\n pub command_starts: Vec<String>,\n\n}\n\n\n\n/// nbrs bot 配置\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n", "file_path": "nonebot_rs/src/config.rs", "rank": 75, "score": 31234.09945094634 }, { "content": " let _config = self.config.clone();\n\n let get_config: Result<T, config::ConfigError> = _config.get(key_word);\n\n match get_config {\n\n Ok(t) => {\n\n event!(Level::DEBUG, \"Found config for {}\", key_word);\n\n Some(t)\n\n }\n\n Err(_) => {\n\n event!(Level::DEBUG, \"Not found config for {}\", key_word);\n\n None\n\n }\n\n }\n\n }\n\n\n\n /// 获取 full config\n\n pub fn get_full_config(&self) -> Config {\n\n self.config.clone()\n\n }\n\n\n\n /// 生成 BotConfig\n", "file_path": "nonebot_rs/src/config.rs", "rank": 76, "score": 31233.48077191136 }, { "content": "impl AccessToken {\n\n pub fn get(&self, bot_id: &str) -> &str {\n\n if let Some(a) = self.bots.get(bot_id) {\n\n a\n\n } else {\n\n &self.global\n\n }\n\n }\n\n\n\n pub fn check_auth(&self, bot_id: &str, token: Option<String>) -> bool {\n\n let access_token = if let Some(a) = self.bots.get(bot_id) {\n\n &a\n\n } else {\n\n &self.global\n\n };\n\n\n\n if access_token.is_empty() {\n\n return true;\n\n }\n\n\n", "file_path": "nonebot_rs/src/config.rs", "rank": 77, "score": 31231.720915958038 }, { "content": " bots: HashMap::default(),\n\n };\n\n if let Some(bots) = &self.bots {\n\n for (bot_id, bot) in bots {\n\n if !bot.access_token.is_empty() {\n\n at.bots\n\n .insert(bot_id.to_string(), bot.access_token.to_string());\n\n }\n\n }\n\n }\n\n at\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct AccessToken {\n\n pub global: String,\n\n pub bots: HashMap<String, String>,\n\n}\n\n\n", "file_path": "nonebot_rs/src/config.rs", "rank": 78, "score": 31229.789733817597 }, { "content": " let config_pathbuf = std::path::PathBuf::from(&CONFIG_PATH);\n\n if !config_pathbuf.exists() {\n\n config = NbConfig::default();\n\n let config_string = toml::to_string(&config).unwrap();\n\n std::fs::write(&config_pathbuf, &config_string).unwrap();\n\n println!(\"{}\", \"未发现配置文件,已新建配置文件。\".green())\n\n } else {\n\n let mut _config = Config::default();\n\n _config.merge(config::File::with_name(CONFIG_PATH)).unwrap();\n\n config = _config.clone().try_into().unwrap();\n\n config.config = _config;\n\n }\n\n config\n\n }\n\n\n\n /// 根据 key_word 获取 config\n\n pub fn get_config<'de, T>(&self, key_word: &str) -> Option<T>\n\n where\n\n T: serde::Deserialize<'de>,\n\n {\n", "file_path": "nonebot_rs/src/config.rs", "rank": 79, "score": 31229.33020496796 }, { "content": "pub struct BotConfig {\n\n /// bot id\n\n #[serde(skip)]\n\n pub bot_id: String,\n\n /// 管理员账号设置\n\n #[serde(default)]\n\n pub superusers: Vec<String>,\n\n /// 昵称设置\n\n #[serde(default)]\n\n pub nicknames: Vec<String>,\n\n /// 命令起始符设置\n\n #[serde(default)]\n\n pub command_starts: Vec<String>,\n\n #[serde(alias = \"access-token\")]\n\n #[serde(default)]\n\n access_token: String, // Onebot authorization\n\n /// 正向 WS 地址\n\n #[serde(default)]\n\n pub ws_server: String,\n\n}\n", "file_path": "nonebot_rs/src/config.rs", "rank": 80, "score": 31228.862812719697 }, { "content": " fn check(head: &str, token: &str, access_token: &str) -> bool {\n\n if token.starts_with(head) {\n\n let token = crate::utils::remove_space(&token.replace(head, \"\"));\n\n if token == access_token {\n\n return true;\n\n }\n\n }\n\n false\n\n }\n\n\n\n let mut result = false;\n\n if let Some(token) = &token {\n\n result = check(\"Token\", token, access_token) || check(\"Bearer\", &token, access_token)\n\n }\n\n\n\n if !result {\n\n event!(\n\n Level::WARN,\n\n \"Access Token match fail Bot:[{}] Token:{:?}\",\n\n bot_id.red(),\n\n token\n\n );\n\n }\n\n\n\n result\n\n }\n\n}\n", "file_path": "nonebot_rs/src/config.rs", "rank": 81, "score": 31226.079196368482 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n/// Onebot 协议消息定义\n\n#[derive(Debug, Deserialize, Serialize, Clone)]\n\n#[serde(tag = \"type\", content = \"data\")]\n\npub enum Message {\n\n /// 纯文本\n\n #[serde(rename = \"text\")]\n\n Text {\n\n /// 纯文本内容\n\n text: String,\n\n },\n\n\n\n /// QQ 表情\n\n #[serde(rename = \"face\")]\n\n Face {\n\n /// QQ 表情 ID\n\n id: String,\n\n },\n\n\n", "file_path": "nonebot_rs/src/message.rs", "rank": 82, "score": 30837.012091877372 }, { "content": "\n\nimpl Message {\n\n // pub fn text(text: &str) -> Message {\n\n // Message::Text {\n\n // text: text.to_string(),\n\n // }\n\n // }\n\n message_builder!(text, Text, text: String);\n\n message_builder!(face, Face, id: String);\n\n message_builder!(\n\n image,\n\n Image,\n\n file: String,\n\n type_: Option<String>,\n\n url: Option<String>,\n\n cache: Option<u8>,\n\n proxy: Option<u8>,\n\n timeout: Option<i64>\n\n );\n\n message_builder!(\n", "file_path": "nonebot_rs/src/message.rs", "rank": 83, "score": 30835.70159568071 }, { "content": " },\n\n}\n\n\n\nmacro_rules! message_builder {\n\n ($fn_name: ident, $message_type: tt) => {\n\n pub fn $fn_name() -> Message {\n\n Message::$message_type\n\n }\n\n };\n\n ($fn_name: ident, $message_type: tt, $param: ident: $param_ty: ty) => {\n\n pub fn $fn_name($param: $param_ty) -> Message {\n\n Message::$message_type { $param: $param }\n\n }\n\n };\n\n ($fn_name: ident, $message_type: tt, $($param: ident: $param_ty: ty),*) => {\n\n pub fn $fn_name($($param: $param_ty,)*) -> Message {\n\n Message::$message_type { $($param: $param,)* }\n\n }\n\n };\n\n}\n", "file_path": "nonebot_rs/src/message.rs", "rank": 84, "score": 30834.95261658446 }, { "content": " message_builder!(dice, Dice);\n\n message_builder!(shake, Shake);\n\n message_builder!(poke, Poke, type_: String, id: String, name: Option<String>);\n\n message_builder!(anonymous, Anonymous);\n\n message_builder!(\n\n share,\n\n Share,\n\n url: String,\n\n title: String,\n\n content: Option<String>,\n\n image: Option<String>\n\n );\n\n message_builder!(contact, Contact, type_: String, id: String);\n\n message_builder!(\n\n location,\n\n Lacation,\n\n lat: String,\n\n lon: String,\n\n title: Option<String>,\n\n content: Option<String>\n", "file_path": "nonebot_rs/src/message.rs", "rank": 85, "score": 30831.449131260553 }, { "content": " );\n\n message_builder!(\n\n music,\n\n Music,\n\n type_: String,\n\n id: Option<String>,\n\n url: Option<String>,\n\n audio: Option<String>,\n\n title: Option<String>,\n\n content: Option<String>,\n\n image: Option<String>\n\n );\n\n message_builder!(reply, Reply, id: String);\n\n message_builder!(forward, Forward, id: String);\n\n message_builder!(\n\n node,\n\n Node,\n\n id: Option<String>,\n\n user_id: Option<String>,\n\n nickname: Option<String>,\n\n content: Option<Vec<Message>>\n\n );\n\n message_builder!(xml, Xml, data: String);\n\n message_builder!(json, Json, data: String);\n\n}\n", "file_path": "nonebot_rs/src/message.rs", "rank": 86, "score": 30831.351282168514 }, { "content": " record,\n\n Record,\n\n file: String,\n\n magic: Option<u8>,\n\n url: Option<String>,\n\n cache: Option<u8>,\n\n proxy: Option<u8>,\n\n timeout: Option<i64>\n\n );\n\n message_builder!(\n\n video,\n\n Video,\n\n file: String,\n\n url: Option<String>,\n\n cache: Option<u8>,\n\n proxy: Option<u8>,\n\n timeout: Option<i64>\n\n );\n\n message_builder!(at, At, qq: String);\n\n message_builder!(rps, Rps);\n", "file_path": "nonebot_rs/src/message.rs", "rank": 87, "score": 30830.787929022536 }, { "content": " /// 发送者 QQ 号 \n\n user_id: Option<String>,\n\n /// 发送者昵称 \n\n nickname: Option<String>,\n\n /// 消息内容 \n\n content: Option<Vec<Message>>,\n\n },\n\n\n\n /// XML 消息\n\n #[serde(rename = \"xml\")]\n\n Xml {\n\n /// 合并转发 ID\n\n data: String,\n\n },\n\n\n\n /// JSON 消息\n\n #[serde(rename = \"json\")]\n\n Json {\n\n /// 合并转发 ID\n\n data: String,\n", "file_path": "nonebot_rs/src/message.rs", "rank": 88, "score": 30829.863213990975 }, { "content": "\n\n /// 匿名发消息\n\n #[serde(rename = \"anonymous\")]\n\n Anonymous,\n\n\n\n /// 链接分享\n\n #[serde(rename = \"share\")]\n\n Share {\n\n /// URL\n\n url: String,\n\n /// 标题\n\n title: String,\n\n /// 内容描述\n\n content: Option<String>,\n\n /// 图片 URl\n\n image: Option<String>,\n\n },\n\n\n\n /// 推荐好友|群\n\n #[serde(rename = \"contact\")]\n", "file_path": "nonebot_rs/src/message.rs", "rank": 89, "score": 30827.333658265503 }, { "content": "\n\n /// 音乐分享\n\n #[serde(rename = \"music\")]\n\n Music {\n\n /// 类型 qq|163|xm|custom\n\n #[serde(rename = \"type\")]\n\n type_: String,\n\n /// 歌曲 ID\n\n id: Option<String>,\n\n /// 点击后跳转 URL\n\n url: Option<String>,\n\n /// 歌曲 URL \n\n audio: Option<String>,\n\n /// 标题 \n\n title: Option<String>,\n\n /// 内容描述\n\n content: Option<String>,\n\n /// 图片 URl\n\n image: Option<String>,\n\n },\n", "file_path": "nonebot_rs/src/message.rs", "rank": 90, "score": 30827.333658265503 }, { "content": " /// 视频 URL\n\n url: Option<String>,\n\n /// 是否使用缓存文件 1|0\n\n cache: Option<u8>,\n\n /// 是否使用代理 1|0\n\n proxy: Option<u8>,\n\n /// 网络文件下载超时 单位秒\n\n timeout: Option<i64>,\n\n },\n\n\n\n /// @某人\n\n #[serde(rename = \"at\")]\n\n At {\n\n /// @QQ ID all 表示全体\n\n qq: String,\n\n },\n\n\n\n /// 猜拳魔法表情\n\n #[serde(rename = \"rps\")]\n\n Rps,\n", "file_path": "nonebot_rs/src/message.rs", "rank": 91, "score": 30827.333658265503 }, { "content": " Record {\n\n /// 语音文件名\n\n file: String,\n\n /// 是否变声 1|0\n\n magic: Option<u8>,\n\n /// 语音 URL \n\n url: Option<String>,\n\n /// 是否使用缓存文件 1|0\n\n cache: Option<u8>,\n\n /// 是否使用代理 1|0\n\n proxy: Option<u8>,\n\n /// 网络文件下载超时 单位秒\n\n timeout: Option<i64>,\n\n },\n\n\n\n /// 短视频\n\n #[serde(rename = \"video\")]\n\n Video {\n\n /// 视频文件名\n\n file: String,\n", "file_path": "nonebot_rs/src/message.rs", "rank": 92, "score": 30827.333658265503 }, { "content": " Contact {\n\n /// 类型 qq|group\n\n #[serde(rename = \"type\")]\n\n type_: String,\n\n /// QQ号|群号\n\n id: String,\n\n },\n\n\n\n /// 位置\n\n #[serde(rename = \"location\")]\n\n Lacation {\n\n /// 纬度\n\n lat: String,\n\n /// 经度 \n\n lon: String,\n\n /// 标题 \n\n title: Option<String>,\n\n /// 内容描述\n\n content: Option<String>,\n\n },\n", "file_path": "nonebot_rs/src/message.rs", "rank": 93, "score": 30827.333658265503 }, { "content": "\n\n /// 回复\n\n #[serde(rename = \"reply\")]\n\n Reply {\n\n /// 回复的消息 ID\n\n id: String,\n\n },\n\n\n\n /// 合并转发\n\n #[serde(rename = \"forward\")]\n\n Forward {\n\n /// 合并转发 ID\n\n id: String,\n\n },\n\n\n\n /// 合并转发节点\n\n #[serde(rename = \"node\")]\n\n Node {\n\n /// 转发的消息 ID\n\n id: Option<String>,\n", "file_path": "nonebot_rs/src/message.rs", "rank": 94, "score": 30827.333658265503 }, { "content": "\n\n /// 掷骰子魔法表情\n\n #[serde(rename = \"dice\")]\n\n Dice,\n\n\n\n /// 窗口抖动(戳一戳)\n\n #[serde(rename = \"shake\")]\n\n Shake,\n\n\n\n /// 戳一戳\n\n #[serde(rename = \"poke\")]\n\n Poke {\n\n /// 类型\n\n #[serde(rename = \"type\")]\n\n type_: String,\n\n /// ID\n\n id: String,\n\n /// 表情名\n\n name: Option<String>,\n\n },\n", "file_path": "nonebot_rs/src/message.rs", "rank": 95, "score": 30827.333658265503 }, { "content": " /// 图片\n\n #[serde(rename = \"image\")]\n\n Image {\n\n /// 图片文件名\n\n file: String,\n\n /// 图片类型 flash 闪照\n\n #[serde(rename = \"type\")]\n\n type_: Option<String>,\n\n /// 图片 URL\n\n url: Option<String>,\n\n /// 是否使用缓存文件 1|0\n\n cache: Option<u8>,\n\n /// 是否使用代理 1|0\n\n proxy: Option<u8>,\n\n /// 网络文件下载超时 单位秒\n\n timeout: Option<i64>,\n\n },\n\n\n\n /// 语音\n\n #[serde(rename = \"record\")]\n", "file_path": "nonebot_rs/src/message.rs", "rank": 96, "score": 30827.333658265503 }, { "content": "use crate::api_resp;\n\nuse crate::event::MessageEvent;\n\nuse crate::{api, config, message, utils, ApiChannelItem, ApiResp};\n\nuse colored::*;\n\nuse tokio::sync::{mpsc, watch};\n\nuse tracing::{event, Level};\n\n\n\nmod _api;\n\n\n\n/// 为 Plugin 提供各类 Onebot Api \n\n#[derive(Debug, Clone)]\n\npub struct Bot {\n\n /// bot id\n\n pub bot_id: String,\n\n /// connect timestamp\n\n pub connect_time: i64,\n\n // Bot Config\n\n pub config: config::BotConfig,\n\n /// 暂存调用 Bot api\n\n pub api_sender: mpsc::Sender<ApiChannelItem>,\n", "file_path": "nonebot_rs/src/bot/mod.rs", "rank": 97, "score": 29548.843311156306 }, { "content": " /// Nonebot Action Sender\n\n pub action_sender: crate::ActionSender,\n\n /// ApiResp Receiver\n\n pub api_resp_watcher: watch::Receiver<ApiResp>,\n\n}\n\n\n\nimpl Bot {\n\n pub fn new(\n\n bot_id: String,\n\n config: config::BotConfig,\n\n api_sender: mpsc::Sender<ApiChannelItem>,\n\n action_sender: crate::ActionSender,\n\n api_resp_watcher: watch::Receiver<ApiResp>,\n\n ) -> Self {\n\n Bot {\n\n bot_id: bot_id,\n\n connect_time: crate::utils::timestamp(),\n\n config: config,\n\n api_sender: api_sender,\n\n action_sender: action_sender,\n", "file_path": "nonebot_rs/src/bot/mod.rs", "rank": 98, "score": 29547.212549587333 }, { "content": " event!(\n\n Level::INFO,\n\n \"Bot [{}] Calling Api {:?}\",\n\n self.config.bot_id.red(),\n\n api\n\n );\n\n }\n\n\n\n /// 请求 Onebot Api,等待 Onebot 返回项(30s 后 timeout 返回 None)\n\n pub async fn call_api_resp(&self, api: api::Api) -> Option<api_resp::ApiResp> {\n\n let echo = api.get_echo();\n\n self.api_sender\n\n .send(ApiChannelItem::Api(api.clone()))\n\n .await\n\n .unwrap();\n\n event!(\n\n Level::INFO,\n\n \"Bot [{}] Calling Api {:?}\",\n\n self.config.bot_id.red(),\n\n api\n", "file_path": "nonebot_rs/src/bot/mod.rs", "rank": 99, "score": 29542.836218577755 } ]
Rust
src/bitwidth.rs
Robbepop/apin
0b863c6f5dfcee22e6c2f4ead88bce9814788fc2
use crate::{ mem::NonZeroUsize, storage::Storage, BitPos, Digit, Error, Result, ShiftAmount, }; #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct BitWidth(NonZeroUsize); impl BitWidth { #[inline] pub fn w1() -> Self { BitWidth(NonZeroUsize::new(1).unwrap()) } #[inline] pub fn w8() -> Self { BitWidth(NonZeroUsize::new(8).unwrap()) } #[inline] pub fn w16() -> Self { BitWidth(NonZeroUsize::new(16).unwrap()) } #[inline] pub fn w32() -> Self { BitWidth(NonZeroUsize::new(32).unwrap()) } #[inline] pub fn w64() -> Self { BitWidth(NonZeroUsize::new(64).unwrap()) } #[inline] pub fn w128() -> Self { BitWidth(NonZeroUsize::new(128).unwrap()) } pub fn new(width: usize) -> Result<Self> { if width == 0 { return Err(Error::invalid_zero_bitwidth()) } Ok(BitWidth(NonZeroUsize::new(width).unwrap())) } #[inline] pub(crate) fn is_valid_pos<P>(self, pos: P) -> bool where P: Into<BitPos>, { pos.into().to_usize() < self.to_usize() } #[inline] pub(crate) fn is_valid_shift_amount<S>(self, shift_amount: S) -> bool where S: Into<ShiftAmount>, { shift_amount.into().to_usize() < self.to_usize() } #[inline] pub(crate) fn msb_pos(self) -> BitPos { BitPos::from(self.to_usize() - 1) } } impl From<usize> for BitWidth { fn from(width: usize) -> BitWidth { BitWidth::new(width).unwrap() } } impl BitWidth { #[inline] pub fn to_usize(self) -> usize { self.0.get() } pub(crate) fn excess_bits(self) -> Option<usize> { match self.to_usize() % Digit::BITS { 0 => None, n => Some(n), } } pub(crate) fn excess_width(self) -> Option<BitWidth> { NonZeroUsize::new(self.to_usize() % Digit::BITS).map(BitWidth) } #[inline] pub(crate) fn storage(self) -> Storage { Storage::from(self) } #[inline] pub(crate) fn required_digits(self) -> usize { ((self.to_usize() - 1) / Digit::BITS) + 1 } } #[cfg(test)] mod tests { use super::*; mod excess_bits { use super::*; #[test] fn powers_of_two() { assert_eq!(BitWidth::w1().excess_bits(), Some(1)); assert_eq!(BitWidth::w8().excess_bits(), Some(8)); assert_eq!(BitWidth::w16().excess_bits(), Some(16)); assert_eq!(BitWidth::w32().excess_bits(), Some(32)); assert_eq!(BitWidth::w64().excess_bits(), None); assert_eq!(BitWidth::w128().excess_bits(), None); } #[test] fn multiples_of_50() { assert_eq!(BitWidth::new(50).unwrap().excess_bits(), Some(50)); assert_eq!(BitWidth::new(100).unwrap().excess_bits(), Some(36)); assert_eq!(BitWidth::new(150).unwrap().excess_bits(), Some(22)); assert_eq!(BitWidth::new(200).unwrap().excess_bits(), Some(8)); assert_eq!(BitWidth::new(250).unwrap().excess_bits(), Some(58)); assert_eq!(BitWidth::new(300).unwrap().excess_bits(), Some(44)); } } }
use crate::{ mem::NonZeroUsize, storage::Storage, BitPos, Digit, Error, Result, ShiftAmount, }; #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct BitWidth(NonZeroUsize); impl BitWidth { #[inline] pub fn w1() -> Self { BitWidth(NonZeroUsize::new(1).unwrap()) } #[inline] pub fn w8() -> Self { BitWidth(NonZeroUsize::new(8).unwrap()) } #[inline] pub fn w16() -> Self { BitWidth(NonZeroUsize::new(16).unwrap()) } #[inline] pub fn w32() -> Self { BitWidth(NonZeroUsize::new(32).unwrap()) } #[inline] pub fn w64() -> Self { BitWidth(NonZeroUsize::new(64).unwrap()) } #[inline] pub fn w128() -> Self { BitWidth(NonZeroUsize::new(128).unwrap()) } pub fn new(width: usize) -> Result<Self> { if widt
#[inline] pub(crate) fn is_valid_pos<P>(self, pos: P) -> bool where P: Into<BitPos>, { pos.into().to_usize() < self.to_usize() } #[inline] pub(crate) fn is_valid_shift_amount<S>(self, shift_amount: S) -> bool where S: Into<ShiftAmount>, { shift_amount.into().to_usize() < self.to_usize() } #[inline] pub(crate) fn msb_pos(self) -> BitPos { BitPos::from(self.to_usize() - 1) } } impl From<usize> for BitWidth { fn from(width: usize) -> BitWidth { BitWidth::new(width).unwrap() } } impl BitWidth { #[inline] pub fn to_usize(self) -> usize { self.0.get() } pub(crate) fn excess_bits(self) -> Option<usize> { match self.to_usize() % Digit::BITS { 0 => None, n => Some(n), } } pub(crate) fn excess_width(self) -> Option<BitWidth> { NonZeroUsize::new(self.to_usize() % Digit::BITS).map(BitWidth) } #[inline] pub(crate) fn storage(self) -> Storage { Storage::from(self) } #[inline] pub(crate) fn required_digits(self) -> usize { ((self.to_usize() - 1) / Digit::BITS) + 1 } } #[cfg(test)] mod tests { use super::*; mod excess_bits { use super::*; #[test] fn powers_of_two() { assert_eq!(BitWidth::w1().excess_bits(), Some(1)); assert_eq!(BitWidth::w8().excess_bits(), Some(8)); assert_eq!(BitWidth::w16().excess_bits(), Some(16)); assert_eq!(BitWidth::w32().excess_bits(), Some(32)); assert_eq!(BitWidth::w64().excess_bits(), None); assert_eq!(BitWidth::w128().excess_bits(), None); } #[test] fn multiples_of_50() { assert_eq!(BitWidth::new(50).unwrap().excess_bits(), Some(50)); assert_eq!(BitWidth::new(100).unwrap().excess_bits(), Some(36)); assert_eq!(BitWidth::new(150).unwrap().excess_bits(), Some(22)); assert_eq!(BitWidth::new(200).unwrap().excess_bits(), Some(8)); assert_eq!(BitWidth::new(250).unwrap().excess_bits(), Some(58)); assert_eq!(BitWidth::new(300).unwrap().excess_bits(), Some(44)); } } }
h == 0 { return Err(Error::invalid_zero_bitwidth()) } Ok(BitWidth(NonZeroUsize::new(width).unwrap())) }
function_block-function_prefixed
[]
Rust
src/borrows.rs
TimonPost/legion
43acaaa1b68e177636c618f63aaf6f5cf8f10558
use std::fmt::Debug; use std::fmt::Display; use std::ops::Deref; use std::ops::DerefMut; use std::slice::Iter; use std::slice::IterMut; use std::sync::atomic::{AtomicIsize, Ordering}; pub enum Borrow<'a> { Read { state: &'a AtomicIsize }, Write { state: &'a AtomicIsize }, } impl<'a> Borrow<'a> { pub fn aquire_read(state: &'a AtomicIsize) -> Result<Borrow<'a>, &'static str> { loop { let read = state.load(Ordering::SeqCst); if read < 0 { return Err("resource already borrowed as mutable"); } if state.compare_and_swap(read, read + 1, Ordering::SeqCst) == read { break; } } Ok(Borrow::Read { state }) } pub fn aquire_write(state: &'a AtomicIsize) -> Result<Borrow<'a>, &'static str> { let borrowed = state.compare_and_swap(0, -1, Ordering::SeqCst); match borrowed { 0 => Ok(Borrow::Write { state }), x if x < 0 => Err("resource already borrowed as mutable"), _ => Err("resource already borrowed as immutable"), } } } impl<'a> Drop for Borrow<'a> { fn drop(&mut self) { match *self { Borrow::Read { state } => { state.fetch_sub(1, Ordering::SeqCst); } Borrow::Write { state } => { state.store(0, Ordering::SeqCst); } }; } } pub struct Borrowed<'a, T: 'a> { value: &'a T, #[allow(dead_code)] state: Borrow<'a>, } impl<'a, T: 'a> Borrowed<'a, T> { pub fn new(value: &'a T, borrow: Borrow<'a>) -> Borrowed<'a, T> { Borrowed { value, state: borrow, } } } impl<'a, 'b, T: 'a + Debug> Debug for Borrowed<'a, T> { fn fmt(&self, formatter: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { self.value.fmt(formatter) } } impl<'a, 'b, T: 'a + Display> Display for Borrowed<'a, T> { fn fmt(&self, formatter: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { self.value.fmt(formatter) } } impl<'a, 'b, T: 'a + PartialEq<T>> PartialEq<Borrowed<'b, T>> for Borrowed<'a, T> { fn eq(&self, other: &Borrowed<'b, T>) -> bool { self.value.eq(other.value) } } impl<'a, 'b, T: 'a + PartialEq<T>> PartialEq<T> for Borrowed<'a, T> { fn eq(&self, other: &T) -> bool { self.value.eq(other) } } impl<'a, 'b, T: 'a + Eq> Eq for Borrowed<'a, T> {} impl<'a, T: 'a> Deref for Borrowed<'a, T> { type Target = T; fn deref(&self) -> &Self::Target { self.value } } impl<'a, T: 'a> AsRef<T> for Borrowed<'a, T> { fn as_ref(&self) -> &T { self.value } } impl<'a, T: 'a> std::borrow::Borrow<T> for Borrowed<'a, T> { fn borrow(&self) -> &T { self.value } } pub struct BorrowedMut<'a, T: 'a> { value: &'a mut T, #[allow(dead_code)] state: Borrow<'a>, } impl<'a, T: 'a> BorrowedMut<'a, T> { pub fn new(value: &'a mut T, borrow: Borrow<'a>) -> BorrowedMut<'a, T> { BorrowedMut { value, state: borrow, } } } impl<'a, T: 'a> Deref for BorrowedMut<'a, T> { type Target = T; fn deref(&self) -> &Self::Target { self.value } } impl<'a, T: 'a> DerefMut for BorrowedMut<'a, T> { fn deref_mut(&mut self) -> &mut Self::Target { self.value } } impl<'a, T: 'a> AsRef<T> for BorrowedMut<'a, T> { fn as_ref(&self) -> &T { self.value } } impl<'a, T: 'a> std::borrow::Borrow<T> for BorrowedMut<'a, T> { fn borrow(&self) -> &T { self.value } } impl<'a, 'b, T: 'a + Debug> Debug for BorrowedMut<'a, T> { fn fmt(&self, formatter: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { self.value.fmt(formatter) } } impl<'a, 'b, T: 'a + Display> Display for BorrowedMut<'a, T> { fn fmt(&self, formatter: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { self.value.fmt(formatter) } } pub struct BorrowedSlice<'a, T: 'a> { slice: &'a [T], state: Borrow<'a>, } impl<'a, T: 'a> BorrowedSlice<'a, T> { pub fn new(slice: &'a [T], borrow: Borrow<'a>) -> BorrowedSlice<'a, T> { BorrowedSlice { slice, state: borrow, } } pub fn single(self, i: usize) -> Option<Borrowed<'a, T>> { let slice = self.slice; let state = self.state; slice.get(i).map(|x| Borrowed::new(x, state)) } } impl<'a, T: 'a> Deref for BorrowedSlice<'a, T> { type Target = [T]; fn deref(&self) -> &Self::Target { self.slice } } impl<'a, T: 'a> IntoIterator for BorrowedSlice<'a, T> { type Item = &'a T; type IntoIter = BorrowedIter<'a, Iter<'a, T>>; fn into_iter(self) -> Self::IntoIter { BorrowedIter { inner: self.slice.into_iter(), state: self.state, } } } pub struct BorrowedMutSlice<'a, T: 'a> { slice: &'a mut [T], state: Borrow<'a>, } impl<'a, T: 'a> BorrowedMutSlice<'a, T> { pub fn new(slice: &'a mut [T], borrow: Borrow<'a>) -> BorrowedMutSlice<'a, T> { BorrowedMutSlice { slice, state: borrow, } } pub fn single(self, i: usize) -> Option<BorrowedMut<'a, T>> { let slice = self.slice; let state = self.state; slice.get_mut(i).map(|x| BorrowedMut::new(x, state)) } } impl<'a, T: 'a> Deref for BorrowedMutSlice<'a, T> { type Target = [T]; fn deref(&self) -> &Self::Target { self.slice } } impl<'a, T: 'a> DerefMut for BorrowedMutSlice<'a, T> { fn deref_mut(&mut self) -> &mut Self::Target { self.slice } } impl<'a, T: 'a> IntoIterator for BorrowedMutSlice<'a, T> { type Item = &'a mut T; type IntoIter = BorrowedIter<'a, IterMut<'a, T>>; fn into_iter(self) -> Self::IntoIter { BorrowedIter { inner: self.slice.into_iter(), state: self.state, } } } pub struct BorrowedIter<'a, I: 'a + Iterator> { inner: I, #[allow(dead_code)] state: Borrow<'a>, } impl<'a, I: 'a + Iterator> Iterator for BorrowedIter<'a, I> { type Item = I::Item; fn next(&mut self) -> Option<Self::Item> { self.inner.next() } fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() } } impl<'a, I: 'a + ExactSizeIterator> ExactSizeIterator for BorrowedIter<'a, I> {} #[cfg(test)] mod tests { use super::*; use std::sync::atomic::{AtomicIsize, Ordering}; #[test] fn borrow_read() { let state = AtomicIsize::new(0); let x = 5u8; let _borrow = Borrowed::new(&x, Borrow::aquire_read(&state).unwrap()); assert_eq!(1, state.load(Ordering::SeqCst)); } #[test] fn drop_read() { let state = AtomicIsize::new(0); let x = 5u8; { let _borrow = Borrowed::new(&x, Borrow::aquire_read(&state).unwrap()); assert_eq!(1, state.load(Ordering::SeqCst)); } assert_eq!(0, state.load(Ordering::SeqCst)); } #[test] fn borrow_write() { let state = AtomicIsize::new(0); let x = 5u8; let _borrow = Borrowed::new(&x, Borrow::aquire_write(&state).unwrap()); assert_eq!(-1, state.load(Ordering::SeqCst)); } #[test] fn drop_write() { let state = AtomicIsize::new(0); let x = 5u8; { let _borrow = Borrowed::new(&x, Borrow::aquire_write(&state).unwrap()); assert_eq!(-1, state.load(Ordering::SeqCst)); } assert_eq!(0, state.load(Ordering::SeqCst)); } #[test] fn read_while_reading() { let state = AtomicIsize::new(0); let _read = Borrow::aquire_read(&state).unwrap(); let _read2 = Borrow::aquire_read(&state).unwrap(); } #[test] #[should_panic(expected = "resource already borrowed as immutable")] fn write_while_reading() { let state = AtomicIsize::new(0); let _read = Borrow::aquire_read(&state).unwrap(); let _write = Borrow::aquire_write(&state).unwrap(); } #[test] #[should_panic(expected = "resource already borrowed as mutable")] fn read_while_writing() { let state = AtomicIsize::new(0); let _write = Borrow::aquire_write(&state).unwrap(); let _read = Borrow::aquire_read(&state).unwrap(); } #[test] #[should_panic(expected = "resource already borrowed as mutable")] fn write_while_writing() { let state = AtomicIsize::new(0); let _write = Borrow::aquire_write(&state).unwrap(); let _write2 = Borrow::aquire_write(&state).unwrap(); } }
use std::fmt::Debug; use std::fmt::Display; use std::ops::Deref; use std::ops::DerefMut; use std::slice::Iter; use std::slice::IterMut; use std::sync::atomic::{AtomicIsize, Ordering}; pub enum Borrow<'a> { Read { state: &'a AtomicIsize }, Write { state: &'a AtomicIsize }, } impl<'a> Borrow<'a> { pub fn aquire_read(state: &'a AtomicIsize) -> Result<Borrow<'a>, &'static str> { loop { let read = state.load(Ordering::SeqCst); if read < 0 { return Err("resource already borrowed as mutable"); } if state.compare_and_swap(read, read + 1, Ordering::SeqCst) == read { break; } } Ok(Borrow::Read { state }) } pub fn aquire_write(state: &'a AtomicIsize) -> Result<Borrow<'a>, &'static str> { let borrowed = state.compare_and_swap(0, -1, Ordering::SeqCst); match borrowed { 0 => Ok(Borrow::Write { state }), x
i: usize) -> Option<BorrowedMut<'a, T>> { let slice = self.slice; let state = self.state; slice.get_mut(i).map(|x| BorrowedMut::new(x, state)) } } impl<'a, T: 'a> Deref for BorrowedMutSlice<'a, T> { type Target = [T]; fn deref(&self) -> &Self::Target { self.slice } } impl<'a, T: 'a> DerefMut for BorrowedMutSlice<'a, T> { fn deref_mut(&mut self) -> &mut Self::Target { self.slice } } impl<'a, T: 'a> IntoIterator for BorrowedMutSlice<'a, T> { type Item = &'a mut T; type IntoIter = BorrowedIter<'a, IterMut<'a, T>>; fn into_iter(self) -> Self::IntoIter { BorrowedIter { inner: self.slice.into_iter(), state: self.state, } } } pub struct BorrowedIter<'a, I: 'a + Iterator> { inner: I, #[allow(dead_code)] state: Borrow<'a>, } impl<'a, I: 'a + Iterator> Iterator for BorrowedIter<'a, I> { type Item = I::Item; fn next(&mut self) -> Option<Self::Item> { self.inner.next() } fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() } } impl<'a, I: 'a + ExactSizeIterator> ExactSizeIterator for BorrowedIter<'a, I> {} #[cfg(test)] mod tests { use super::*; use std::sync::atomic::{AtomicIsize, Ordering}; #[test] fn borrow_read() { let state = AtomicIsize::new(0); let x = 5u8; let _borrow = Borrowed::new(&x, Borrow::aquire_read(&state).unwrap()); assert_eq!(1, state.load(Ordering::SeqCst)); } #[test] fn drop_read() { let state = AtomicIsize::new(0); let x = 5u8; { let _borrow = Borrowed::new(&x, Borrow::aquire_read(&state).unwrap()); assert_eq!(1, state.load(Ordering::SeqCst)); } assert_eq!(0, state.load(Ordering::SeqCst)); } #[test] fn borrow_write() { let state = AtomicIsize::new(0); let x = 5u8; let _borrow = Borrowed::new(&x, Borrow::aquire_write(&state).unwrap()); assert_eq!(-1, state.load(Ordering::SeqCst)); } #[test] fn drop_write() { let state = AtomicIsize::new(0); let x = 5u8; { let _borrow = Borrowed::new(&x, Borrow::aquire_write(&state).unwrap()); assert_eq!(-1, state.load(Ordering::SeqCst)); } assert_eq!(0, state.load(Ordering::SeqCst)); } #[test] fn read_while_reading() { let state = AtomicIsize::new(0); let _read = Borrow::aquire_read(&state).unwrap(); let _read2 = Borrow::aquire_read(&state).unwrap(); } #[test] #[should_panic(expected = "resource already borrowed as immutable")] fn write_while_reading() { let state = AtomicIsize::new(0); let _read = Borrow::aquire_read(&state).unwrap(); let _write = Borrow::aquire_write(&state).unwrap(); } #[test] #[should_panic(expected = "resource already borrowed as mutable")] fn read_while_writing() { let state = AtomicIsize::new(0); let _write = Borrow::aquire_write(&state).unwrap(); let _read = Borrow::aquire_read(&state).unwrap(); } #[test] #[should_panic(expected = "resource already borrowed as mutable")] fn write_while_writing() { let state = AtomicIsize::new(0); let _write = Borrow::aquire_write(&state).unwrap(); let _write2 = Borrow::aquire_write(&state).unwrap(); } }
if x < 0 => Err("resource already borrowed as mutable"), _ => Err("resource already borrowed as immutable"), } } } impl<'a> Drop for Borrow<'a> { fn drop(&mut self) { match *self { Borrow::Read { state } => { state.fetch_sub(1, Ordering::SeqCst); } Borrow::Write { state } => { state.store(0, Ordering::SeqCst); } }; } } pub struct Borrowed<'a, T: 'a> { value: &'a T, #[allow(dead_code)] state: Borrow<'a>, } impl<'a, T: 'a> Borrowed<'a, T> { pub fn new(value: &'a T, borrow: Borrow<'a>) -> Borrowed<'a, T> { Borrowed { value, state: borrow, } } } impl<'a, 'b, T: 'a + Debug> Debug for Borrowed<'a, T> { fn fmt(&self, formatter: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { self.value.fmt(formatter) } } impl<'a, 'b, T: 'a + Display> Display for Borrowed<'a, T> { fn fmt(&self, formatter: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { self.value.fmt(formatter) } } impl<'a, 'b, T: 'a + PartialEq<T>> PartialEq<Borrowed<'b, T>> for Borrowed<'a, T> { fn eq(&self, other: &Borrowed<'b, T>) -> bool { self.value.eq(other.value) } } impl<'a, 'b, T: 'a + PartialEq<T>> PartialEq<T> for Borrowed<'a, T> { fn eq(&self, other: &T) -> bool { self.value.eq(other) } } impl<'a, 'b, T: 'a + Eq> Eq for Borrowed<'a, T> {} impl<'a, T: 'a> Deref for Borrowed<'a, T> { type Target = T; fn deref(&self) -> &Self::Target { self.value } } impl<'a, T: 'a> AsRef<T> for Borrowed<'a, T> { fn as_ref(&self) -> &T { self.value } } impl<'a, T: 'a> std::borrow::Borrow<T> for Borrowed<'a, T> { fn borrow(&self) -> &T { self.value } } pub struct BorrowedMut<'a, T: 'a> { value: &'a mut T, #[allow(dead_code)] state: Borrow<'a>, } impl<'a, T: 'a> BorrowedMut<'a, T> { pub fn new(value: &'a mut T, borrow: Borrow<'a>) -> BorrowedMut<'a, T> { BorrowedMut { value, state: borrow, } } } impl<'a, T: 'a> Deref for BorrowedMut<'a, T> { type Target = T; fn deref(&self) -> &Self::Target { self.value } } impl<'a, T: 'a> DerefMut for BorrowedMut<'a, T> { fn deref_mut(&mut self) -> &mut Self::Target { self.value } } impl<'a, T: 'a> AsRef<T> for BorrowedMut<'a, T> { fn as_ref(&self) -> &T { self.value } } impl<'a, T: 'a> std::borrow::Borrow<T> for BorrowedMut<'a, T> { fn borrow(&self) -> &T { self.value } } impl<'a, 'b, T: 'a + Debug> Debug for BorrowedMut<'a, T> { fn fmt(&self, formatter: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { self.value.fmt(formatter) } } impl<'a, 'b, T: 'a + Display> Display for BorrowedMut<'a, T> { fn fmt(&self, formatter: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { self.value.fmt(formatter) } } pub struct BorrowedSlice<'a, T: 'a> { slice: &'a [T], state: Borrow<'a>, } impl<'a, T: 'a> BorrowedSlice<'a, T> { pub fn new(slice: &'a [T], borrow: Borrow<'a>) -> BorrowedSlice<'a, T> { BorrowedSlice { slice, state: borrow, } } pub fn single(self, i: usize) -> Option<Borrowed<'a, T>> { let slice = self.slice; let state = self.state; slice.get(i).map(|x| Borrowed::new(x, state)) } } impl<'a, T: 'a> Deref for BorrowedSlice<'a, T> { type Target = [T]; fn deref(&self) -> &Self::Target { self.slice } } impl<'a, T: 'a> IntoIterator for BorrowedSlice<'a, T> { type Item = &'a T; type IntoIter = BorrowedIter<'a, Iter<'a, T>>; fn into_iter(self) -> Self::IntoIter { BorrowedIter { inner: self.slice.into_iter(), state: self.state, } } } pub struct BorrowedMutSlice<'a, T: 'a> { slice: &'a mut [T], state: Borrow<'a>, } impl<'a, T: 'a> BorrowedMutSlice<'a, T> { pub fn new(slice: &'a mut [T], borrow: Borrow<'a>) -> BorrowedMutSlice<'a, T> { BorrowedMutSlice { slice, state: borrow, } } pub fn single(self,
random
[ { "content": "/// A type which can fetch a strongly-typed view of the data contained\n\n/// within a `Chunk`.\n\npub trait View<'a>: Sized + Send + Sync + 'static {\n\n /// The iterator over the chunk data.\n\n type Iter: Iterator + 'a;\n\n\n\n /// Pulls data out of a chunk.\n\n fn fetch(chunk: &'a Chunk) -> Self::Iter;\n\n\n\n /// Validates that the view does not break any component borrowing rules.\n\n fn validate() -> bool;\n\n\n\n /// Determines if the view reads the specified data type.\n\n fn reads<T: Component>() -> bool;\n\n\n\n /// Determines if the view writes to the specified data type.\n\n fn writes<T: Component>() -> bool;\n\n}\n\n\n", "file_path": "src/query.rs", "rank": 0, "score": 89497.76379036267 }, { "content": "pub fn ext_type_id() -> std::any::TypeId {\n\n std::any::TypeId::of::<ExternalComponent>()\n\n}\n\n\n\nimpl crate::TagSet for &EntityData {\n\n fn is_archetype_match(&self, archetype: &Archetype) -> bool {\n\n if archetype.tags.len() != self.num_tag_types as usize {\n\n false\n\n } else {\n\n unsafe {\n\n for i in 0..self.num_tag_types {\n\n if !archetype.has_tag_type(&TagTypeId(\n\n ext_type_id(),\n\n *self.tag_types.offset(i as isize),\n\n )) {\n\n return false;\n\n }\n\n }\n\n }\n\n true\n", "file_path": "src/c_api.rs", "rank": 1, "score": 89288.93069027601 }, { "content": "/// Components that are stored once per entity.\n\npub trait Component: Send + Sync + Sized + Debug + 'static {\n\n fn type_id() -> ComponentTypeId;\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 2, "score": 87331.05173849162 }, { "content": "#[test]\n\nfn query_partial_match() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (Static, Model(5));\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n let mut expected = HashMap::<Entity, (Pos, Rot)>::new();\n\n\n\n for (i, e) in world\n\n .insert_from(shared.as_tags(), components.clone())\n\n .iter()\n\n .enumerate()\n\n {\n\n if let Some((pos, rot)) = components.get(i) {\n\n expected.insert(*e, (*pos, *rot));\n\n }\n", "file_path": "tests/query_api.rs", "rank": 3, "score": 82442.63259673232 }, { "content": "#[test]\n\nfn query_write_entity_data() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (Static, Model(5));\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n let mut expected = HashMap::<Entity, (Pos, Rot)>::new();\n\n\n\n for (i, e) in world\n\n .insert_from(shared.as_tags(), components.clone())\n\n .iter()\n\n .enumerate()\n\n {\n\n if let Some((pos, rot)) = components.get(i) {\n\n expected.insert(*e, (*pos, *rot));\n\n }\n", "file_path": "tests/query_api.rs", "rank": 4, "score": 79862.21262211795 }, { "content": "#[test]\n\nfn query_read_shared_data() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (Static, Model(5));\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n world.insert_from(shared.as_tags(), components.clone());\n\n\n\n let mut query = Tagged::<Static>::query();\n\n\n\n let mut count = 0;\n\n for marker in query.iter(&world) {\n\n assert_eq!(&Static, marker);\n\n count += 1;\n\n }\n\n\n\n assert_eq!(components.len(), count);\n\n}\n\n\n", "file_path": "tests/query_api.rs", "rank": 5, "score": 79815.450187612 }, { "content": "#[test]\n\nfn query_read_entity_data() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (Static, Model(5));\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n let mut expected = HashMap::<Entity, (Pos, Rot)>::new();\n\n\n\n for (i, e) in world\n\n .insert_from(shared.as_tags(), components.clone())\n\n .iter()\n\n .enumerate()\n\n {\n\n if let Some((pos, rot)) = components.get(i) {\n\n expected.insert(*e, (*pos, *rot));\n\n }\n", "file_path": "tests/query_api.rs", "rank": 6, "score": 79815.450187612 }, { "content": "/// Components that are shared across multiple entities.\n\npub trait Tag: Send + Sync + Sized + PartialEq + Clone + Debug + 'static {\n\n fn type_id() -> TagTypeId;\n\n}\n\n#[cfg(feature = \"blanket-impl-comp\")]\n\nimpl<T> DataTypeId for T {}\n\n\n\nimpl<T: DataTypeId + Send + Sync + Sized + Debug + 'static> Component for T {\n\n fn type_id() -> ComponentTypeId {\n\n let (a, b) = <T as DataTypeId>::type_id();\n\n ComponentTypeId(a, b)\n\n }\n\n}\n\n\n\nimpl<T: DataTypeId + Send + Sync + Sized + PartialEq + Clone + Debug + 'static> Tag for T {\n\n fn type_id() -> TagTypeId {\n\n let (a, b) = <T as DataTypeId>::type_id();\n\n TagTypeId(a, b)\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 7, "score": 77718.43001168506 }, { "content": "#[test]\n\nfn query_write_entity_data_tuple() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (Static, Model(5));\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n let mut expected = HashMap::<Entity, (Pos, Rot)>::new();\n\n\n\n for (i, e) in world\n\n .insert_from(shared.as_tags(), components.clone())\n\n .iter()\n\n .enumerate()\n\n {\n\n if let Some((pos, rot)) = components.get(i) {\n\n expected.insert(*e, (*pos, *rot));\n\n }\n", "file_path": "tests/query_api.rs", "rank": 8, "score": 77519.65740038447 }, { "content": "#[test]\n\nfn query_read_entity_data_par() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (Static, Model(5));\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n let mut expected = HashMap::<Entity, (Pos, Rot)>::new();\n\n\n\n for (i, e) in world\n\n .insert_from(shared.as_tags(), components.clone())\n\n .iter()\n\n .enumerate()\n\n {\n\n if let Some((pos, rot)) = components.get(i) {\n\n expected.insert(*e, (*pos, *rot));\n\n }\n", "file_path": "tests/query_api.rs", "rank": 9, "score": 77474.93961441796 }, { "content": "#[test]\n\nfn query_read_entity_data_tuple() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (Static, Model(5));\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n let mut expected = HashMap::<Entity, (Pos, Rot)>::new();\n\n\n\n for (i, e) in world\n\n .insert_from(shared.as_tags(), components.clone())\n\n .iter()\n\n .enumerate()\n\n {\n\n if let Some((pos, rot)) = components.get(i) {\n\n expected.insert(*e, (*pos, *rot));\n\n }\n", "file_path": "tests/query_api.rs", "rank": 10, "score": 77474.93961441796 }, { "content": "#[test]\n\nfn query_cached_read_entity_data() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (Static, Model(5));\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n let mut expected = HashMap::<Entity, (Pos, Rot)>::new();\n\n\n\n for (i, e) in world\n\n .insert_from(shared.as_tags(), components.clone())\n\n .iter()\n\n .enumerate()\n\n {\n\n if let Some((pos, rot)) = components.get(i) {\n\n expected.insert(*e, (*pos, *rot));\n\n }\n", "file_path": "tests/query_api.rs", "rank": 11, "score": 77474.93961441796 }, { "content": "#[test]\n\nfn query_read_entity_data_par_foreach() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (Static, Model(5));\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n let mut expected = HashMap::<Entity, (Pos, Rot)>::new();\n\n\n\n for (i, e) in world\n\n .insert_from(shared.as_tags(), components.clone())\n\n .iter()\n\n .enumerate()\n\n {\n\n if let Some((pos, rot)) = components.get(i) {\n\n expected.insert(*e, (*pos, *rot));\n\n }\n", "file_path": "tests/query_api.rs", "rank": 12, "score": 75330.52850984083 }, { "content": "fn bench_ordered(c: &mut Criterion) {\n\n c.bench(\n\n \"concurrent queries\",\n\n ParameterizedBenchmark::new(\n\n \"sequential ideal\",\n\n |b, n| {\n\n let data = data(*n);\n\n let (mut ab, mut ac) = setup_ideal(&data);\n\n b.iter(|| ideal(&mut ab, &mut ac));\n\n },\n\n (1..11).map(|i| i * 1000),\n\n )\n\n .with_function(\"sequential\", |b, n| {\n\n let data = data(*n);\n\n let world = setup(&data);\n\n b.iter(|| sequential(&world));\n\n })\n\n .with_function(\"parallel\", |b, n| {\n\n let data = data(*n);\n\n let world = setup(&data);\n", "file_path": "benches/parallel_query.rs", "rank": 13, "score": 74041.16823266666 }, { "content": "fn build_dispatch_state<'a, T: JobDescriptor>(graph: &'a JobGraph<'a, T>) -> DispatchState<'a, T> {\n\n use petgraph::visit::EdgeRef;\n\n // topologically sort graph to optimize iteration for unscheduled jobs\n\n let mut sorted_nodes =\n\n petgraph::algo::toposort(&graph, None).expect(\"failed to sort job graph\");\n\n sorted_nodes.reverse();\n\n // extract a bitset for each node that defines their dependencies in terms of indices into sorted_nodes\n\n let job_deps = sorted_nodes\n\n .iter()\n\n .map(|n| {\n\n let dep_indices = graph\n\n .edges_directed(*n, petgraph::Direction::Outgoing)\n\n .filter_map(|e| sorted_nodes.iter().position(|n| *n == e.target()));\n\n let mut bitset = BitSet::new();\n\n for idx in dep_indices {\n\n bitset.add(idx as u32);\n\n }\n\n bitset\n\n })\n\n .collect();\n\n let sorted_jobs: Vec<_> = sorted_nodes.into_iter().map(|n| &graph[n]).collect();\n\n DispatchState {\n\n jobs_completed: BitSet::with_capacity(sorted_jobs.len() as u32),\n\n jobs_scheduled: BitSet::with_capacity(sorted_jobs.len() as u32),\n\n sorted_jobs,\n\n job_deps,\n\n }\n\n}\n\n\n", "file_path": "src/schedule.rs", "rank": 14, "score": 62052.055680921294 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nenum Variants {\n\n AB(A, B),\n\n AC(A, C),\n\n}\n\n\n", "file_path": "benches/parallel_query.rs", "rank": 15, "score": 61471.55376845368 }, { "content": "#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]\n\nstruct Static;\n\n\n", "file_path": "tests/world_api.rs", "rank": 16, "score": 61340.75180272893 }, { "content": "#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]\n\nstruct Static;\n\n\n\n#[cfg(not(feature = \"blanket-impl-comp\"))]\n\nmod component_impl {\n\n use super::*;\n\n impl DefaultComponentImpl for Static {}\n\n impl DefaultComponentImpl for Model {}\n\n impl DefaultComponentImpl for Accel {}\n\n impl DefaultComponentImpl for Vel {}\n\n impl DefaultComponentImpl for Scale {}\n\n impl DefaultComponentImpl for Pos {}\n\n impl DefaultComponentImpl for Rot {}\n\n}\n\n// fn create_test_world() -> (\n\n// World,\n\n// HashMap<\n\n// Entity,\n\n// (\n\n// Option<Pos>,\n\n// Option<Rot>,\n", "file_path": "tests/query_api.rs", "rank": 17, "score": 61340.75180272893 }, { "content": "fn process(\n\n position: &Vector3<f32>,\n\n orientation: &Quaternion<f32>,\n\n scale: &Vector3<f32>,\n\n) -> Matrix4<f32> {\n\n let rot: Matrix4<f32> = (*orientation).into();\n\n Matrix4::from_nonuniform_scale(scale.x, scale.y, scale.z)\n\n * rot\n\n * Matrix4::from_translation(*position)\n\n}\n\n\n", "file_path": "benches/transform.rs", "rank": 18, "score": 58644.270055605695 }, { "content": "/// Queries for entities within a `World`.\n\n///\n\n/// # Examples\n\n///\n\n/// Queries can be constructed from any `View` type, including tuples of `View`s.\n\n///\n\n/// ```rust\n\n/// # use legion::prelude::*;\n\n/// # #[derive(Copy, Clone, Debug, PartialEq)]\n\n/// # struct Position;\n\n/// # #[derive(Copy, Clone, Debug, PartialEq)]\n\n/// # struct Velocity;\n\n/// # #[derive(Copy, Clone, Debug, PartialEq)]\n\n/// # struct Model;\n\n/// // A query which matches any entity with a `Position` component\n\n/// let mut query = Read::<Position>::query();\n\n///\n\n/// // A query which matches any entity with both a `Position` and a `Velocity` component\n\n/// let mut query = <(Read<Position>, Read<Velocity>)>::query();\n\n/// ```\n\n///\n\n/// The view determines what data is accessed, and whether it is accessed mutably or not.\n\n///\n\n/// ```rust\n\n/// # use legion::prelude::*;\n\n/// # #[derive(Copy, Clone, Debug, PartialEq)]\n\n/// # struct Position;\n\n/// # #[derive(Copy, Clone, Debug, PartialEq)]\n\n/// # struct Velocity;\n\n/// # #[derive(Copy, Clone, Debug, PartialEq)]\n\n/// # struct Model;\n\n/// // A query which writes `Position`, reads `Velocity` and reads `Model`\n\n/// // Tags are read-only, and is distinguished from entity data reads with `Tagged<T>`.\n\n/// let mut query = <(Write<Position>, Read<Velocity>, Tagged<Model>)>::query();\n\n/// ```\n\n///\n\n/// By default, a query will filter its results to include only entities with the data\n\n/// types accessed by the view. However, additional filters can be specified if needed:\n\n///\n\n/// ```rust\n\n/// # use legion::prelude::*;\n\n/// # #[derive(Copy, Clone, Debug, PartialEq)]\n\n/// # struct Position;\n\n/// # #[derive(Copy, Clone, Debug, PartialEq)]\n\n/// # struct Velocity;\n\n/// # #[derive(Copy, Clone, Debug, PartialEq)]\n\n/// # struct Model;\n\n/// #[derive(Copy, Clone, Debug, PartialEq)]\n\n/// struct Static;\n\n///\n\n/// // A query which also requires that entities have the `Static` tag\n\n/// let mut query = <(Read<Position>, Tagged<Model>)>::query().filter(tag::<Static>());\n\n/// ```\n\n///\n\n/// Filters can be combined with bitwise operators:\n\n///\n\n/// ```rust\n\n/// # use legion::prelude::*;\n\n/// # #[derive(Copy, Clone, Debug, PartialEq)]\n\n/// # struct Position;\n\n/// # #[derive(Copy, Clone, Debug, PartialEq)]\n\n/// # struct Velocity;\n\n/// # #[derive(Copy, Clone, Debug, PartialEq)]\n\n/// # struct Model;\n\n/// #[derive(Copy, Clone, Debug, PartialEq)]\n\n/// struct Static;\n\n///\n\n/// // This query matches entities with positions and a model\n\n/// // But it also requires that the entity is not static, or has moved (even if static)\n\n/// let mut query = <(Read<Position>, Tagged<Model>)>::query()\n\n/// .filter(!tag::<Static>() | changed::<Position>());\n\n/// ```\n\n///\n\n/// Filters can be iterated through to pull data out of a `World`:\n\n///\n\n/// ```rust\n\n/// # use legion::prelude::*;\n\n/// # #[derive(Copy, Clone, Debug, PartialEq)]\n\n/// # struct Position;\n\n/// # #[derive(Copy, Clone, Debug, PartialEq)]\n\n/// # struct Velocity;\n\n/// # #[derive(Copy, Clone, Debug, PartialEq)]\n\n/// # struct Model;\n\n/// # let universe = Universe::new(None);\n\n/// # let world = universe.create_world();\n\n/// // A query which writes `Position`, reads `Velocity` and reads `Model`\n\n/// // Tags are read-only, and is distinguished from entity data reads with `Tagged<T>`.\n\n/// let mut query = <(Write<Position>, Read<Velocity>, Tagged<Model>)>::query();\n\n///\n\n/// for (pos, vel, model) in query.iter(&world) {\n\n/// // `.iter` yields tuples of references to a single entity's data:\n\n/// // pos: &mut Position\n\n/// // vel: &Velocity\n\n/// // model: &Model\n\n/// }\n\n/// ```\n\n///\n\n/// The lower level `iter_chunks` function allows access to each underlying chunk of entity data.\n\n/// This allows you to run code for each tag value, or to retrieve a contiguous data slice.\n\n///\n\n/// ```rust\n\n/// # use legion::prelude::*;\n\n/// # #[derive(Copy, Clone, Debug, PartialEq)]\n\n/// # struct Position;\n\n/// # #[derive(Copy, Clone, Debug, PartialEq)]\n\n/// # struct Velocity;\n\n/// # #[derive(Copy, Clone, Debug, PartialEq)]\n\n/// # struct Model;\n\n/// # let universe = Universe::new(None);\n\n/// # let world = universe.create_world();\n\n/// let mut query = <(Write<Position>, Read<Velocity>, Tagged<Model>)>::query();\n\n///\n\n/// for chunk in query.iter_chunks(&world) {\n\n/// let model = chunk.tag::<Model>();\n\n/// let positions = chunk.components_mut::<Position>();\n\n/// let velocities = chunk.components::<Velocity>();\n\n/// }\n\n/// ```\n\n///\n\n/// The `ChunkView` yielded from `iter_chunks` allows access to all shared data in the chunk (queried for or not),\n\n/// but entity data slices can only be accessed if they were requested in the query's view. Attempting to access\n\n/// other data types, or attempting to write to components that were only requested via a `Read` will panic.\n\npub trait Query {\n\n /// The chunk filter used to determine which chunks to include in the output.\n\n type Filter: Filter;\n\n\n\n /// The view used to determine which components are accessed.\n\n type View: for<'data> View<'data>;\n\n\n\n /// Adds an additional filter to the query.\n\n fn filter<T: Filter>(self, filter: T) -> QueryDef<Self::View, And<(Self::Filter, T)>>;\n\n\n\n /// Gets an iterator which iterates through all chunks that match the query.\n\n fn iter_chunks<'a, 'data>(\n\n &'a mut self,\n\n world: &'data World,\n\n ) -> ChunkViewIter<'data, 'a, Self::View, Self::Filter>;\n\n\n\n /// Gets an iterator which iterates through all entity data that matches the query.\n\n fn iter<'a, 'data>(\n\n &'a mut self,\n\n world: &'data World,\n", "file_path": "src/query.rs", "rank": 19, "score": 58132.08618078244 }, { "content": "#[test]\n\nfn delete() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (Static, Model(5));\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n let mut entities: Vec<Entity> = Vec::new();\n\n for e in world.insert_from(shared.as_tags(), components.clone()) {\n\n entities.push(*e);\n\n }\n\n\n\n for e in entities.iter() {\n\n assert_eq!(true, world.is_alive(e));\n\n }\n\n\n\n for e in entities.iter() {\n\n world.delete(*e);\n\n assert_eq!(false, world.is_alive(e));\n\n }\n\n}\n\n\n", "file_path": "tests/world_api.rs", "rank": 20, "score": 56929.553499201866 }, { "content": "#[test]\n\nfn insert() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (1usize, 2f32, 3u16);\n\n let components = vec![(4f32, 5u64, 6u16), (4f32, 5u64, 6u16)];\n\n let entities = world.insert_from(shared.as_tags(), components);\n\n\n\n assert_eq!(2, entities.len());\n\n}\n\n\n", "file_path": "tests/world_api.rs", "rank": 21, "score": 56929.553499201866 }, { "content": "fn create_entities(\n\n world: &mut World,\n\n variants: &mut [Box<FnMut(&mut DynamicTagSet, &mut DynamicSingleEntitySource)>],\n\n num_components: usize,\n\n count: usize,\n\n) {\n\n let len_variants = variants.len();\n\n let components = (0..)\n\n .flat_map(|step| (0..len_variants).map(move |i| (i + i * step) % len_variants))\n\n .chunks(num_components);\n\n\n\n for initializers in (&components).into_iter().take(count) {\n\n let entity = world.insert_from((), Some((A(0.0),)))[0];\n\n world.mutate_entity(entity, |e| {\n\n for i in initializers {\n\n let init = variants.get_mut(i).unwrap();\n\n let (tags, components) = e.deconstruct();\n\n init(tags, components);\n\n }\n\n });\n\n }\n\n}\n\n\n", "file_path": "benches/benchmarks.rs", "rank": 22, "score": 56929.553499201866 }, { "content": "#[test]\n\nfn merge() {\n\n let universe = Universe::new(None);\n\n let mut world_1 = universe.create_world();\n\n let mut world_2 = universe.create_world();\n\n\n\n let shared = (Static, Model(5));\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n let mut world_1_entities: Vec<Entity> = Vec::new();\n\n for e in world_1.insert_from(shared.as_tags(), components.clone()) {\n\n world_1_entities.push(*e);\n\n }\n\n\n\n let mut world_2_entities: Vec<Entity> = Vec::new();\n\n for e in world_2.insert_from(shared.as_tags(), components.clone()) {\n\n world_2_entities.push(*e);\n\n }\n", "file_path": "tests/world_api.rs", "rank": 23, "score": 56929.553499201866 }, { "content": "fn main() {\n\n // create world\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n // create entities\n\n world.insert_from(\n\n (),\n\n vec![\n\n (Pos(1., 2., 3.), Vel(1., 2., 3.)),\n\n (Pos(1., 2., 3.), Vel(1., 2., 3.)),\n\n (Pos(1., 2., 3.), Vel(1., 2., 3.)),\n\n (Pos(1., 2., 3.), Vel(1., 2., 3.)),\n\n ],\n\n );\n\n\n\n // update positions\n\n let mut query = <(Write<Pos>, Read<Vel>)>::query();\n\n for (pos, vel) in query.iter(&world) {\n\n pos.0 += vel.0;\n", "file_path": "examples/hello_world.rs", "rank": 24, "score": 56929.553499201866 }, { "content": "/// A type which can construct a default entity filter.\n\npub trait DefaultFilter {\n\n /// The type of entity filter constructed.\n\n type Filter: Filter;\n\n\n\n /// constructs an entity filter.\n\n fn filter() -> Self::Filter;\n\n}\n\n\n", "file_path": "src/query.rs", "rank": 25, "score": 56544.689364652964 }, { "content": "/// Inserts tags into a `Chunk` in a `World`.\n\npub trait TagSet {\n\n /// Determines if the given archetype is compatible with the data\n\n /// contained in the data set.\n\n fn is_archetype_match(&self, archetype: &Archetype) -> bool;\n\n\n\n /// Determines if the given chunk is compatible with the data\n\n /// contained in the data set.\n\n fn is_chunk_match(&self, chunk: &Chunk) -> bool;\n\n\n\n /// Configures a new chunk to include the tag types in this data set.\n\n fn configure_chunk(&self, chunk: &mut ChunkBuilder);\n\n\n\n /// Gets the type of tags contained in this data set.\n\n fn types(&self) -> FnvHashSet<TagTypeId>;\n\n\n\n /// Writes the tags into the given `Chunk`, consuming the data in `self`.\n\n fn write<'a>(&mut self, chunk: &'a mut Chunk);\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 26, "score": 56544.689364652964 }, { "content": "#[doc(hidden)]\n\npub trait ViewElement {\n\n type Component: DataTypeId;\n\n}\n\n\n", "file_path": "src/query.rs", "rank": 27, "score": 56544.689364652964 }, { "content": "/// Inserts entity data into a `Chunk` in a `World`.\n\npub trait EntitySource {\n\n /// Determines if the given archetype is compatible with the data\n\n /// contained in the source.\n\n fn is_archetype_match(&self, archetype: &Archetype) -> bool;\n\n\n\n /// Configures a new chunk to support the data contained within this source.\n\n fn configure_chunk(&self, chunk: &mut ChunkBuilder);\n\n\n\n /// Gets the entity data component types contained within this source.\n\n fn types(&self) -> FnvHashSet<ComponentTypeId>;\n\n\n\n /// Determines if the source is empty.\n\n fn is_empty(&mut self) -> bool;\n\n\n\n /// Writes as many entities into the given `Chunk` as possible, consuming the\n\n /// data in `self`.\n\n ///\n\n /// The provided `EntityAllocator` can be used to allocate new `Entity` IDs.\n\n ///\n\n /// Returns the number of entities written.\n", "file_path": "src/lib.rs", "rank": 28, "score": 56544.689364652964 }, { "content": "#[test]\n\nfn delete_first() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (Static, Model(5));\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n let mut entities: Vec<Entity> = Vec::new();\n\n for e in world.insert_from(shared.as_tags(), components.clone()) {\n\n entities.push(*e);\n\n }\n\n\n\n let first = *entities.first().unwrap();\n\n\n\n world.delete(first);\n\n assert_eq!(false, world.is_alive(&first));\n\n\n", "file_path": "tests/world_api.rs", "rank": 29, "score": 55387.424838560255 }, { "content": "#[test]\n\nfn delete_last() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (Static, Model(5));\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n let mut entities: Vec<Entity> = Vec::new();\n\n for e in world.insert_from(shared.as_tags(), components.clone()) {\n\n entities.push(*e);\n\n }\n\n\n\n let last = *entities.last().unwrap();\n\n world.delete(last);\n\n assert_eq!(false, world.is_alive(&last));\n\n\n\n for (i, e) in entities.iter().take(entities.len() - 1).enumerate() {\n", "file_path": "tests/world_api.rs", "rank": 30, "score": 55387.424838560255 }, { "content": "#[test]\n\nfn get_shared() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (Static, Model(5));\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n let mut entities: Vec<Entity> = Vec::new();\n\n for e in world.insert_from(shared.as_tags(), components.clone()) {\n\n entities.push(*e);\n\n }\n\n\n\n for e in entities.iter() {\n\n assert_eq!(Some(&Static), world.tag(*e));\n\n assert_eq!(Some(&Model(5)), world.tag(*e));\n\n }\n\n}\n\n\n", "file_path": "tests/world_api.rs", "rank": 31, "score": 55387.424838560255 }, { "content": "#[test]\n\nfn get_component() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (Static, Model(5));\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n let mut entities: Vec<Entity> = Vec::new();\n\n for e in world.insert_from(shared.as_tags(), components.clone()) {\n\n entities.push(*e);\n\n }\n\n\n\n for (i, e) in entities.iter().enumerate() {\n\n match world.component(*e) {\n\n Some(x) => assert_eq!(components.get(i).map(|(x, _)| x), Some(&x as &Pos)),\n\n None => assert_eq!(components.get(i).map(|(x, _)| x), None),\n\n }\n\n match world.component(*e) {\n\n Some(x) => assert_eq!(components.get(i).map(|(_, x)| x), Some(&x as &Rot)),\n\n None => assert_eq!(components.get(i).map(|(_, x)| x), None),\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/world_api.rs", "rank": 32, "score": 55387.424838560255 }, { "content": "pub trait DataTypeId {\n\n /// Returns a type tuple. By default, it is (TypeId::of::<Self>(), 0).\n\n /// This function allows you to override it for interop between Rust and FFI\n\n ///\n\n /// # Safety\n\n ///\n\n /// When overriding, it is important that you ensure the type ID is unique!\n\n fn type_id() -> (TypeId, u32)\n\n where\n\n Self: 'static,\n\n {\n\n (TypeId::of::<Self>(), 0)\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 33, "score": 55121.591798566755 }, { "content": "#[test]\n\nfn mutate_remove_tag() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (Model(5), Static).as_tags();\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n let entities = world.insert_from(shared, components).to_vec();\n\n\n\n let mut query_without_static = <(Read<Pos>, Read<Rot>)>::query().filter(!tag::<Static>());\n\n let mut query_with_static = <(Read<Pos>, Read<Rot>, Tagged<Static>)>::query();\n\n\n\n assert_eq!(0, query_without_static.iter(&world).count());\n\n assert_eq!(3, query_with_static.iter(&world).count());\n\n\n\n world.mutate_entity(*entities.get(1).unwrap(), |e| {\n\n e.remove_tag::<Static>();\n\n });\n\n\n\n assert_eq!(1, query_without_static.iter(&world).count());\n\n assert_eq!(2, query_with_static.iter(&world).count());\n\n}\n\n\n", "file_path": "tests/world_api.rs", "rank": 34, "score": 53993.07584933174 }, { "content": "#[test]\n\nfn mutate_remove_component() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (Static, Model(5)).as_tags();\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n let entities = world.insert_from(shared, components).to_vec();\n\n\n\n let mut query_without_rot = Read::<Pos>::query().filter(!component::<Rot>());\n\n let mut query_with_rot = <(Read<Pos>, Read<Rot>)>::query();\n\n\n\n assert_eq!(0, query_without_rot.iter(&world).count());\n\n assert_eq!(3, query_with_rot.iter(&world).count());\n\n\n\n world.mutate_entity(*entities.get(1).unwrap(), |e| {\n\n e.remove_component::<Rot>();\n\n });\n\n\n\n assert_eq!(1, query_without_rot.iter(&world).count());\n\n assert_eq!(2, query_with_rot.iter(&world).count());\n\n}\n\n\n", "file_path": "tests/world_api.rs", "rank": 35, "score": 53993.07584933174 }, { "content": "#[test]\n\nfn mutate_change_tag() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (Model(5),).as_tags();\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n let entities = world.insert_from(shared, components).to_vec();\n\n\n\n let mut query_model_3 = <(Read<Pos>, Read<Rot>)>::query().filter(tag_value(&Model(3)));\n\n let mut query_model_5 = <(Read<Pos>, Read<Rot>)>::query().filter(tag_value(&Model(5)));\n\n\n\n assert_eq!(3, query_model_5.iter(&world).count());\n\n assert_eq!(0, query_model_3.iter(&world).count());\n\n\n\n world.mutate_entity(*entities.get(1).unwrap(), |tags| {\n", "file_path": "tests/world_api.rs", "rank": 36, "score": 53993.07584933174 }, { "content": "#[test]\n\nfn query_on_changed_first() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (Static, Model(5));\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n let mut expected = HashMap::<Entity, (Pos, Rot)>::new();\n\n\n\n for (i, e) in world\n\n .insert_from(shared.as_tags(), components.clone())\n\n .iter()\n\n .enumerate()\n\n {\n\n if let Some((pos, rot)) = components.get(i) {\n\n expected.insert(*e, (*pos, *rot));\n\n }\n", "file_path": "tests/query_api.rs", "rank": 37, "score": 53993.07584933174 }, { "content": "#[test]\n\nfn mutate_add_tag() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (Model(5),).as_tags();\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n let entities = world.insert_from(shared, components).to_vec();\n\n\n\n let mut query_without_static = <(Read<Pos>, Read<Rot>)>::query();\n\n let mut query_with_static = <(Read<Pos>, Read<Rot>, Tagged<Static>)>::query();\n\n\n\n assert_eq!(3, query_without_static.iter(&world).count());\n\n assert_eq!(0, query_with_static.iter(&world).count());\n\n\n\n world.mutate_entity(*entities.get(1).unwrap(), |tags| {\n\n tags.set_tag(Static);\n\n });\n\n\n\n assert_eq!(3, query_without_static.iter(&world).count());\n\n assert_eq!(1, query_with_static.iter(&world).count());\n\n}\n\n\n", "file_path": "tests/world_api.rs", "rank": 38, "score": 53993.07584933174 }, { "content": "#[test]\n\nfn query_on_changed_no_changes() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (Static, Model(5));\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n let mut expected = HashMap::<Entity, (Pos, Rot)>::new();\n\n\n\n for (i, e) in world\n\n .insert_from(shared.as_tags(), components.clone())\n\n .iter()\n\n .enumerate()\n\n {\n\n if let Some((pos, rot)) = components.get(i) {\n\n expected.insert(*e, (*pos, *rot));\n\n }\n", "file_path": "tests/query_api.rs", "rank": 39, "score": 53993.07584933174 }, { "content": "#[test]\n\nfn mutate_add_component() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (Static, Model(5)).as_tags();\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n let entities = world.insert_from(shared, components).to_vec();\n\n\n\n let mut query_without_scale = <(Read<Pos>, Read<Rot>)>::query();\n\n let mut query_with_scale = <(Read<Pos>, Read<Rot>, Read<Scale>)>::query();\n\n\n\n assert_eq!(3, query_without_scale.iter(&world).count());\n\n assert_eq!(0, query_with_scale.iter(&world).count());\n\n\n\n world.mutate_entity(*entities.get(1).unwrap(), |e| {\n\n e.add_component(Scale(0.5, 0.5, 0.5))\n\n });\n\n\n\n assert_eq!(3, query_without_scale.iter(&world).count());\n\n assert_eq!(1, query_with_scale.iter(&world).count());\n\n}\n\n\n", "file_path": "tests/world_api.rs", "rank": 40, "score": 53993.07584933174 }, { "content": "/// A set of entity data components.\n\npub trait ComponentSet: Sized {\n\n /// Converts an iterator of `Self` into an `EntitySource`.\n\n fn component_source<T>(source: T) -> IterEntitySource<T, Self>\n\n where\n\n T: Iterator<Item = Self>;\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 41, "score": 52977.11968884866 }, { "content": "#[test]\n\nfn get_component_wrong_type() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let entity = *world.insert_from((), vec![(0f64,)]).get(0).unwrap();\n\n\n\n assert_eq!(None, world.component::<i32>(entity));\n\n}\n\n\n", "file_path": "tests/world_api.rs", "rank": 42, "score": 52726.23557112914 }, { "content": "#[test]\n\nfn get_shared_wrong_type() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let entity = *world\n\n .insert_from((Static,).as_tags(), vec![(0f64,)])\n\n .get(0)\n\n .unwrap();\n\n\n\n assert_eq!(None, world.tag::<Model>(entity));\n\n}\n\n\n", "file_path": "tests/world_api.rs", "rank": 43, "score": 52726.23557112914 }, { "content": "#[test]\n\nfn query_on_changed_self_changes() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (Static, Model(5));\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n let mut expected = HashMap::<Entity, (Pos, Rot)>::new();\n\n\n\n for (i, e) in world\n\n .insert_from(shared.as_tags(), components.clone())\n\n .iter()\n\n .enumerate()\n\n {\n\n if let Some((pos, rot)) = components.get(i) {\n\n expected.insert(*e, (*pos, *rot));\n\n }\n", "file_path": "tests/query_api.rs", "rank": 44, "score": 52726.23557112914 }, { "content": "#[derive(Debug)]\n\nenum Node<'a, J: JobDescriptor> {\n\n Job(&'a J),\n\n Barrier(J::Barrier),\n\n Root,\n\n}\n\n\n", "file_path": "src/schedule.rs", "rank": 45, "score": 51793.25083652332 }, { "content": "#[test]\n\nfn query_mixed_entity_data_tuple() {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n let shared = (Static, Model(5));\n\n let components = vec![\n\n (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)),\n\n (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)),\n\n ];\n\n\n\n let mut expected = HashMap::<Entity, (Pos, Rot)>::new();\n\n\n\n for (i, e) in world\n\n .insert_from(shared.as_tags(), components.clone())\n\n .iter()\n\n .enumerate()\n\n {\n\n if let Some((pos, rot)) = components.get(i) {\n\n expected.insert(*e, (*pos, *rot));\n\n }\n", "file_path": "tests/query_api.rs", "rank": 46, "score": 51570.178367069966 }, { "content": "fn par_for_each(world: &World) {\n\n <(\n\n Read<Position>,\n\n Read<Orientation>,\n\n Read<Scale>,\n\n Write<Transform>,\n\n )>::query()\n\n .par_for_each(&world, |(pos, orient, scale, trans)| {\n\n trans.0 = process(&pos.0, &orient.0, &scale.0);\n\n });\n\n}\n\n\n", "file_path": "benches/transform.rs", "rank": 47, "score": 51187.5170958201 }, { "content": "fn sequential(world: &World) {\n\n for (pos, orient, scale, trans) in <(\n\n Read<Position>,\n\n Read<Orientation>,\n\n Read<Scale>,\n\n Write<Transform>,\n\n )>::query()\n\n .iter(&world)\n\n {\n\n trans.0 = process(&pos.0, &orient.0, &scale.0);\n\n }\n\n}\n\n\n", "file_path": "benches/transform.rs", "rank": 48, "score": 51187.5170958201 }, { "content": "enum ScheduleResult<'a, J: JobDescriptor> {\n\n Schedule(&'a Node<'a, J>, usize),\n\n WaitingForJob,\n\n Done,\n\n}\n\nimpl<'a, J: JobDescriptor> DispatchState<'a, J> {\n\n pub fn next_job(&mut self) -> ScheduleResult<'a, J> {\n\n println!(\"scheduling with completed {:#?}\", self.jobs_completed);\n\n let mut waiting = false;\n\n for i in 0..self.sorted_jobs.len() {\n\n if self.jobs_scheduled.contains(i as u32) == false {\n\n waiting = true;\n\n let deps = &self.job_deps[i];\n\n // first AND between deps and jobs_completed to retain only the relevant bits,\n\n // then XOR between deps and the result to check if there's a difference\n\n\n\n if (deps ^ (deps & &self.jobs_completed))\n\n .iter()\n\n .next()\n\n .is_none()\n", "file_path": "src/schedule.rs", "rank": 49, "score": 50493.624926596734 }, { "content": "fn par_for_each(world: &World) {\n\n join(\n\n || {\n\n <(Write<B>, Read<A>)>::query().par_for_each(&world, |(b, a)| {\n\n b.0 = a.0;\n\n });\n\n },\n\n || {\n\n <(Write<C>, Read<A>)>::query().par_for_each(&world, |(c, a)| {\n\n c.0 = a.0;\n\n });\n\n },\n\n );\n\n}\n\n\n", "file_path": "benches/parallel_query.rs", "rank": 50, "score": 49793.168106591576 }, { "content": "fn parallel(world: &World) {\n\n join(\n\n || {\n\n for (b, a) in <(Write<B>, Read<A>)>::query().iter(&world) {\n\n b.0 = a.0;\n\n }\n\n },\n\n || {\n\n for (c, a) in <(Write<C>, Read<A>)>::query().iter(&world) {\n\n c.0 = a.0;\n\n }\n\n },\n\n );\n\n}\n\n\n", "file_path": "benches/parallel_query.rs", "rank": 51, "score": 49793.168106591576 }, { "content": "fn sequential(world: &World) {\n\n for (b, a) in <(Write<B>, Read<A>)>::query().iter(&world) {\n\n b.0 = a.0;\n\n }\n\n\n\n for (c, a) in <(Write<C>, Read<A>)>::query().iter(&world) {\n\n c.0 = a.0;\n\n }\n\n}\n\n\n", "file_path": "benches/parallel_query.rs", "rank": 52, "score": 49793.168106591576 }, { "content": "fn setup(n: usize) -> World {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n world.insert_from((), (0..n).map(|_| (Position(0.), Rotation(0.))));\n\n\n\n world\n\n}\n\n\n", "file_path": "benches/benchmarks.rs", "rank": 53, "score": 49553.61078596095 }, { "content": "pub trait DefaultComponentImpl: DataTypeId {}\n\n#[cfg(not(feature = \"blanket-impl-comp\"))]\n\nimpl<T: DefaultComponentImpl> DataTypeId for T {}\n\n\n\nimpl DefaultComponentImpl for usize {}\n\nimpl DefaultComponentImpl for i8 {}\n\nimpl DefaultComponentImpl for u8 {}\n\nimpl DefaultComponentImpl for u16 {}\n\nimpl DefaultComponentImpl for i16 {}\n\nimpl DefaultComponentImpl for i32 {}\n\nimpl DefaultComponentImpl for u32 {}\n\nimpl DefaultComponentImpl for u64 {}\n\nimpl DefaultComponentImpl for f64 {}\n\nimpl DefaultComponentImpl for f32 {}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::*;\n\n\n\n #[test]\n", "file_path": "src/lib.rs", "rank": 54, "score": 49423.23160280344 }, { "content": "pub trait IntoTagSet<T: TagSet> {\n\n fn as_tags(self) -> T;\n\n}\n\n\n\nmacro_rules! impl_shared_data_set {\n\n ( $arity: expr; $( $ty: ident ),* ) => {\n\n impl<$( $ty ),*> IntoTagSet<($( $ty, )*)> for ($( $ty, )*)\n\n where $( $ty: Tag ),*\n\n {\n\n fn as_tags(self) -> ($( $ty, )*) {\n\n self\n\n }\n\n }\n\n\n\n impl<$( $ty ),*> TagSet for ($( $ty, )*)\n\n where $( $ty: Tag ),*\n\n {\n\n fn is_archetype_match(&self, archetype: &Archetype) -> bool {\n\n archetype.tags.len() == $arity &&\n\n $( archetype.tags.contains(&$ty::type_id()) )&&*\n", "file_path": "src/lib.rs", "rank": 55, "score": 48677.08005268873 }, { "content": "fn index(v: Variants) -> u8 {\n\n match v {\n\n Variants::AB(_, _) => 0,\n\n Variants::AC(_, _) => 1,\n\n }\n\n}\n\n\n", "file_path": "benches/parallel_query.rs", "rank": 56, "score": 48159.261796732426 }, { "content": "fn generate(i: u8) -> Variants {\n\n match i {\n\n 0 => Variants::AB(A(0.0), B(0.0)),\n\n _ => Variants::AC(A(0.0), C(0.0)),\n\n }\n\n}\n\n\n", "file_path": "benches/parallel_query.rs", "rank": 57, "score": 48159.261796732426 }, { "content": "fn bench_transform(c: &mut Criterion) {\n\n c.bench(\n\n \"update transform\",\n\n ParameterizedBenchmark::new(\n\n \"sequential ideal\",\n\n |b, n| {\n\n let mut data = data(*n);\n\n b.iter(|| ideal(&mut data));\n\n },\n\n (1..11).map(|i| i * 1000),\n\n )\n\n .with_function(\"sequential\", |b, n| {\n\n let data = data(*n);\n\n let world = setup(data);\n\n b.iter(|| sequential(&world));\n\n })\n\n .with_function(\"par_for_each\", |b, n| {\n\n let data = data(*n);\n\n let world = setup(data);\n\n join(|| {}, || b.iter(|| par_for_each(&world)));\n\n }),\n\n );\n\n}\n\n\n\ncriterion_group!(iterate, bench_transform);\n\ncriterion_main!(iterate);\n", "file_path": "benches/transform.rs", "rank": 58, "score": 48159.261796732426 }, { "content": "fn bench_iter_simple(c: &mut Criterion) {\n\n c.bench_function(\"iter-simple\", |b| {\n\n let mut world = setup(2000);\n\n add_background_entities(&mut world, 10000);\n\n\n\n let mut query = <(Read<Position>, Write<Rotation>)>::query();\n\n\n\n b.iter(|| {\n\n for (pos, rot) in query.iter(&world) {\n\n rot.0 = pos.0;\n\n }\n\n });\n\n });\n\n}\n\n\n", "file_path": "benches/benchmarks.rs", "rank": 59, "score": 46892.42151852982 }, { "content": "fn bench_create_delete(c: &mut Criterion) {\n\n c.bench_function_over_inputs(\n\n \"create-delete\",\n\n |b, count| {\n\n let mut world = setup(0);\n\n b.iter(|| {\n\n let entities = world\n\n .insert_from((), (0..*count).map(|_| (Position(0.),)))\n\n .to_vec();\n\n\n\n for e in entities {\n\n world.delete(e);\n\n }\n\n })\n\n },\n\n (0..10).map(|i| i * 100),\n\n );\n\n}\n\n\n", "file_path": "benches/benchmarks.rs", "rank": 60, "score": 46892.42151852982 }, { "content": "fn bench_iter_complex(c: &mut Criterion) {\n\n c.bench_function(\"iter-complex\", |b| {\n\n let mut world = setup(0);\n\n add_background_entities(&mut world, 10000);\n\n\n\n for i in 0..200 {\n\n world.insert_from(\n\n (Tag(i as f32),).as_tags(),\n\n (0..2000).map(|_| (Position(0.), Rotation(0.))),\n\n );\n\n }\n\n\n\n let mut query = <(Read<Position>, Write<Rotation>)>::query()\n\n .filter(!component::<A>() & tag_value(&Tag(2.0)));\n\n\n\n b.iter(|| {\n\n for (pos, rot) in query.iter(&world) {\n\n rot.0 = pos.0;\n\n }\n\n });\n\n });\n\n}\n\n\n", "file_path": "benches/benchmarks.rs", "rank": 61, "score": 46892.42151852982 }, { "content": "fn bench_iter_chunks_simple(c: &mut Criterion) {\n\n c.bench_function(\"iter-chunks-simple\", |b| {\n\n let mut world = setup(10000);\n\n add_background_entities(&mut world, 10000);\n\n\n\n let mut query = <(Read<Position>, Write<Rotation>)>::query();\n\n\n\n b.iter(|| {\n\n for c in query.iter_chunks(&world) {\n\n unsafe {\n\n c.components_mut::<Position>()\n\n .unwrap()\n\n .get_unchecked_mut(0)\n\n .0 = 0.0\n\n };\n\n }\n\n });\n\n });\n\n}\n\n\n", "file_path": "benches/benchmarks.rs", "rank": 62, "score": 45736.36431447066 }, { "content": "fn bench_iter_chunks_complex(c: &mut Criterion) {\n\n c.bench_function(\"iter-chunks-complex\", |b| {\n\n let mut world = setup(0);\n\n add_background_entities(&mut world, 10000);\n\n\n\n for i in 0..200 {\n\n world.insert_from(\n\n (Tag(i as f32),).as_tags(),\n\n (0..10000).map(|_| (Position(0.), Rotation(0.))),\n\n );\n\n }\n\n\n\n let mut query = <(Read<Position>, Write<Rotation>)>::query()\n\n .filter(!component::<A>() & tag_value(&Tag(2.0)));\n\n\n\n b.iter(|| {\n\n for c in query.iter_chunks(&world) {\n\n unsafe {\n\n c.components_mut::<Position>()\n\n .unwrap()\n", "file_path": "benches/benchmarks.rs", "rank": 63, "score": 45736.36431447066 }, { "content": "/// Filters chunks to determine which are to be included in a `Query`.\n\npub trait Filter: Send + Sync + Sized + Debug {\n\n /// Determines if an archetype matches the filter's conditions.\n\n fn filter_archetype(&self, _: &Archetype) -> Option<bool> {\n\n None\n\n }\n\n\n\n /// Determines if a chunk matches immutable elements of the filter's conditions.\n\n /// This must always return the same result when given the same chunk.\n\n fn filter_chunk_immutable(&self, chunk: &Chunk) -> Option<bool>;\n\n\n\n /// Determines if a chunk matches variable elements of the filter's conditions.\n\n /// This may return different results when called repeatedly with the same chunk.\n\n fn filter_chunk_variable(&mut self, chunk: &Chunk) -> Option<bool>;\n\n\n\n /// Determines if a chunk matches the filter's conditions.\n\n fn filter_chunk(&mut self, chunk: &Chunk) -> Option<bool> {\n\n self.filter_chunk_immutable(chunk)\n\n .coalesce_or(self.filter_chunk_variable(chunk))\n\n }\n\n}\n", "file_path": "src/query.rs", "rank": 64, "score": 45729.200287377156 }, { "content": "/// Converts a `View` into a `Query`.\n\npub trait IntoQuery: DefaultFilter + for<'a> View<'a> {\n\n /// Converts the `View` type into a `Query`.\n\n fn query() -> QueryDef<Self, <Self as DefaultFilter>::Filter>;\n\n}\n\n\n\nimpl<T: DefaultFilter + for<'a> View<'a>> IntoQuery for T {\n\n fn query() -> QueryDef<Self, <Self as DefaultFilter>::Filter> {\n\n if !Self::validate() {\n\n panic!(\"invalid view, please ensure the view contains no duplicate component types\");\n\n }\n\n\n\n QueryDef {\n\n view: PhantomData,\n\n filter: Self::filter(),\n\n }\n\n }\n\n}\n\n\n\n/// Reads a single entity data component type from a `Chunk`.\n\n#[derive(Debug)]\n", "file_path": "src/query.rs", "rank": 65, "score": 45702.85602911933 }, { "content": "fn data(n: usize) -> Vec<Variants> {\n\n let mut v = Vec::<Variants>::new();\n\n\n\n for _ in 0..n {\n\n v.push(generate(0));\n\n }\n\n\n\n for _ in 0..n {\n\n v.push(generate(1));\n\n }\n\n\n\n v\n\n}\n\n\n", "file_path": "benches/parallel_query.rs", "rank": 66, "score": 45483.60781091574 }, { "content": "fn setup(data: &Vec<Variants>) -> World {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n for (i, group) in &data.into_iter().group_by(|x| index(**x)) {\n\n match i {\n\n 0 => world.insert_from(\n\n (),\n\n group.map(|x| {\n\n if let Variants::AB(a, b) = x {\n\n (*a, *b)\n\n } else {\n\n panic!();\n\n }\n\n }),\n\n ),\n\n _ => world.insert_from(\n\n (),\n\n group.map(|x| {\n\n if let Variants::AC(a, c) = x {\n", "file_path": "benches/parallel_query.rs", "rank": 67, "score": 44327.550606856574 }, { "content": "fn align_up(addr: usize, align: usize) -> usize {\n\n (addr + (align - 1)) & align.wrapping_neg()\n\n}\n\n\n\n/// Constructs a new `Chunk`.\n\npub struct ChunkBuilder {\n\n components: Vec<(ComponentTypeId, usize, Option<fn(*mut u8)>)>,\n\n tags: Vec<(TagTypeId, usize, TagStorageVTable)>,\n\n}\n\n\n\nimpl ChunkBuilder {\n\n const MAX_SIZE: usize = 16 * 1024;\n\n\n\n /// Constructs a new `ChunkBuilder`.\n\n pub fn new() -> ChunkBuilder {\n\n ChunkBuilder {\n\n components: Vec::new(),\n\n tags: Vec::new(),\n\n }\n\n }\n", "file_path": "src/storage.rs", "rank": 68, "score": 43100.31270698635 }, { "content": "fn align_down(addr: usize, align: usize) -> usize {\n\n addr & align.wrapping_neg()\n\n}\n\n\n", "file_path": "src/storage.rs", "rank": 69, "score": 43100.31270698635 }, { "content": "fn add_background_entities(world: &mut World, count: usize) {\n\n create_entities(\n\n world,\n\n &mut [\n\n Box::new(|_, c| c.add_component(A(0.0))),\n\n Box::new(|_, c| c.add_component(B(0.0))),\n\n Box::new(|_, c| c.add_component(C(0.0))),\n\n Box::new(|t, _| t.set_tag(Arc::new(Tag(0.0)))),\n\n Box::new(|_, c| c.add_component(D(0.0))),\n\n Box::new(|t, _| t.set_tag(Arc::new(Tag(1.0)))),\n\n Box::new(|_, c| c.add_component(E(0.0))),\n\n Box::new(|t, _| t.set_tag(Arc::new(Tag(2.0)))),\n\n Box::new(|_, c| c.add_component(F(0.0))),\n\n Box::new(|t, _| t.set_tag(Arc::new(Tag(3.0)))),\n\n ],\n\n 5,\n\n count,\n\n );\n\n}\n\n\n", "file_path": "benches/benchmarks.rs", "rank": 70, "score": 41067.09507584249 }, { "content": "fn data(n: usize) -> Vec<(Position, Orientation, Scale, Transform)> {\n\n let mut v = Vec::<(Position, Orientation, Scale, Transform)>::new();\n\n\n\n for _ in 0..n {\n\n v.push((\n\n Position(vec3(0.0, 0.0, 0.0)),\n\n Orientation(Quaternion::new(1.0, 0.0, 0.0, 0.0)),\n\n Scale(vec3(0.0, 0.0, 0.0)),\n\n Transform(Matrix4::identity()),\n\n ));\n\n }\n\n\n\n v\n\n}\n\n\n", "file_path": "benches/transform.rs", "rank": 71, "score": 40006.98350561713 }, { "content": "fn ideal(data: &mut Vec<(Position, Orientation, Scale, Transform)>) {\n\n for (pos, orient, scale, trans) in data.iter_mut() {\n\n trans.0 = process(&pos.0, &orient.0, &scale.0);\n\n }\n\n}\n\n\n", "file_path": "benches/transform.rs", "rank": 72, "score": 39032.96292929696 }, { "content": "fn setup(data: Vec<(Position, Orientation, Scale, Transform)>) -> World {\n\n let universe = Universe::new(None);\n\n let mut world = universe.create_world();\n\n\n\n world.insert_from((), data);\n\n\n\n world\n\n}\n\n\n", "file_path": "benches/transform.rs", "rank": 73, "score": 39032.96292929696 }, { "content": "fn setup_ideal(data: &Vec<Variants>) -> (Vec<(A, B)>, Vec<(A, C)>) {\n\n let mut ab = Vec::<(A, B)>::new();\n\n let mut ac = Vec::<(A, C)>::new();\n\n\n\n for v in data {\n\n match v {\n\n Variants::AB(a, b) => ab.push((*a, *b)),\n\n Variants::AC(a, c) => ac.push((*a, *c)),\n\n };\n\n }\n\n\n\n (ab, ac)\n\n}\n\n\n", "file_path": "benches/parallel_query.rs", "rank": 74, "score": 35685.161446333914 }, { "content": "fn ideal(ab: &mut Vec<(A, B)>, ac: &mut Vec<(A, C)>) {\n\n for (a, b) in ab.iter_mut() {\n\n b.0 = a.0;\n\n }\n\n\n\n for (a, c) in ac.iter_mut() {\n\n c.0 = a.0;\n\n }\n\n}\n\n\n", "file_path": "benches/parallel_query.rs", "rank": 75, "score": 34994.769530799334 }, { "content": "fn generate_job_graph<'a, T: JobDescriptor>(jobs: &'a [T]) -> JobGraph<'a, T> {\n\n // ensure job barrier relationships make sense\n\n for j in jobs {\n\n if let Some(a) = j.run_after() {\n\n if let Some(b) = j.finish_before() {\n\n assert!(\n\n a < b,\n\n \"Invalid job ordering: finish_before is before run_after for job {:?}\",\n\n j\n\n );\n\n }\n\n }\n\n }\n\n // find all used barriers and sort them\n\n let mut barriers = HashSet::new();\n\n for j in jobs {\n\n if let Some(b) = j.run_after() {\n\n barriers.insert(b);\n\n }\n\n if let Some(b) = j.finish_before() {\n", "file_path": "src/schedule.rs", "rank": 95, "score": 34913.02468416386 }, { "content": "fn filter_world(world: &crate::World, filter: &FilterData) -> Vec<crate::ChunkId> {\n\n let mut matching_chunks: Vec<crate::ChunkId> = Vec::new();\n\n for archetype in &world.archetypes {\n\n // Inclusive Tags\n\n if filter\n\n .tags\n\n .iter()\n\n .any(|x| archetype.has_tag_type(&x) == false)\n\n {\n\n continue;\n\n }\n\n // Inclusive Components\n\n if filter\n\n .components\n\n .iter()\n\n .any(|x| archetype.has_component_type(&x) == false)\n\n {\n\n continue;\n\n }\n\n // Exclusive Tags\n", "file_path": "src/c_api_query.rs", "rank": 96, "score": 34288.805044793786 }, { "content": "#[derive(Debug)]\n\nstruct DispatchState<'a, J: JobDescriptor> {\n\n sorted_jobs: Vec<&'a Node<'a, J>>,\n\n jobs_completed: BitSet,\n\n jobs_scheduled: BitSet,\n\n job_deps: Vec<BitSet>,\n\n}\n", "file_path": "src/schedule.rs", "rank": 97, "score": 27135.977050988815 }, { "content": " /// Returns `None` if the chunk does not contain the requested data type.\n\n ///\n\n /// # Panics\n\n ///\n\n /// This function performs runtime borrow checking. It will panic if other code is borrowing\n\n /// the same component type mutably.\n\n pub fn components<'a, T: Component>(&'a self) -> Option<BorrowedSlice<'a, T>> {\n\n match unsafe { self.components_unchecked() } {\n\n Some(data) => {\n\n let borrow = self.borrow::<T>();\n\n Some(BorrowedSlice::new(data, borrow))\n\n }\n\n None => None,\n\n }\n\n }\n\n\n\n /// Gets a mutable slice of component data.\n\n ///\n\n /// Returns `None` if the chunk does not contain the requested data type.\n\n ///\n", "file_path": "src/storage.rs", "rank": 98, "score": 14.95702930697052 }, { "content": " ///\n\n /// Returns `None` if the chunk does not contain the requested data type.\n\n ///\n\n /// # Safety\n\n ///\n\n /// This function bypasses any borrow checking. Ensure no other code is reading or writing to\n\n /// this component type in the chunk before calling this function.\n\n pub unsafe fn components_mut_unchecked<T: Component>(&self) -> Option<&mut [T]> {\n\n self.components\n\n .get(&T::type_id())\n\n .map(|c| std::slice::from_raw_parts_mut(c.data_mut().cast().as_ptr(), self.len()))\n\n }\n\n\n\n /// Gets a mutable vector of all, possibly uninitialized, component data.\n\n ///\n\n /// Returns `None` if the chunk does not contain the requested data type.\n\n ///\n\n /// # Safety\n\n ///\n\n /// This function bypasses any borrow checking. Ensure no other code is reading or writing to\n", "file_path": "src/storage.rs", "rank": 99, "score": 14.837463766632563 } ]
Rust
src/route.rs
quarterblue/-kademlia-dht
de82d5407922f97a6b7885c9dea4828b7ed04f55
use crate::node::{Bit, ByteString, Node, ID_LENGTH}; use std::cell::RefCell; use std::iter::Iterator; use std::rc::Rc; use std::rc::Weak; const K_BUCKET_SIZE: usize = 4; type LeafNode = Option<Rc<RefCell<Vertex>>>; #[derive(Debug)] pub struct KBucket { node_bucket: Vec<Node>, depth: usize, } impl KBucket { pub fn new() -> Self { KBucket { node_bucket: Vec::with_capacity(K_BUCKET_SIZE), depth: 0, } } pub fn sort(&mut self) {} fn split(&self) -> (Option<KBucket>, Option<KBucket>) { let mut left = KBucket::new(); let mut right = KBucket::new(); left.depth = self.depth + 1; right.depth = self.depth + 1; for node in &self.node_bucket { match node.node_id.index(self.depth + 1) { 1 => left.node_bucket.push(*node), 0 => right.node_bucket.push(*node), _ => unreachable!(), } } (Some(left), Some(right)) } } #[derive(Debug)] pub struct Vertex { bit: Bit, k_bucket: Option<KBucket>, parent: Option<Weak<RefCell<Vertex>>>, left: LeafNode, right: LeafNode, } impl Vertex { fn new(bit: Bit) -> Vertex { Vertex { bit, k_bucket: Some(KBucket::new()), parent: None, left: None, right: None, } } fn split( vertex: &Rc<RefCell<Vertex>>, ) -> (Option<Rc<RefCell<Vertex>>>, Option<Rc<RefCell<Vertex>>>) { let mut left = Vertex::new(Bit::One); let mut right = Vertex::new(Bit::Zero); let tuple = vertex.borrow().k_bucket.as_ref().unwrap().split(); vertex.borrow_mut().k_bucket = None; left.k_bucket = tuple.0; right.k_bucket = tuple.1; ( Some(Rc::new(RefCell::new(left))), Some(Rc::new(RefCell::new(right))), ) } fn add_node<I: Iterator<Item = u8>>( vertex: &Rc<RefCell<Vertex>>, node: Node, node_iter: &mut I, node_id: &ByteString, prefix_contained: bool, ) { let has_k_bucket: bool; let mut split: bool = false; { has_k_bucket = vertex.borrow().k_bucket.is_some(); } match has_k_bucket { true => { { let mut vert = vertex.borrow_mut(); let bucket = vert.k_bucket.as_mut().unwrap(); if bucket.node_bucket.len() < K_BUCKET_SIZE { bucket.node_bucket.push(node); return; } if prefix_contained { let node_iter_next: u8 = node_iter.next().unwrap(); match node_iter_next { 1 => { if !matches!(vert.bit, Bit::One) { split = false; } } 0 => { if !matches!(vert.bit, Bit::Zero) { split = false; } } _ => {} } } } if split { let (left_vert, right_vert) = Vertex::split(vertex); { left_vert.as_ref().unwrap().borrow_mut().parent = Some(Rc::downgrade(&Rc::clone(vertex))); right_vert.as_ref().unwrap().borrow_mut().parent = Some(Rc::downgrade(&Rc::clone(vertex))); } { vertex.borrow_mut().left = left_vert; vertex.borrow_mut().right = right_vert; } Vertex::add_node(vertex, node, node_iter, &node_id, false); } } false => match node_iter.next().unwrap() { 1 => match &vertex.borrow().left { Some(vert) => { Vertex::add_node(vert, node, node_iter, &node_id, prefix_contained); } None => {} }, 0 => match &vertex.borrow().right { Some(vert) => { Vertex::add_node(vert, node, node_iter, &node_id, prefix_contained); } None => {} }, _ => unreachable!(), }, } } } #[derive(Debug)] pub struct RouteTable { pub length: u64, node_id: ByteString, root: LeafNode, } impl RouteTable { pub fn empty_new(node_id: ByteString) -> Self { RouteTable { length: 0, node_id, root: Some(Rc::new(RefCell::new(Vertex::new(Bit::Root)))), } } pub fn add_vertex() {} pub fn add_node(&mut self, node: Node) { match self.root.as_mut() { Some(x) => { let mut iter = node.node_id.into_iter(); Vertex::add_node(x, node, &mut iter, &self.node_id, true); self.length += 1; } None => { panic!("Root does not exist"); } } } fn find_closest(&self, node_id: [u8; ID_LENGTH]) -> Vec<Node> { let alpha_nodes: Vec<Node> = Vec::new(); match self.root { Some(ref x) => match &x.borrow_mut().k_bucket { Some(bucket) => {} None => {} }, None => {} } return alpha_nodes; } }
use crate::node::{Bit, ByteString, Node, ID_LENGTH}; use std::cell::RefCell; use std::iter::Iterator; use std::rc::Rc; use std::rc::Weak; const K_BUCKET_SIZE: usize = 4; type LeafNode = Option<Rc<RefCell<Vertex>>>; #[derive(Debug)] pub struct KBucket { node_bucket: Vec<Node>, depth: usize, } impl KBucket { pub fn new() -> Self { KBucket { node_bucket: Vec::with_capacity(K_BUCKET_SIZE), depth: 0, } } pub fn sort(&mut self) {} fn split(&self) -> (Option<KBucket>, Option<KBucket>) { let mut left = KBucket::new(); let mut right = KBucket::new(); left.depth = self.depth + 1; right.depth = self.depth + 1; for node in &self.node_bucket { match node.node_id.index(self.depth + 1) { 1 => left.node_bucket.push(*node), 0 => right.node_bucket.push(*node), _ => unreachable!(), } } (Some(left), Some(right)) } } #[derive(Debug)] pub struct Vertex { bit: Bit, k_bucket: Option<KBucket>, parent: Option<Weak<RefCell<Vertex>>>, left: LeafNode, right: LeafNode, } impl Vertex { fn new(bit: Bit) -> Vertex { Vertex { bit, k_bucket: Some(KBucket::new()), parent: None, left: None, right: None, } } fn split( vertex: &Rc<RefCell<Vertex>>, ) -> (Option<Rc<RefCell<Vertex>>>, Option<Rc<RefCell<Vertex>>>) { let mut left = Vertex::new(Bit::One); let mut right = Vertex::new(Bit::Zero); let tuple = vertex.borrow().k_bucket.as_ref().unwrap().split(); vertex.borrow_mut().k_bucket = None; left.k_bucket = tuple.0; right.k_bucket = tuple.1; ( Some(Rc::new(RefCell::new(left))), Some(Rc::new(RefCell::new(right))), ) } fn add_node<I: Iterator<Item = u8>>( vertex: &Rc<RefCell<Vertex>>, node: Node, node_iter: &mut I, node_id: &ByteString, prefix_contained: bool, ) { let has_k_bucket: bool; let mut split: bool = false; { has_k_bucket = vertex.borrow().k_bucket.is_some(); } match has_k_bucket { true => { { let mut vert = vertex.borrow_mut(); let bucket = vert.k_bucket.as_mut().unwrap(); if bucket.node_bucket.len() < K_BUCKET_SIZE { bucket.node_bucket.push(node); return; } if prefix_contained { let node_iter_next: u8 = node_iter.next().unwrap();
vertex.borrow_mut().left = left_vert; vertex.borrow_mut().right = right_vert; } Vertex::add_node(vertex, node, node_iter, &node_id, false); } } false => match node_iter.next().unwrap() { 1 => match &vertex.borrow().left { Some(vert) => { Vertex::add_node(vert, node, node_iter, &node_id, prefix_contained); } None => {} }, 0 => match &vertex.borrow().right { Some(vert) => { Vertex::add_node(vert, node, node_iter, &node_id, prefix_contained); } None => {} }, _ => unreachable!(), }, } } } #[derive(Debug)] pub struct RouteTable { pub length: u64, node_id: ByteString, root: LeafNode, } impl RouteTable { pub fn empty_new(node_id: ByteString) -> Self { RouteTable { length: 0, node_id, root: Some(Rc::new(RefCell::new(Vertex::new(Bit::Root)))), } } pub fn add_vertex() {} pub fn add_node(&mut self, node: Node) { match self.root.as_mut() { Some(x) => { let mut iter = node.node_id.into_iter(); Vertex::add_node(x, node, &mut iter, &self.node_id, true); self.length += 1; } None => { panic!("Root does not exist"); } } } fn find_closest(&self, node_id: [u8; ID_LENGTH]) -> Vec<Node> { let alpha_nodes: Vec<Node> = Vec::new(); match self.root { Some(ref x) => match &x.borrow_mut().k_bucket { Some(bucket) => {} None => {} }, None => {} } return alpha_nodes; } }
match node_iter_next { 1 => { if !matches!(vert.bit, Bit::One) { split = false; } } 0 => { if !matches!(vert.bit, Bit::Zero) { split = false; } } _ => {} } } } if split { let (left_vert, right_vert) = Vertex::split(vertex); { left_vert.as_ref().unwrap().borrow_mut().parent = Some(Rc::downgrade(&Rc::clone(vertex))); right_vert.as_ref().unwrap().borrow_mut().parent = Some(Rc::downgrade(&Rc::clone(vertex))); } {
random
[ { "content": "pub fn deserialize_message(v: Vec<u8>) -> Message {\n\n bincode::deserialize(&v).expect(\"Could not serialize message\")\n\n}\n\n\n", "file_path": "src/rpc.rs", "rank": 1, "score": 65682.53778056285 }, { "content": "pub fn serialize_message(msg: Message) -> Vec<u8> {\n\n bincode::serialize(&msg).expect(\"Could not serialize message\")\n\n}\n\n\n", "file_path": "src/rpc.rs", "rank": 2, "score": 63725.152702170526 }, { "content": "pub fn handle_message(sock: UdpSocket, size: usize, src: SocketAddr, buf: [u8; 1500]) {\n\n let mut vec = buf.to_vec();\n\n vec.resize(size, 0);\n\n let msg = deserialize_message(vec);\n\n let mut resp = ResponseFuncs::Error(\"Unknown request\".to_string());\n\n\n\n if let Message::Request(msg) = msg {\n\n resp = match msg {\n\n RequestFuncs::Ping => {\n\n // TODO Handle Ping\n\n println!(\"Ping!\");\n\n ResponseFuncs::Ping(true)\n\n }\n\n RequestFuncs::Store(s) => {\n\n // TODO Handle Store\n\n println!(\"Store: {}\", s);\n\n ResponseFuncs::Store(true)\n\n }\n\n RequestFuncs::FindNode(s) => {\n\n // TODO Handle Find Node\n", "file_path": "src/rpc.rs", "rank": 3, "score": 60939.56413354266 }, { "content": "pub fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n if args.len() < 2 {\n\n panic!(\"You must provide a server/client/full and a port\");\n\n }\n\n let addr = format!(\"0.0.0.0:{}\", args.get(2).unwrap());\n\n let addr_2 = addr.clone();\n\n\n\n let node_type = &args.get(1).unwrap()[..];\n\n\n\n match node_type {\n\n \"client\" => {\n\n println!(\n\n \"Your Kademlia DHT Node Client is binded to port: {}\",\n\n args.get(2).unwrap()\n\n );\n\n rpc::init_client(&addr);\n\n }\n\n \"server\" => {\n\n println!(\n", "file_path": "src/main.rs", "rank": 4, "score": 50731.48882741185 }, { "content": "pub fn test() {\n\n println!(\"test\");\n\n}\n", "file_path": "src/lib.rs", "rank": 5, "score": 50731.48882741185 }, { "content": "pub trait KadeNode {\n\n fn get_ip() -> IpAddr;\n\n fn get_port() -> u16;\n\n fn get_nodeid() -> ByteString;\n\n}\n\n\n\nimpl Node {\n\n // The default new node constructor\n\n // returns a node with a random 160 bit node_id\n\n pub fn new_node(ip_addr: IpAddr, port: u16) -> Self {\n\n Node {\n\n node_id: ByteString::random_new(),\n\n ip_addr,\n\n port,\n\n }\n\n }\n\n\n\n // The Node constructor for testing purposes only\n\n // returns a node with an empty node id\n\n // the node id should be manually set and tested for equality and ordering\n", "file_path": "src/node.rs", "rank": 6, "score": 44542.86155666063 }, { "content": "pub fn init_server(addr: &str) {\n\n let socket = UdpSocket::bind(addr).expect(\"Could not bind socket\");\n\n\n\n loop {\n\n let mut buf = [0u8; 1500];\n\n let sock = socket.try_clone().expect(\"Failed to clone socket\");\n\n match socket.recv_from(&mut buf) {\n\n Ok((sz, src)) => {\n\n thread::spawn(move || {\n\n handle_message(sock, sz, src, buf);\n\n });\n\n }\n\n Err(e) => {\n\n eprintln!(\"couldn't receive datagram: {}\", e);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/rpc.rs", "rank": 7, "score": 43289.63477634116 }, { "content": "pub fn init_client(addr: &str) {\n\n let socket = UdpSocket::bind(addr).expect(\"Could not bind client socket\");\n\n socket\n\n .connect(\"127.0.0.1:8111\")\n\n .expect(\"Could not connect to server\");\n\n\n\n let mut i = 0;\n\n loop {\n\n let fmt = format!(\"Hello Iteration {}\", i);\n\n let resp = req_rep(\n\n socket.try_clone().expect(\"Could not clone socket\"),\n\n RequestFuncs::Ping,\n\n );\n\n\n\n println!(\"{:?}\", resp);\n\n\n\n let resp = req_rep(\n\n socket.try_clone().expect(\"Could not clone socket\"),\n\n RequestFuncs::Store(32),\n\n );\n\n println!(\"{:?}\", resp);\n\n i += 1;\n\n thread::sleep(time::Duration::from_millis(500));\n\n }\n\n}\n", "file_path": "src/rpc.rs", "rank": 8, "score": 43289.63477634116 }, { "content": "pub fn kade_init(addr: &str) {\n\n let port_string = addr.to_string();\n\n let localhost_v4 = IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1));\n\n let mut kade_node: KademNode<i32, i32> = KademNode::new(localhost_v4, 8111);\n\n\n\n println!(\"Kade Node Initialized!:\");\n\n println!(\"{:#?}\", &kade_node);\n\n println!(\"-------\");\n\n\n\n let one_ip = IpAddr::V4(Ipv4Addr::new(28, 2, 9, 1));\n\n let one_node = Node::new_node(one_ip, 8111);\n\n kade_node.route_table.as_mut().unwrap().add_node(one_node);\n\n\n\n let two_ip = IpAddr::V4(Ipv4Addr::new(18, 7, 9, 1));\n\n let two_node = Node::new_node(two_ip, 8111);\n\n kade_node.route_table.as_mut().unwrap().add_node(two_node);\n\n println!(\"{:#?}\", &kade_node);\n\n}\n", "file_path": "src/kademlia_dht.rs", "rank": 9, "score": 41880.43559212749 }, { "content": "pub trait RPC<K, V> {\n\n fn find_node(&self) -> Node;\n\n fn store(&mut self) -> bool;\n\n fn find_value(&self, key: K) -> V;\n\n fn ping(&self) -> bool;\n\n}\n\n// KademNode represents the entire data struct of a node\n\n// Contained within is the route table for finding other nodes, and hash map to store the k, v pairs\n\n#[derive(Debug)]\n\npub struct KademNode<K, V> {\n\n pub node_id: ByteString,\n\n pub ip_addr: IpAddr,\n\n pub port: u16,\n\n pub route_table: Option<RouteTable>,\n\n hash_map: Option<HashMap<K, V>>,\n\n}\n\n\n\nimpl<K, V> KademNode<K, V> {\n\n pub fn new(ip_addr: IpAddr, port: u16) -> Self {\n\n let node_id = ByteString::random_new();\n", "file_path": "src/node.rs", "rank": 10, "score": 36174.180302900924 }, { "content": "pub fn req_rep(sock: UdpSocket, req: RequestFuncs) -> ResponseFuncs {\n\n let msg = Message::Request(req);\n\n let serialized = serialize_message(msg);\n\n\n\n sock.send(&serialized).expect(\"Failed to write to server\");\n\n let mut buf = [0u8; 1500];\n\n\n\n let (len, _src) = sock\n\n .recv_from(&mut buf)\n\n .expect(\"Could not read into buffer\");\n\n\n\n let resp = deserialize_message(buf.to_vec());\n\n if let Message::Response(resp) = resp {\n\n return resp;\n\n }\n\n\n\n return ResponseFuncs::Error(\"No valid response\".to_string());\n\n}\n\n\n", "file_path": "src/rpc.rs", "rank": 11, "score": 34090.85862811158 }, { "content": "\n\n pub fn index(&self, i: usize) -> u8 {\n\n let base = i / 8;\n\n let offset = i % 8;\n\n (self.0[base] >> offset) & 1\n\n }\n\n\n\n pub fn get(index: usize) -> Option<Bit> {\n\n todo!()\n\n }\n\n}\n\n\n\nimpl PartialEq for ByteString {\n\n fn eq(&self, other: &Self) -> bool {\n\n for i in 0..ID_LENGTH {\n\n if self.0[i] != other.0[i] {\n\n return false;\n\n }\n\n }\n\n return true;\n", "file_path": "src/node.rs", "rank": 12, "score": 19209.05481358453 }, { "content": "impl ByteString {\n\n // ByteString constructor that takes in a preset array of bitstring\n\n pub fn new(arr: [u8; ID_LENGTH]) -> Self {\n\n ByteString(arr, 0)\n\n }\n\n\n\n // ByteString constructor to create an empty node ID\n\n pub fn new_empty() -> Self {\n\n let node = [0; ID_LENGTH];\n\n ByteString(node, 0)\n\n }\n\n\n\n // ByeString constructor to create a random 160 bit node ID\n\n pub fn random_new() -> Self {\n\n let mut node = [0; ID_LENGTH];\n\n for i in 0..ID_LENGTH {\n\n node[i] = random::<u8>();\n\n }\n\n ByteString(node, 0)\n\n }\n", "file_path": "src/node.rs", "rank": 13, "score": 19207.277264890632 }, { "content": "use rand::random;\n\nuse std::cmp::Ordering;\n\nuse std::collections::HashMap;\n\nuse std::iter::Iterator;\n\nuse std::net::{IpAddr, Ipv4Addr, Ipv6Addr};\n\n\n\nuse crate::route::RouteTable;\n\n\n\npub const ID_LENGTH: usize = 20;\n\n#[derive(Debug)]\n\npub enum Bit {\n\n Zero,\n\n One,\n\n Root,\n\n}\n\n\n\n// 160 bit Node ID in tuple, 0 position is an array of bits, and 1 position is the size\n\n#[derive(Debug, Clone, Copy)]\n\npub struct ByteString(pub [u8; ID_LENGTH], usize);\n\n\n", "file_path": "src/node.rs", "rank": 14, "score": 19206.203645171954 }, { "content": " pub fn test_node(ip_addr: IpAddr, port: u16, arr: [u8; ID_LENGTH]) -> Self {\n\n let empty_bytestring = ByteString::new(arr);\n\n Node {\n\n node_id: empty_bytestring,\n\n ip_addr,\n\n port,\n\n }\n\n }\n\n\n\n // Calculates and returns XOR Distance between two nodes\n\n pub fn distance(&self, node_id: ByteString) -> ByteString {\n\n let mut nodeid = ByteString::new_empty();\n\n for i in 0..20 {\n\n nodeid.0[i] = node_id.0[i] ^ self.node_id.0[i]\n\n }\n\n return nodeid;\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/node.rs", "rank": 15, "score": 19205.325379139926 }, { "content": " assert_eq!(xor_distance.0, test_arr);\n\n }\n\n\n\n #[test]\n\n fn new_node() {\n\n let new_ip = IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1));\n\n let arr: [u8; ID_LENGTH] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7];\n\n let port: u16 = 5111;\n\n let new_node = Node::test_node(new_ip, port, arr);\n\n assert_eq!(new_node.ip_addr, new_ip);\n\n assert_eq!(new_node.port, port);\n\n }\n\n\n\n #[test]\n\n fn bytestring_eq() {\n\n let arr_one: [u8; ID_LENGTH] =\n\n [0, 0, 0, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7];\n\n let arr_two: [u8; ID_LENGTH] =\n\n [0, 0, 0, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7];\n\n let arr_three: [u8; ID_LENGTH] =\n", "file_path": "src/node.rs", "rank": 16, "score": 19202.589472127875 }, { "content": "mod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn node_distance() {\n\n let new_ip = IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1));\n\n let arr: [u8; ID_LENGTH] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7];\n\n let port: u16 = 5111;\n\n let new_node = Node::test_node(new_ip, port, arr);\n\n\n\n let comp_arr: [u8; ID_LENGTH] =\n\n [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9];\n\n\n\n let comp_byte = ByteString::new(comp_arr);\n\n\n\n let test_arr: [u8; ID_LENGTH] =\n\n [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14];\n\n\n\n let xor_distance = new_node.distance(comp_byte);\n\n\n", "file_path": "src/node.rs", "rank": 17, "score": 19202.45580514667 }, { "content": " fn next(&mut self) -> Option<Self::Item> {\n\n let ret;\n\n if self.1 > ID_LENGTH * 8 - 1 {\n\n ret = None;\n\n } else {\n\n ret = Some(self.index(self.1));\n\n }\n\n self.1 += 1;\n\n ret\n\n }\n\n}\n", "file_path": "src/node.rs", "rank": 18, "score": 19201.40254563671 }, { "content": " }\n\n}\n\n\n\n// UNTESTED IMPLEMENTATION\n\nimpl PartialOrd for ByteString {\n\n fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n\n for i in 0..ID_LENGTH {\n\n if self.0[i].cmp(&other.0[i]) == Ordering::Equal {\n\n continue;\n\n } else {\n\n return Some(self.0[i].cmp(&other.0[i]));\n\n }\n\n }\n\n return Some(Ordering::Equal);\n\n }\n\n}\n\n\n\nimpl Iterator for ByteString {\n\n type Item = u8;\n\n\n", "file_path": "src/node.rs", "rank": 19, "score": 19200.8096850008 }, { "content": " KademNode {\n\n node_id,\n\n ip_addr,\n\n port,\n\n route_table: Some(RouteTable::empty_new(node_id.clone())),\n\n hash_map: Some(HashMap::new()),\n\n }\n\n }\n\n}\n\n// Node represents a entity that is running a Kademlia DHT protocol\n\n// It can be uniquely identified by the node_id and has ip addr and port for UDP connection\n\n#[derive(Debug, Clone, Copy)]\n\npub struct Node {\n\n pub node_id: ByteString,\n\n pub ip_addr: IpAddr,\n\n pub port: u16,\n\n}\n\n\n", "file_path": "src/node.rs", "rank": 20, "score": 19200.802429118216 }, { "content": " [0, 0, 0, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7];\n\n\n\n let byte_one = ByteString::new(arr_one);\n\n let byte_two = ByteString::new(arr_two);\n\n let byte_three = ByteString::new(arr_three);\n\n\n\n assert_eq!(byte_one, byte_two);\n\n assert_ne!(byte_one, byte_three);\n\n }\n\n}\n", "file_path": "src/node.rs", "rank": 21, "score": 19197.76910546421 }, { "content": "#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]\n\npub enum ResponseFuncs {\n\n Ping(bool),\n\n Store(bool),\n\n FindNode(i32),\n\n Error(String),\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]\n\npub enum Message {\n\n Request(RequestFuncs),\n\n Response(ResponseFuncs),\n\n}\n\n\n\npub struct MessageO {\n\n demux_key: u8,\n\n sender_id: u8,\n\n payload: Option<u8>,\n\n}\n\n\n", "file_path": "src/rpc.rs", "rank": 34, "score": 6.531449915543714 }, { "content": "use crate::node::{Bit, ByteString, KademNode, Node, ID_LENGTH};\n\nuse std::net::{IpAddr, Ipv4Addr, Ipv6Addr};\n\nuse std::str;\n\n\n", "file_path": "src/kademlia_dht.rs", "rank": 35, "score": 5.5978065999425635 }, { "content": "use bincode;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::net::SocketAddr;\n\nuse std::net::UdpSocket;\n\nuse std::{thread, time};\n\npub enum Request<K, V> {\n\n Ping,\n\n Store(K, V),\n\n FindNode,\n\n FindValue(K, V),\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]\n\npub enum RequestFuncs {\n\n Ping,\n\n Store(i32),\n\n FindNode(i32),\n\n FindValue(i32),\n\n}\n\n\n", "file_path": "src/rpc.rs", "rank": 36, "score": 5.012413083676704 }, { "content": "mod kademlia_dht;\n\nmod node;\n\nmod route;\n\nmod rpc;\n\n\n\nuse kademlia_dht::kade_init;\n\nuse lib::test;\n\nuse node::{ByteString, Node};\n\nuse route::RouteTable;\n\nuse rpc::{handle_message, init_client, init_server};\n\nuse std::env;\n\nuse std::net::UdpSocket;\n\nuse std::thread;\n\nuse std::time::Duration;\n\n\n", "file_path": "src/main.rs", "rank": 37, "score": 4.050943340757343 }, { "content": " println!(\"Store: {}\", s);\n\n ResponseFuncs::Store(true)\n\n }\n\n RequestFuncs::FindValue(s) => {\n\n // TODO Handle Find Value\n\n println!(\"Store: {}\", s);\n\n ResponseFuncs::Store(true)\n\n }\n\n }\n\n }\n\n\n\n let resp_msg = Message::Response(resp);\n\n let serialized = serialize_message(resp_msg);\n\n\n\n sock.send_to(&serialized, &src)\n\n .expect(\"Failed to send a resposne\");\n\n}\n", "file_path": "src/rpc.rs", "rank": 38, "score": 1.953258144842682 }, { "content": " \"Your Kademlia DHT Node Server is binded to port: {}\",\n\n args.get(2).unwrap()\n\n );\n\n let rpc = thread::spawn(move || rpc::init_server(&addr));\n\n\n\n kademlia_dht::kade_init(&addr_2);\n\n rpc.join().unwrap();\n\n }\n\n \"full\" => {\n\n println!(\n\n \"Your Kademlia DHT Node Server is binded to port: {}\",\n\n args.get(2).unwrap()\n\n );\n\n let rpc = thread::spawn(move || rpc::init_server(&addr));\n\n\n\n kademlia_dht::kade_init(&addr_2);\n\n rpc.join().unwrap();\n\n }\n\n _ => {\n\n println!(\"You must provide a valid type (server / client / full).\");\n\n }\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 39, "score": 1.0325440985986807 }, { "content": "# Kademlia Distributed Hash Table\n\n[![Rust](https://github.com/quarterblue/kademlia-dht/actions/workflows/rust.yml/badge.svg)](https://github.com/quarterblue/kademlia-dht/actions/workflows/rust.yml)\n\n[![MIT licensed](https://img.shields.io/badge/license-MIT-blue.svg)](./LICENSE)\n\n<img src=https://img.shields.io/github/last-commit/quarterblue/kademlia-dht></img>\n\n\n\n<img width=\"40%\" src=\"https://upload.wikimedia.org/wikipedia/commons/6/63/Dht_example_SVG.svg\" alt=\"kademlia\">\n\n\n\nKademlia distributed hash table is a peer-to-peer dht with provable consistency and performance in a fault-prone environment. It is based on XOR distance metric topology to facilitate the communication between nodes.\n\n\n\nThis is a standalone implementation of the Kademlia distributed table in pure Rust. This implementation tries to follow the original paper as close as possible. Refer to the <a href=\"https://pdos.csail.mit.edu/~petar/papers/maymounkov-kademlia-lncs.pdf\">original paper</a> for more information.\n\n\n\n**Other references:**\n\n\n\n- <a href=\"https://pub.tik.ee.ethz.ch/students/2006-So/SA-2006-19.pdf\">Implementation of Kademlia by Bruno Spori</a>\n\n\n\n- <a href=\"http://maude.sip.ucm.es/kademlia/files/pita_kademlia.pdf\">Specification of the Kademlia DHT by Isabel Pita</a>\n\n\n\n## Installation\n\n\n\nTo run a server, clone the repo and run\n\n```bash\n\n$ cargo run server 8111\n\n```\n\n\n\nTo run a client, clone the repo and run\n\n```bash\n\n$ cargo run client 8222\n\n```\n\n\n\nTo run a full node, clone the repo and run\n\n```bash\n\n$ cargo run full 8111\n\n```\n\n\n\n## Usage\n\n\n\n```rust\n\n// To be updated\n\n```\n", "file_path": "README.md", "rank": 40, "score": 0.9998883155594809 } ]
Rust
sulis_module/src/generator/terrain_tiles.rs
ThyWoof/sulis
e89eda94a1a72228224e1926d307aa4c9228bdcb
use std::collections::HashMap; use std::io::Error; use std::rc::Rc; use crate::{ area::tile::{EdgeRules, TerrainKind, TerrainRules, Tile}, Module, }; use sulis_core::util::unable_to_create_error; #[derive(Clone)] pub struct TerrainTiles { pub id: String, pub base: Rc<Tile>, pub base_weight: u32, pub variants: Vec<Rc<Tile>>, pub edges: EdgesList, pub borders: HashMap<usize, EdgesList>, } impl PartialEq for TerrainTiles { fn eq(&self, other: &TerrainTiles) -> bool { self.id == other.id } } impl Eq for TerrainTiles {} #[derive(Clone)] pub struct EdgesList { pub inner_nw: Option<Rc<Tile>>, pub inner_ne: Option<Rc<Tile>>, pub inner_sw: Option<Rc<Tile>>, pub inner_se: Option<Rc<Tile>>, pub outer_n: Option<Rc<Tile>>, pub outer_s: Option<Rc<Tile>>, pub outer_e: Option<Rc<Tile>>, pub outer_w: Option<Rc<Tile>>, pub outer_se: Option<Rc<Tile>>, pub outer_ne: Option<Rc<Tile>>, pub outer_sw: Option<Rc<Tile>>, pub outer_nw: Option<Rc<Tile>>, pub outer_all: Option<Rc<Tile>>, pub inner_ne_sw: Option<Rc<Tile>>, pub inner_nw_se: Option<Rc<Tile>>, } impl EdgesList { pub fn new(id: &str, prefix: &str, rules: &EdgeRules) -> Result<EdgesList, Error> { let inner_nw = EdgesList::get_edge(prefix, id, &rules.inner_edge_postfix, &rules.nw_postfix); let inner_ne = EdgesList::get_edge(prefix, id, &rules.inner_edge_postfix, &rules.ne_postfix); let inner_sw = EdgesList::get_edge(prefix, id, &rules.inner_edge_postfix, &rules.sw_postfix); let inner_se = EdgesList::get_edge(prefix, id, &rules.inner_edge_postfix, &rules.se_postfix); let outer_n = EdgesList::get_edge(prefix, id, &rules.outer_edge_postfix, &rules.n_postfix); let outer_s = EdgesList::get_edge(prefix, id, &rules.outer_edge_postfix, &rules.s_postfix); let outer_e = EdgesList::get_edge(prefix, id, &rules.outer_edge_postfix, &rules.e_postfix); let outer_w = EdgesList::get_edge(prefix, id, &rules.outer_edge_postfix, &rules.w_postfix); let outer_ne = EdgesList::get_edge(prefix, id, &rules.outer_edge_postfix, &rules.ne_postfix); let outer_nw = EdgesList::get_edge(prefix, id, &rules.outer_edge_postfix, &rules.nw_postfix); let outer_se = EdgesList::get_edge(prefix, id, &rules.outer_edge_postfix, &rules.se_postfix); let outer_sw = EdgesList::get_edge(prefix, id, &rules.outer_edge_postfix, &rules.sw_postfix); let outer_all = EdgesList::get_edge(prefix, id, &rules.outer_edge_postfix, &rules.all_postfix); let inner_ne_sw = EdgesList::get_edge( prefix, id, &rules.inner_edge_postfix, &rules.ne_sw_postfix, ); let inner_nw_se = EdgesList::get_edge( prefix, id, &rules.inner_edge_postfix, &rules.nw_se_postfix, ); Ok(EdgesList { inner_nw, inner_ne, inner_sw, inner_se, outer_n, outer_s, outer_e, outer_w, outer_se, outer_ne, outer_sw, outer_nw, outer_all, inner_ne_sw, inner_nw_se, }) } fn get_edge(prefix: &str, id: &str, edge_postfix: &str, dir_postfix: &str) -> Option<Rc<Tile>> { let tile_id = format!("{}{}{}{}", prefix, id, edge_postfix, dir_postfix); match Module::tile(&tile_id) { None => { trace!( "Edge tile with '{}', '{}' not found for '{}'. Full path: '{}'", edge_postfix, dir_postfix, id, tile_id, ); None } Some(tile) => Some(tile), } } } impl TerrainTiles { pub fn new( rules: &TerrainRules, kind: &TerrainKind, all_kinds: &[TerrainKind], ) -> Result<TerrainTiles, Error> { let base_tile_id = format!("{}{}{}", rules.prefix, kind.id, rules.base_postfix); let base = match Module::tile(&base_tile_id) { None => { warn!("Base tile for terrain kind '{}' not found", kind.id); return unable_to_create_error("terrain_tiles", &kind.id); } Some(tile) => tile, }; let base_weight = match kind.base_weight { None => rules.base_weight, Some(weight) => weight, }; let mut variants = Vec::new(); for i in kind.variants.iter() { let tile_id = format!( "{}{}{}{}", rules.prefix, kind.id, rules.variant_postfix, i.to_string() ); let tile = match Module::tile(&tile_id) { None => { warn!( "Tile variant '{}' not found for terrain kind '{}'", i, kind.id ); continue; } Some(tile) => tile, }; variants.push(tile); } let mut borders = HashMap::new(); for (other_terrain, id) in kind.borders.iter() { let edges = EdgesList::new(id, &rules.prefix, &rules.edges)?; let mut index = None; for (i, other_kind) in all_kinds.iter().enumerate() { if &other_kind.id == other_terrain { index = Some(i); break; } } match index { None => { warn!( "Other terrain '{}' not found for border of '{}'", other_terrain, kind.id ); continue; } Some(index) => { borders.insert(index, edges); } } } let edges = EdgesList::new(&kind.id, &rules.prefix, &rules.edges)?; Ok(TerrainTiles { id: kind.id.clone(), base, base_weight, variants, borders, edges, }) } pub fn matching_edges(&self, index: Option<usize>) -> &EdgesList { match index { None => &self.edges, Some(index) => match self.borders.get(&index) { None => &self.edges, Some(edges) => edges, }, } } }
use std::collections::HashMap; use std::io::Error; use std::rc::Rc; use crate::{ area::tile::{EdgeRules, TerrainKind, TerrainRules, Tile}, Module, }; use sulis_core::util::unable_to_create_error; #[derive(Clone)] pub struct TerrainTiles { pub id: String, pub base: Rc<Tile>, pub base_weight: u32, pub variants: Vec<Rc<Tile>>, pub edges: EdgesList, pub borders: HashMap<usize, EdgesList>, } impl PartialEq for TerrainTiles { fn eq(&self, other: &TerrainTiles) -> bool { self.id == other.id } } impl Eq for TerrainTiles {} #[derive(Clone)] pub struct EdgesList { pub inner_nw: Option<Rc<Tile>>, pub inner_ne: Option<Rc<Tile>>, pub inner_sw: Option<Rc<Tile>>, pub inner_se: Option<Rc<Tile>>, pub outer_n: Option<Rc<Tile>>, pub outer_s: Option<Rc<Tile>>, pub outer_e: Option<Rc<Tile>>, pub outer_w: Option<Rc<Tile>>, pub outer_se: Option<Rc<Tile>>, pub outer_ne: Option<Rc<Tile>>, pub outer_sw: Option<Rc<Tile>>, pub outer_nw: Option<Rc<Tile>>, pub outer_all: Option<Rc<Tile>>, pub inner_ne_sw: Option<Rc<Tile>>, pub inner_nw_se: Option<Rc<Tile>>, } impl EdgesList { pub fn new(id: &str, prefix: &str, rules: &EdgeRules) -> Result<EdgesList, Error> { let inner_nw = EdgesList::get_edge(prefix, id, &rules.inner_edge_postfix, &rules.nw_postfix); let inner_ne = EdgesList::get_edge(prefix, id, &rules.inner_edge_postfix, &rules.ne_postfix); let inner_sw = EdgesList::get_edge(prefix, id, &rules.inner_edge_postfix, &rules.sw_postfix); let inner_se = EdgesList::get_edge(prefix, id, &rules.inner_edge_postfix, &rules.se_postfix); let outer_n = EdgesList::get_edge(prefix, id, &rules.outer_edge_postfix, &rules.n_postfix); let outer_s = EdgesList::get_edge(prefix, id, &rules.outer_edge_postfix, &rules.s_postfix); let outer_e = EdgesList::get_edge(prefix, id, &rules.outer_edge_postfix, &rules.e_postfix); let outer_w = EdgesList::get_edge(prefix, id, &rules.outer_edge_postfix, &rules.w_postfix); let outer_ne = EdgesList::get_edge(prefix, id, &rules.outer_edge_postfix, &rules.ne_postfix); let outer_nw = EdgesList::get_edge(prefix, id, &rules.outer_edge_postfix, &rules.nw_postfix); let outer_se = EdgesList::get_edge(prefix, id, &rules.outer_edge_postfix, &rules.se_postfix); let outer_sw = EdgesList::get_edge(prefix, id, &rules.outer_edge_postfix, &rules.sw_postfix); let outer_all = EdgesList::get_edge(prefix, id, &rules.outer_edge_postfix, &rules.all_postfix); let inner_ne_sw = EdgesList::get_edge( prefix, id, &rules.inner_edge_postfix, &rules.ne_sw_postfix, ); let inner_nw_se = EdgesList::get_edge( prefix, id, &rules.inner_edge_postfix, &rules.nw_se_postfix, );
} fn get_edge(prefix: &str, id: &str, edge_postfix: &str, dir_postfix: &str) -> Option<Rc<Tile>> { let tile_id = format!("{}{}{}{}", prefix, id, edge_postfix, dir_postfix); match Module::tile(&tile_id) { None => { trace!( "Edge tile with '{}', '{}' not found for '{}'. Full path: '{}'", edge_postfix, dir_postfix, id, tile_id, ); None } Some(tile) => Some(tile), } } } impl TerrainTiles { pub fn new( rules: &TerrainRules, kind: &TerrainKind, all_kinds: &[TerrainKind], ) -> Result<TerrainTiles, Error> { let base_tile_id = format!("{}{}{}", rules.prefix, kind.id, rules.base_postfix); let base = match Module::tile(&base_tile_id) { None => { warn!("Base tile for terrain kind '{}' not found", kind.id); return unable_to_create_error("terrain_tiles", &kind.id); } Some(tile) => tile, }; let base_weight = match kind.base_weight { None => rules.base_weight, Some(weight) => weight, }; let mut variants = Vec::new(); for i in kind.variants.iter() { let tile_id = format!( "{}{}{}{}", rules.prefix, kind.id, rules.variant_postfix, i.to_string() ); let tile = match Module::tile(&tile_id) { None => { warn!( "Tile variant '{}' not found for terrain kind '{}'", i, kind.id ); continue; } Some(tile) => tile, }; variants.push(tile); } let mut borders = HashMap::new(); for (other_terrain, id) in kind.borders.iter() { let edges = EdgesList::new(id, &rules.prefix, &rules.edges)?; let mut index = None; for (i, other_kind) in all_kinds.iter().enumerate() { if &other_kind.id == other_terrain { index = Some(i); break; } } match index { None => { warn!( "Other terrain '{}' not found for border of '{}'", other_terrain, kind.id ); continue; } Some(index) => { borders.insert(index, edges); } } } let edges = EdgesList::new(&kind.id, &rules.prefix, &rules.edges)?; Ok(TerrainTiles { id: kind.id.clone(), base, base_weight, variants, borders, edges, }) } pub fn matching_edges(&self, index: Option<usize>) -> &EdgesList { match index { None => &self.edges, Some(index) => match self.borders.get(&index) { None => &self.edges, Some(edges) => edges, }, } } }
Ok(EdgesList { inner_nw, inner_ne, inner_sw, inner_se, outer_n, outer_s, outer_e, outer_w, outer_se, outer_ne, outer_sw, outer_nw, outer_all, inner_ne_sw, inner_nw_se, })
call_expression
[]
Rust
src/i2s/rx_timing.rs
ForsakenHarmony/esp32c3-pac
7d9eb9a5b5a51077d1d1eb6c6efd186064b7149b
#[doc = "Reader of register RX_TIMING"] pub type R = crate::R<u32, super::RX_TIMING>; #[doc = "Writer for register RX_TIMING"] pub type W = crate::W<u32, super::RX_TIMING>; #[doc = "Register RX_TIMING `reset()`'s with value 0"] impl crate::ResetValue for super::RX_TIMING { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `RX_BCK_IN_DM`"] pub type RX_BCK_IN_DM_R = crate::R<u8, u8>; #[doc = "Write proxy for field `RX_BCK_IN_DM`"] pub struct RX_BCK_IN_DM_W<'a> { w: &'a mut W, } impl<'a> RX_BCK_IN_DM_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 28)) | (((value as u32) & 0x03) << 28); self.w } } #[doc = "Reader of field `RX_WS_IN_DM`"] pub type RX_WS_IN_DM_R = crate::R<u8, u8>; #[doc = "Write proxy for field `RX_WS_IN_DM`"] pub struct RX_WS_IN_DM_W<'a> { w: &'a mut W, } impl<'a> RX_WS_IN_DM_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 24)) | (((value as u32) & 0x03) << 24); self.w } } #[doc = "Reader of field `RX_BCK_OUT_DM`"] pub type RX_BCK_OUT_DM_R = crate::R<u8, u8>; #[doc = "Write proxy for field `RX_BCK_OUT_DM`"] pub struct RX_BCK_OUT_DM_W<'a> { w: &'a mut W, } impl<'a> RX_BCK_OUT_DM_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 20)) | (((value as u32) & 0x03) << 20); self.w } } #[doc = "Reader of field `RX_WS_OUT_DM`"] pub type RX_WS_OUT_DM_R = crate::R<u8, u8>; #[doc = "Write proxy for field `RX_WS_OUT_DM`"] pub struct RX_WS_OUT_DM_W<'a> { w: &'a mut W, } impl<'a> RX_WS_OUT_DM_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 16)) | (((value as u32) & 0x03) << 16); self.w } } #[doc = "Reader of field `RX_SD_IN_DM`"] pub type RX_SD_IN_DM_R = crate::R<u8, u8>; #[doc = "Write proxy for field `RX_SD_IN_DM`"] pub struct RX_SD_IN_DM_W<'a> { w: &'a mut W, } impl<'a> RX_SD_IN_DM_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x03) | ((value as u32) & 0x03); self.w } } impl R { #[doc = "Bits 28:29"] #[inline(always)] pub fn rx_bck_in_dm(&self) -> RX_BCK_IN_DM_R { RX_BCK_IN_DM_R::new(((self.bits >> 28) & 0x03) as u8) } #[doc = "Bits 24:25"] #[inline(always)] pub fn rx_ws_in_dm(&self) -> RX_WS_IN_DM_R { RX_WS_IN_DM_R::new(((self.bits >> 24) & 0x03) as u8) } #[doc = "Bits 20:21"] #[inline(always)] pub fn rx_bck_out_dm(&self) -> RX_BCK_OUT_DM_R { RX_BCK_OUT_DM_R::new(((self.bits >> 20) & 0x03) as u8) } #[doc = "Bits 16:17"] #[inline(always)] pub fn rx_ws_out_dm(&self) -> RX_WS_OUT_DM_R { RX_WS_OUT_DM_R::new(((self.bits >> 16) & 0x03) as u8) } #[doc = "Bits 0:1"] #[inline(always)] pub fn rx_sd_in_dm(&self) -> RX_SD_IN_DM_R { RX_SD_IN_DM_R::new((self.bits & 0x03) as u8) } } impl W { #[doc = "Bits 28:29"] #[inline(always)] pub fn rx_bck_in_dm(&mut self) -> RX_BCK_IN_DM_W { RX_BCK_IN_DM_W { w: self } } #[doc = "Bits 24:25"] #[inline(always)] pub fn rx_ws_in_dm(&mut self) -> RX_WS_IN_DM_W { RX_WS_IN_DM_W { w: self } } #[doc = "Bits 20:21"] #[inline(always)] pub fn rx_bck_out_dm(&mut self) -> RX_BCK_OUT_DM_W { RX_BCK_OUT_DM_W { w: self } } #[doc = "Bits 16:17"] #[inline(always)] pub fn rx_ws_out_dm(&mut self) -> RX_WS_OUT_DM_W { RX_WS_OUT_DM_W { w: self } } #[doc = "Bits 0:1"] #[inline(always)] pub fn rx_sd_in_dm(&mut self) -> RX_SD_IN_DM_W { RX_SD_IN_DM_W { w: self } } }
#[doc = "Reader of register RX_TIMING"] pub type R = crate::R<u32, super::RX_TIMING>; #[doc = "Writer for register RX_TIMING"] pub type W = crate::W<u32, super::RX_TIMING>; #[doc = "Register RX_TIMING `reset()`'s with value 0"] impl crate::ResetValue for super::RX_TIMING { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `RX_BCK_IN_DM`"] pub type RX_BCK_IN_DM_R = crate::R<u8, u8>; #[doc = "Write proxy for field `RX_BCK_IN_DM`"] pub struct RX_BCK_IN_DM_W<'a> { w: &'a mut W, } impl<'a> RX_BCK_IN_DM_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 28)) | (((value as u32) & 0x03) << 28); self.w } } #[doc = "Reader of field `RX_WS_IN_DM`"] pub type RX_WS_IN_DM_R = crate::R<u8, u8>; #[doc = "Write proxy for field `RX_WS_IN_DM`"] pub struct RX_WS_IN_DM_W<'a> { w: &'a mut W, } impl<'a> RX_WS_IN_DM_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 24)) | (((value as u32) & 0x03) << 24); self.w } } #[doc = "Reader of field `RX_BCK_OUT_DM`"] pub type RX_BCK_OUT_DM_R = crate::R<u8, u8>; #[doc = "Write proxy for field `RX_BCK_OUT_DM`"] pub struct RX_BCK_OUT_DM_W<'a> { w: &'a mut W, } impl<'a> RX_BCK_OUT_DM_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 20)) | (((value as u32) & 0x03) << 20); self.w } } #[doc = "Reader of field `RX_WS_OUT_DM`"] pub type RX_WS_OUT_DM_R = crate::R<u8, u8>; #[doc = "Write proxy for field `RX_WS_OUT_DM`"] pub struct RX_WS_OUT_DM_W<'a> { w: &'a mut W,
w: self } } #[doc = "Bits 16:17"] #[inline(always)] pub fn rx_ws_out_dm(&mut self) -> RX_WS_OUT_DM_W { RX_WS_OUT_DM_W { w: self } } #[doc = "Bits 0:1"] #[inline(always)] pub fn rx_sd_in_dm(&mut self) -> RX_SD_IN_DM_W { RX_SD_IN_DM_W { w: self } } }
} impl<'a> RX_WS_OUT_DM_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 16)) | (((value as u32) & 0x03) << 16); self.w } } #[doc = "Reader of field `RX_SD_IN_DM`"] pub type RX_SD_IN_DM_R = crate::R<u8, u8>; #[doc = "Write proxy for field `RX_SD_IN_DM`"] pub struct RX_SD_IN_DM_W<'a> { w: &'a mut W, } impl<'a> RX_SD_IN_DM_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x03) | ((value as u32) & 0x03); self.w } } impl R { #[doc = "Bits 28:29"] #[inline(always)] pub fn rx_bck_in_dm(&self) -> RX_BCK_IN_DM_R { RX_BCK_IN_DM_R::new(((self.bits >> 28) & 0x03) as u8) } #[doc = "Bits 24:25"] #[inline(always)] pub fn rx_ws_in_dm(&self) -> RX_WS_IN_DM_R { RX_WS_IN_DM_R::new(((self.bits >> 24) & 0x03) as u8) } #[doc = "Bits 20:21"] #[inline(always)] pub fn rx_bck_out_dm(&self) -> RX_BCK_OUT_DM_R { RX_BCK_OUT_DM_R::new(((self.bits >> 20) & 0x03) as u8) } #[doc = "Bits 16:17"] #[inline(always)] pub fn rx_ws_out_dm(&self) -> RX_WS_OUT_DM_R { RX_WS_OUT_DM_R::new(((self.bits >> 16) & 0x03) as u8) } #[doc = "Bits 0:1"] #[inline(always)] pub fn rx_sd_in_dm(&self) -> RX_SD_IN_DM_R { RX_SD_IN_DM_R::new((self.bits & 0x03) as u8) } } impl W { #[doc = "Bits 28:29"] #[inline(always)] pub fn rx_bck_in_dm(&mut self) -> RX_BCK_IN_DM_W { RX_BCK_IN_DM_W { w: self } } #[doc = "Bits 24:25"] #[inline(always)] pub fn rx_ws_in_dm(&mut self) -> RX_WS_IN_DM_W { RX_WS_IN_DM_W { w: self } } #[doc = "Bits 20:21"] #[inline(always)] pub fn rx_bck_out_dm(&mut self) -> RX_BCK_OUT_DM_W { RX_BCK_OUT_DM_W {
random
[ { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "src/generic.rs", "rank": 0, "score": 153083.51901650874 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "build.rs", "rank": 1, "score": 58583.6630925218 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "src/generic.rs", "rank": 2, "score": 53767.823776449186 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "src/generic.rs", "rank": 3, "score": 53756.44927678981 }, { "content": "#[doc = \"Reader of register RESET_STATE\"]\n\npub type R = crate::R<u32, super::RESET_STATE>;\n\n#[doc = \"Writer for register RESET_STATE\"]\n\npub type W = crate::W<u32, super::RESET_STATE>;\n\n#[doc = \"Register RESET_STATE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::RESET_STATE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DRESET_MASK_PROCPU`\"]\n\npub type DRESET_MASK_PROCPU_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `DRESET_MASK_PROCPU`\"]\n\npub struct DRESET_MASK_PROCPU_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DRESET_MASK_PROCPU_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 4, "score": 52553.76983904776 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RESET_CAUSE_APPCPU`\"]\n\npub type RESET_CAUSE_APPCPU_R = crate::R<u8, u8>;\n\n#[doc = \"Reader of field `RESET_CAUSE_PROCPU`\"]\n\npub type RESET_CAUSE_PROCPU_R = crate::R<u8, u8>;\n\nimpl R {\n\n #[doc = \"Bit 25\"]\n\n #[inline(always)]\n\n pub fn dreset_mask_procpu(&self) -> DRESET_MASK_PROCPU_R {\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 5, "score": 52545.85872540039 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `OCD_HALT_ON_RESET_APPCPU`\"]\n\npub type OCD_HALT_ON_RESET_APPCPU_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OCD_HALT_ON_RESET_APPCPU`\"]\n\npub struct OCD_HALT_ON_RESET_APPCPU_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OCD_HALT_ON_RESET_APPCPU_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 6, "score": 52543.79243576746 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `STAT_VECTOR_SEL_APPCPU`\"]\n\npub type STAT_VECTOR_SEL_APPCPU_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `STAT_VECTOR_SEL_APPCPU`\"]\n\npub struct STAT_VECTOR_SEL_APPCPU_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> STAT_VECTOR_SEL_APPCPU_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 7, "score": 52537.54650596362 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 22)) | (((value as u32) & 0x01) << 22);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `JTAG_RESET_FLAG_APPCPU`\"]\n\npub type JTAG_RESET_FLAG_APPCPU_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `JTAG_RESET_FLAG_PROCPU`\"]\n\npub type JTAG_RESET_FLAG_PROCPU_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `OCD_HALT_ON_RESET_PROCPU`\"]\n\npub type OCD_HALT_ON_RESET_PROCPU_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OCD_HALT_ON_RESET_PROCPU`\"]\n\npub struct OCD_HALT_ON_RESET_PROCPU_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OCD_HALT_ON_RESET_PROCPU_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 8, "score": 52537.53629328451 }, { "content": "impl<'a> ALL_RESET_FLAG_CLR_APPCPU_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ALL_RESET_FLAG_CLR_PROCPU`\"]\n\npub struct ALL_RESET_FLAG_CLR_PROCPU_W<'a> {\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 9, "score": 52537.00239890746 }, { "content": "#[doc = \"Write proxy for field `JTAG_RESET_FLAG_CLR_APPCPU`\"]\n\npub struct JTAG_RESET_FLAG_CLR_APPCPU_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> JTAG_RESET_FLAG_CLR_APPCPU_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 23)) | (((value as u32) & 0x01) << 23);\n\n self.w\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 10, "score": 52536.60390921234 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `DRESET_MASK_APPCPU`\"]\n\npub type DRESET_MASK_APPCPU_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `DRESET_MASK_APPCPU`\"]\n\npub struct DRESET_MASK_APPCPU_W<'a> {\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 11, "score": 52534.930134467686 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ALL_RESET_FLAG_CLR_APPCPU`\"]\n\npub struct ALL_RESET_FLAG_CLR_APPCPU_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 12, "score": 52534.5268334017 }, { "content": "#[doc = \"Reader of register LSTIMER2_VALUE\"]\n\npub type R = crate::R<u32, super::LSTIMER2_VALUE>;\n\n#[doc = \"Reader of field `LSTIMER2_CNT`\"]\n\npub type LSTIMER2_CNT_R = crate::R<u16, u16>;\n\nimpl R {\n\n #[doc = \"Bits 0:13\"]\n\n #[inline(always)]\n\n pub fn lstimer2_cnt(&self) -> LSTIMER2_CNT_R {\n\n LSTIMER2_CNT_R::new((self.bits & 0x3fff) as u16)\n\n }\n\n}\n", "file_path": "src/ledc/lstimer2_value.rs", "rank": 13, "score": 52533.81184300065 }, { "content": "#[doc = \"Reader of register LSTIMER0_VALUE\"]\n\npub type R = crate::R<u32, super::LSTIMER0_VALUE>;\n\n#[doc = \"Reader of field `LSTIMER0_CNT`\"]\n\npub type LSTIMER0_CNT_R = crate::R<u16, u16>;\n\nimpl R {\n\n #[doc = \"Bits 0:13\"]\n\n #[inline(always)]\n\n pub fn lstimer0_cnt(&self) -> LSTIMER0_CNT_R {\n\n LSTIMER0_CNT_R::new((self.bits & 0x3fff) as u16)\n\n }\n\n}\n", "file_path": "src/ledc/lstimer0_value.rs", "rank": 14, "score": 52533.81184300065 }, { "content": "#[doc = \"Reader of register LSTIMER3_VALUE\"]\n\npub type R = crate::R<u32, super::LSTIMER3_VALUE>;\n\n#[doc = \"Reader of field `LSTIMER3_CNT`\"]\n\npub type LSTIMER3_CNT_R = crate::R<u16, u16>;\n\nimpl R {\n\n #[doc = \"Bits 0:13\"]\n\n #[inline(always)]\n\n pub fn lstimer3_cnt(&self) -> LSTIMER3_CNT_R {\n\n LSTIMER3_CNT_R::new((self.bits & 0x3fff) as u16)\n\n }\n\n}\n", "file_path": "src/ledc/lstimer3_value.rs", "rank": 15, "score": 52533.81184300065 }, { "content": "#[doc = \"Reader of register LSTIMER1_VALUE\"]\n\npub type R = crate::R<u32, super::LSTIMER1_VALUE>;\n\n#[doc = \"Reader of field `LSTIMER1_CNT`\"]\n\npub type LSTIMER1_CNT_R = crate::R<u16, u16>;\n\nimpl R {\n\n #[doc = \"Bits 0:13\"]\n\n #[inline(always)]\n\n pub fn lstimer1_cnt(&self) -> LSTIMER1_CNT_R {\n\n LSTIMER1_CNT_R::new((self.bits & 0x3fff) as u16)\n\n }\n\n}\n", "file_path": "src/ledc/lstimer1_value.rs", "rank": 16, "score": 52533.81184300065 }, { "content": " }\n\n}\n\n#[doc = \"Write proxy for field `JTAG_RESET_FLAG_CLR_PROCPU`\"]\n\npub struct JTAG_RESET_FLAG_CLR_PROCPU_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> JTAG_RESET_FLAG_CLR_PROCPU_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 17, "score": 52532.78007568858 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> ALL_RESET_FLAG_CLR_PROCPU_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);\n\n self.w\n\n }\n\n}\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 18, "score": 52532.10544363614 }, { "content": "#[doc = \"Reader of field `ALL_RESET_FLAG_APPCPU`\"]\n\npub type ALL_RESET_FLAG_APPCPU_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `ALL_RESET_FLAG_PROCPU`\"]\n\npub type ALL_RESET_FLAG_PROCPU_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `STAT_VECTOR_SEL_PROCPU`\"]\n\npub type STAT_VECTOR_SEL_PROCPU_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `STAT_VECTOR_SEL_PROCPU`\"]\n\npub struct STAT_VECTOR_SEL_PROCPU_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> STAT_VECTOR_SEL_PROCPU_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 19, "score": 52531.37456772528 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> DRESET_MASK_APPCPU_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);\n\n self.w\n\n }\n\n}\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 20, "score": 52528.04757901661 }, { "content": " STAT_VECTOR_SEL_PROCPU_R::new(((self.bits >> 13) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 12\"]\n\n #[inline(always)]\n\n pub fn stat_vector_sel_appcpu(&self) -> STAT_VECTOR_SEL_APPCPU_R {\n\n STAT_VECTOR_SEL_APPCPU_R::new(((self.bits >> 12) & 0x01) != 0)\n\n }\n\n #[doc = \"Bits 6:11\"]\n\n #[inline(always)]\n\n pub fn reset_cause_appcpu(&self) -> RESET_CAUSE_APPCPU_R {\n\n RESET_CAUSE_APPCPU_R::new(((self.bits >> 6) & 0x3f) as u8)\n\n }\n\n #[doc = \"Bits 0:5\"]\n\n #[inline(always)]\n\n pub fn reset_cause_procpu(&self) -> RESET_CAUSE_PROCPU_R {\n\n RESET_CAUSE_PROCPU_R::new((self.bits & 0x3f) as u8)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bit 25\"]\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 21, "score": 52519.656623150506 }, { "content": " #[inline(always)]\n\n pub fn ocd_halt_on_reset_procpu(&mut self) -> OCD_HALT_ON_RESET_PROCPU_W {\n\n OCD_HALT_ON_RESET_PROCPU_W { w: self }\n\n }\n\n #[doc = \"Bit 18\"]\n\n #[inline(always)]\n\n pub fn ocd_halt_on_reset_appcpu(&mut self) -> OCD_HALT_ON_RESET_APPCPU_W {\n\n OCD_HALT_ON_RESET_APPCPU_W { w: self }\n\n }\n\n #[doc = \"Bit 17\"]\n\n #[inline(always)]\n\n pub fn all_reset_flag_clr_appcpu(&mut self) -> ALL_RESET_FLAG_CLR_APPCPU_W {\n\n ALL_RESET_FLAG_CLR_APPCPU_W { w: self }\n\n }\n\n #[doc = \"Bit 16\"]\n\n #[inline(always)]\n\n pub fn all_reset_flag_clr_procpu(&mut self) -> ALL_RESET_FLAG_CLR_PROCPU_W {\n\n ALL_RESET_FLAG_CLR_PROCPU_W { w: self }\n\n }\n\n #[doc = \"Bit 13\"]\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 22, "score": 52513.55006972652 }, { "content": " #[inline(always)]\n\n pub fn dreset_mask_procpu(&mut self) -> DRESET_MASK_PROCPU_W {\n\n DRESET_MASK_PROCPU_W { w: self }\n\n }\n\n #[doc = \"Bit 24\"]\n\n #[inline(always)]\n\n pub fn dreset_mask_appcpu(&mut self) -> DRESET_MASK_APPCPU_W {\n\n DRESET_MASK_APPCPU_W { w: self }\n\n }\n\n #[doc = \"Bit 23\"]\n\n #[inline(always)]\n\n pub fn jtag_reset_flag_clr_appcpu(&mut self) -> JTAG_RESET_FLAG_CLR_APPCPU_W {\n\n JTAG_RESET_FLAG_CLR_APPCPU_W { w: self }\n\n }\n\n #[doc = \"Bit 22\"]\n\n #[inline(always)]\n\n pub fn jtag_reset_flag_clr_procpu(&mut self) -> JTAG_RESET_FLAG_CLR_PROCPU_W {\n\n JTAG_RESET_FLAG_CLR_PROCPU_W { w: self }\n\n }\n\n #[doc = \"Bit 19\"]\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 23, "score": 52512.83079287005 }, { "content": " OCD_HALT_ON_RESET_PROCPU_R::new(((self.bits >> 19) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 18\"]\n\n #[inline(always)]\n\n pub fn ocd_halt_on_reset_appcpu(&self) -> OCD_HALT_ON_RESET_APPCPU_R {\n\n OCD_HALT_ON_RESET_APPCPU_R::new(((self.bits >> 18) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 15\"]\n\n #[inline(always)]\n\n pub fn all_reset_flag_appcpu(&self) -> ALL_RESET_FLAG_APPCPU_R {\n\n ALL_RESET_FLAG_APPCPU_R::new(((self.bits >> 15) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 14\"]\n\n #[inline(always)]\n\n pub fn all_reset_flag_procpu(&self) -> ALL_RESET_FLAG_PROCPU_R {\n\n ALL_RESET_FLAG_PROCPU_R::new(((self.bits >> 14) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 13\"]\n\n #[inline(always)]\n\n pub fn stat_vector_sel_procpu(&self) -> STAT_VECTOR_SEL_PROCPU_R {\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 24, "score": 52508.77244111089 }, { "content": " DRESET_MASK_PROCPU_R::new(((self.bits >> 25) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 24\"]\n\n #[inline(always)]\n\n pub fn dreset_mask_appcpu(&self) -> DRESET_MASK_APPCPU_R {\n\n DRESET_MASK_APPCPU_R::new(((self.bits >> 24) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 21\"]\n\n #[inline(always)]\n\n pub fn jtag_reset_flag_appcpu(&self) -> JTAG_RESET_FLAG_APPCPU_R {\n\n JTAG_RESET_FLAG_APPCPU_R::new(((self.bits >> 21) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 20\"]\n\n #[inline(always)]\n\n pub fn jtag_reset_flag_procpu(&self) -> JTAG_RESET_FLAG_PROCPU_R {\n\n JTAG_RESET_FLAG_PROCPU_R::new(((self.bits >> 20) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 19\"]\n\n #[inline(always)]\n\n pub fn ocd_halt_on_reset_procpu(&self) -> OCD_HALT_ON_RESET_PROCPU_R {\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 25, "score": 52508.47685936761 }, { "content": " #[inline(always)]\n\n pub fn stat_vector_sel_procpu(&mut self) -> STAT_VECTOR_SEL_PROCPU_W {\n\n STAT_VECTOR_SEL_PROCPU_W { w: self }\n\n }\n\n #[doc = \"Bit 12\"]\n\n #[inline(always)]\n\n pub fn stat_vector_sel_appcpu(&mut self) -> STAT_VECTOR_SEL_APPCPU_W {\n\n STAT_VECTOR_SEL_APPCPU_W { w: self }\n\n }\n\n}\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 26, "score": 52504.01901501371 }, { "content": "#[doc = \"Reader of register INT_RAW\"]\n\npub type R = crate::R<u32, super::INT_RAW>;\n\n#[doc = \"Writer for register INT_RAW\"]\n\npub type W = crate::W<u32, super::INT_RAW>;\n\n#[doc = \"Register INT_RAW `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::INT_RAW {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `WAKEUP_INT_RAW`\"]\n\npub type WAKEUP_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `WAKEUP_INT_RAW`\"]\n\npub struct WAKEUP_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> WAKEUP_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/uart/int_raw.rs", "rank": 27, "score": 52342.56391807873 }, { "content": "#[doc = \"Reader of register INT_RAW\"]\n\npub type R = crate::R<u32, super::INT_RAW>;\n\n#[doc = \"Writer for register INT_RAW\"]\n\npub type W = crate::W<u32, super::INT_RAW>;\n\n#[doc = \"Register INT_RAW `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::INT_RAW {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `GENERAL_CALL_INT_RAW`\"]\n\npub type GENERAL_CALL_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `GENERAL_CALL_INT_RAW`\"]\n\npub struct GENERAL_CALL_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> GENERAL_CALL_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/i2c/int_raw.rs", "rank": 28, "score": 52341.26932567825 }, { "content": "#[doc = \"Reader of register INT_RAW\"]\n\npub type R = crate::R<u32, super::INT_RAW>;\n\n#[doc = \"Writer for register INT_RAW\"]\n\npub type W = crate::W<u32, super::INT_RAW>;\n\n#[doc = \"Register INT_RAW `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::INT_RAW {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `APP_CTRL1_INT_RAW`\"]\n\npub type APP_CTRL1_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `APP_CTRL1_INT_RAW`\"]\n\npub struct APP_CTRL1_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> APP_CTRL1_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/uhci/int_raw.rs", "rank": 29, "score": 52341.26932567825 }, { "content": "#[doc = \"Reader of register INT_RAW\"]\n\npub type R = crate::R<u32, super::INT_RAW>;\n\n#[doc = \"Writer for register INT_RAW\"]\n\npub type W = crate::W<u32, super::INT_RAW>;\n\n#[doc = \"Register INT_RAW `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::INT_RAW {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `CH1_TX_LOOP_INT_RAW`\"]\n\npub type CH1_TX_LOOP_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH1_TX_LOOP_INT_RAW`\"]\n\npub struct CH1_TX_LOOP_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CH1_TX_LOOP_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/rmt/int_raw.rs", "rank": 30, "score": 52340.02688219789 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TIME_OUT_INT_RAW`\"]\n\npub type TIME_OUT_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TIME_OUT_INT_RAW`\"]\n\npub struct TIME_OUT_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TIME_OUT_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/i2c/int_raw.rs", "rank": 31, "score": 52324.910192425894 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RXFIFO_FULL_INT_RAW`\"]\n\npub type RXFIFO_FULL_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RXFIFO_FULL_INT_RAW`\"]\n\npub struct RXFIFO_FULL_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RXFIFO_FULL_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/uart/int_raw.rs", "rank": 32, "score": 52323.83631708117 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CH0_ERR_INT_RAW`\"]\n\npub type CH0_ERR_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH0_ERR_INT_RAW`\"]\n\npub struct CH0_ERR_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CH0_ERR_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/rmt/int_raw.rs", "rank": 33, "score": 52323.83631708117 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SCL_ST_TO_INT_RAW`\"]\n\npub type SCL_ST_TO_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `SCL_ST_TO_INT_RAW`\"]\n\npub struct SCL_ST_TO_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SCL_ST_TO_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/i2c/int_raw.rs", "rank": 34, "score": 52323.83631708117 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `END_DETECT_INT_RAW`\"]\n\npub type END_DETECT_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `END_DETECT_INT_RAW`\"]\n\npub struct END_DETECT_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> END_DETECT_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/i2c/int_raw.rs", "rank": 35, "score": 52323.83631708117 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SW_XOFF_INT_RAW`\"]\n\npub type SW_XOFF_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `SW_XOFF_INT_RAW`\"]\n\npub struct SW_XOFF_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SW_XOFF_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/uart/int_raw.rs", "rank": 36, "score": 52323.83631708117 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `DSR_CHG_INT_RAW`\"]\n\npub type DSR_CHG_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `DSR_CHG_INT_RAW`\"]\n\npub struct DSR_CHG_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DSR_CHG_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/uart/int_raw.rs", "rank": 37, "score": 52323.83631708117 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TRANS_COMPLETE_INT_RAW`\"]\n\npub type TRANS_COMPLETE_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TRANS_COMPLETE_INT_RAW`\"]\n\npub struct TRANS_COMPLETE_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TRANS_COMPLETE_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/i2c/int_raw.rs", "rank": 38, "score": 52323.32979761053 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SW_XON_INT_RAW`\"]\n\npub type SW_XON_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `SW_XON_INT_RAW`\"]\n\npub struct SW_XON_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SW_XON_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/uart/int_raw.rs", "rank": 39, "score": 52323.32979761053 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RXFIFO_OVF_INT_RAW`\"]\n\npub type RXFIFO_OVF_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RXFIFO_OVF_INT_RAW`\"]\n\npub struct RXFIFO_OVF_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RXFIFO_OVF_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/i2c/int_raw.rs", "rank": 40, "score": 52323.32979761053 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RXFIFO_OVF_INT_RAW`\"]\n\npub type RXFIFO_OVF_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RXFIFO_OVF_INT_RAW`\"]\n\npub struct RXFIFO_OVF_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RXFIFO_OVF_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/uart/int_raw.rs", "rank": 41, "score": 52323.32979761053 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TX_HUNG_INT_RAW`\"]\n\npub type TX_HUNG_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TX_HUNG_INT_RAW`\"]\n\npub struct TX_HUNG_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TX_HUNG_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/uhci/int_raw.rs", "rank": 42, "score": 52323.32979761053 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RXFIFO_UDF_INT_RAW`\"]\n\npub type RXFIFO_UDF_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RXFIFO_UDF_INT_RAW`\"]\n\npub struct RXFIFO_UDF_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RXFIFO_UDF_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/i2c/int_raw.rs", "rank": 43, "score": 52323.32979761053 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TX_DONE_INT_RAW`\"]\n\npub type TX_DONE_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TX_DONE_INT_RAW`\"]\n\npub struct TX_DONE_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TX_DONE_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/uart/int_raw.rs", "rank": 44, "score": 52323.32979761053 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RS485_PARITY_ERR_INT_RAW`\"]\n\npub type RS485_PARITY_ERR_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RS485_PARITY_ERR_INT_RAW`\"]\n\npub struct RS485_PARITY_ERR_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RS485_PARITY_ERR_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/uart/int_raw.rs", "rank": 45, "score": 52322.81130667641 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SEND_S_Q_INT_RAW`\"]\n\npub type SEND_S_Q_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `SEND_S_Q_INT_RAW`\"]\n\npub struct SEND_S_Q_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SEND_S_Q_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/uhci/int_raw.rs", "rank": 46, "score": 52322.81130667641 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CH3_RX_END_INT_RAW`\"]\n\npub type CH3_RX_END_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH3_RX_END_INT_RAW`\"]\n\npub struct CH3_RX_END_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CH3_RX_END_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/rmt/int_raw.rs", "rank": 47, "score": 52322.33667171628 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CH1_TX_THR_EVENT_INT_RAW`\"]\n\npub type CH1_TX_THR_EVENT_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH1_TX_THR_EVENT_INT_RAW`\"]\n\npub struct CH1_TX_THR_EVENT_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CH1_TX_THR_EVENT_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/rmt/int_raw.rs", "rank": 48, "score": 52321.83153230491 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CH0_TX_THR_EVENT_INT_RAW`\"]\n\npub type CH0_TX_THR_EVENT_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH0_TX_THR_EVENT_INT_RAW`\"]\n\npub struct CH0_TX_THR_EVENT_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CH0_TX_THR_EVENT_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/rmt/int_raw.rs", "rank": 49, "score": 52321.38663386117 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SEND_A_Q_INT_RAW`\"]\n\npub type SEND_A_Q_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `SEND_A_Q_INT_RAW`\"]\n\npub struct SEND_A_Q_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SEND_A_Q_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/uhci/int_raw.rs", "rank": 50, "score": 52321.04353693204 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TXFIFO_EMPTY_INT_RAW`\"]\n\npub type TXFIFO_EMPTY_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TXFIFO_EMPTY_INT_RAW`\"]\n\npub struct TXFIFO_EMPTY_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXFIFO_EMPTY_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/uart/int_raw.rs", "rank": 51, "score": 52321.04353693204 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `GLITCH_DET_INT_RAW`\"]\n\npub type GLITCH_DET_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `GLITCH_DET_INT_RAW`\"]\n\npub struct GLITCH_DET_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> GLITCH_DET_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/uart/int_raw.rs", "rank": 52, "score": 52321.04353693204 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RX_START_INT_RAW`\"]\n\npub type RX_START_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RX_START_INT_RAW`\"]\n\npub struct RX_START_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RX_START_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/uhci/int_raw.rs", "rank": 53, "score": 52321.04353693204 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CH1_ERR_INT_RAW`\"]\n\npub type CH1_ERR_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH1_ERR_INT_RAW`\"]\n\npub struct CH1_ERR_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CH1_ERR_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/rmt/int_raw.rs", "rank": 54, "score": 52321.04353693204 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TRANS_START_INT_RAW`\"]\n\npub type TRANS_START_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TRANS_START_INT_RAW`\"]\n\npub struct TRANS_START_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TRANS_START_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/i2c/int_raw.rs", "rank": 55, "score": 52321.04353693204 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CTS_CHG_INT_RAW`\"]\n\npub type CTS_CHG_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CTS_CHG_INT_RAW`\"]\n\npub struct CTS_CHG_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CTS_CHG_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/uart/int_raw.rs", "rank": 56, "score": 52321.04353693204 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RXFIFO_TOUT_INT_RAW`\"]\n\npub type RXFIFO_TOUT_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RXFIFO_TOUT_INT_RAW`\"]\n\npub struct RXFIFO_TOUT_INT_RAW_W<'a> {\n", "file_path": "src/uart/int_raw.rs", "rank": 57, "score": 52320.202012592876 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CH3_ERR_INT_RAW`\"]\n\npub type CH3_ERR_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH3_ERR_INT_RAW`\"]\n\npub struct CH3_ERR_INT_RAW_W<'a> {\n", "file_path": "src/rmt/int_raw.rs", "rank": 58, "score": 52320.202012592876 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SLAVE_STRETCH_INT_RAW`\"]\n\npub type SLAVE_STRETCH_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `SLAVE_STRETCH_INT_RAW`\"]\n\npub struct SLAVE_STRETCH_INT_RAW_W<'a> {\n", "file_path": "src/i2c/int_raw.rs", "rank": 59, "score": 52320.202012592876 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `FRM_ERR_INT_RAW`\"]\n\npub type FRM_ERR_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `FRM_ERR_INT_RAW`\"]\n\npub struct FRM_ERR_INT_RAW_W<'a> {\n", "file_path": "src/uart/int_raw.rs", "rank": 60, "score": 52320.202012592876 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TXFIFO_OVF_INT_RAW`\"]\n\npub type TXFIFO_OVF_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TXFIFO_OVF_INT_RAW`\"]\n\npub struct TXFIFO_OVF_INT_RAW_W<'a> {\n", "file_path": "src/i2c/int_raw.rs", "rank": 61, "score": 52320.202012592876 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TXFIFO_WM_INT_RAW`\"]\n\npub type TXFIFO_WM_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TXFIFO_WM_INT_RAW`\"]\n\npub struct TXFIFO_WM_INT_RAW_W<'a> {\n", "file_path": "src/i2c/int_raw.rs", "rank": 62, "score": 52320.202012592876 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RX_HUNG_INT_RAW`\"]\n\npub type RX_HUNG_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RX_HUNG_INT_RAW`\"]\n\npub struct RX_HUNG_INT_RAW_W<'a> {\n", "file_path": "src/uhci/int_raw.rs", "rank": 63, "score": 52320.202012592876 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `APP_CTRL0_INT_RAW`\"]\n\npub type APP_CTRL0_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `APP_CTRL0_INT_RAW`\"]\n\npub struct APP_CTRL0_INT_RAW_W<'a> {\n", "file_path": "src/uhci/int_raw.rs", "rank": 64, "score": 52320.202012592876 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RS485_FRM_ERR_INT_RAW`\"]\n\npub type RS485_FRM_ERR_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RS485_FRM_ERR_INT_RAW`\"]\n\npub struct RS485_FRM_ERR_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RS485_FRM_ERR_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/uart/int_raw.rs", "rank": 65, "score": 52320.00871537642 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `BYTE_TRANS_DONE_INT_RAW`\"]\n\npub type BYTE_TRANS_DONE_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `BYTE_TRANS_DONE_INT_RAW`\"]\n\npub struct BYTE_TRANS_DONE_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> BYTE_TRANS_DONE_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/i2c/int_raw.rs", "rank": 66, "score": 52320.00871537642 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SCL_MAIN_ST_TO_INT_RAW`\"]\n\npub type SCL_MAIN_ST_TO_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `SCL_MAIN_ST_TO_INT_RAW`\"]\n\npub struct SCL_MAIN_ST_TO_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SCL_MAIN_ST_TO_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/i2c/int_raw.rs", "rank": 67, "score": 52320.00871537642 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CH0_TX_END_INT_RAW`\"]\n\npub type CH0_TX_END_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH0_TX_END_INT_RAW`\"]\n\npub struct CH0_TX_END_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CH0_TX_END_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/rmt/int_raw.rs", "rank": 68, "score": 52320.00871537642 }, { "content": "#[doc = \"Reader of field `NACK_INT_RAW`\"]\n\npub type NACK_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `NACK_INT_RAW`\"]\n\npub struct NACK_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> NACK_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/i2c/int_raw.rs", "rank": 69, "score": 52319.78220853642 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `MST_TXFIFO_UDF_INT_RAW`\"]\n\npub type MST_TXFIFO_UDF_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `MST_TXFIFO_UDF_INT_RAW`\"]\n\npub struct MST_TXFIFO_UDF_INT_RAW_W<'a> {\n", "file_path": "src/i2c/int_raw.rs", "rank": 70, "score": 52319.462191538325 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CH0_TX_LOOP_INT_RAW`\"]\n\npub type CH0_TX_LOOP_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH0_TX_LOOP_INT_RAW`\"]\n\npub struct CH0_TX_LOOP_INT_RAW_W<'a> {\n", "file_path": "src/rmt/int_raw.rs", "rank": 71, "score": 52319.462191538325 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `AT_CMD_CHAR_DET_INT_RAW`\"]\n\npub type AT_CMD_CHAR_DET_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `AT_CMD_CHAR_DET_INT_RAW`\"]\n\npub struct AT_CMD_CHAR_DET_INT_RAW_W<'a> {\n", "file_path": "src/uart/int_raw.rs", "rank": 72, "score": 52319.462191538325 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CH2_RX_END_INT_RAW`\"]\n\npub type CH2_RX_END_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH2_RX_END_INT_RAW`\"]\n\npub struct CH2_RX_END_INT_RAW_W<'a> {\n", "file_path": "src/rmt/int_raw.rs", "rank": 73, "score": 52319.462191538325 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CH2_RX_THR_EVENT_INT_RAW`\"]\n\npub type CH2_RX_THR_EVENT_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH2_RX_THR_EVENT_INT_RAW`\"]\n\npub struct CH2_RX_THR_EVENT_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CH2_RX_THR_EVENT_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/rmt/int_raw.rs", "rank": 74, "score": 52319.02334423624 }, { "content": "#[doc = \"Reader of field `PARITY_ERR_INT_RAW`\"]\n\npub type PARITY_ERR_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `PARITY_ERR_INT_RAW`\"]\n\npub struct PARITY_ERR_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> PARITY_ERR_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/uart/int_raw.rs", "rank": 75, "score": 52318.89117799336 }, { "content": "#[doc = \"Reader of field `BRK_DET_INT_RAW`\"]\n\npub type BRK_DET_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `BRK_DET_INT_RAW`\"]\n\npub struct BRK_DET_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> BRK_DET_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/uart/int_raw.rs", "rank": 76, "score": 52318.89117799336 }, { "content": "#[doc = \"Reader of field `TX_START_INT_RAW`\"]\n\npub type TX_START_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TX_START_INT_RAW`\"]\n\npub struct TX_START_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TX_START_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/uhci/int_raw.rs", "rank": 77, "score": 52318.89117799336 }, { "content": "#[doc = \"Reader of field `RXFIFO_WM_INT_RAW`\"]\n\npub type RXFIFO_WM_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RXFIFO_WM_INT_RAW`\"]\n\npub struct RXFIFO_WM_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RXFIFO_WM_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/i2c/int_raw.rs", "rank": 78, "score": 52318.89117799336 }, { "content": "#[doc = \"Reader of field `RS485_CLASH_INT_RAW`\"]\n\npub type RS485_CLASH_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RS485_CLASH_INT_RAW`\"]\n\npub struct RS485_CLASH_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RS485_CLASH_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/uart/int_raw.rs", "rank": 79, "score": 52318.89117799336 }, { "content": "#[doc = \"Reader of field `CH2_ERR_INT_RAW`\"]\n\npub type CH2_ERR_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH2_ERR_INT_RAW`\"]\n\npub struct CH2_ERR_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CH2_ERR_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/rmt/int_raw.rs", "rank": 80, "score": 52318.89117799336 }, { "content": "#[doc = \"Reader of field `DET_START_INT_RAW`\"]\n\npub type DET_START_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `DET_START_INT_RAW`\"]\n\npub struct DET_START_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DET_START_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/i2c/int_raw.rs", "rank": 81, "score": 52318.89117799336 }, { "content": "#[doc = \"Reader of field `ARBITRATION_LOST_INT_RAW`\"]\n\npub type ARBITRATION_LOST_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ARBITRATION_LOST_INT_RAW`\"]\n\npub struct ARBITRATION_LOST_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ARBITRATION_LOST_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/i2c/int_raw.rs", "rank": 82, "score": 52318.89117799336 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TX_BRK_IDLE_DONE_INT_RAW`\"]\n\npub type TX_BRK_IDLE_DONE_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TX_BRK_IDLE_DONE_INT_RAW`\"]\n\npub struct TX_BRK_IDLE_DONE_INT_RAW_W<'a> {\n", "file_path": "src/uart/int_raw.rs", "rank": 83, "score": 52318.74772349692 }, { "content": "#[doc = \"Reader of field `CH1_TX_END_INT_RAW`\"]\n\npub type CH1_TX_END_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH1_TX_END_INT_RAW`\"]\n\npub struct CH1_TX_END_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CH1_TX_END_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/rmt/int_raw.rs", "rank": 84, "score": 52318.03912017424 }, { "content": "#[doc = \"Reader of field `OUTLINK_EOF_ERR_INT_RAW`\"]\n\npub type OUTLINK_EOF_ERR_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OUTLINK_EOF_ERR_INT_RAW`\"]\n\npub struct OUTLINK_EOF_ERR_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OUTLINK_EOF_ERR_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/uhci/int_raw.rs", "rank": 85, "score": 52318.03912017424 }, { "content": "#[doc = \"Reader of field `TX_BRK_DONE_INT_RAW`\"]\n\npub type TX_BRK_DONE_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TX_BRK_DONE_INT_RAW`\"]\n\npub struct TX_BRK_DONE_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TX_BRK_DONE_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/uart/int_raw.rs", "rank": 86, "score": 52318.03912017424 }, { "content": "#[doc = \"Reader of field `CH3_RX_THR_EVENT_INT_RAW`\"]\n\npub type CH3_RX_THR_EVENT_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH3_RX_THR_EVENT_INT_RAW`\"]\n\npub struct CH3_RX_THR_EVENT_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CH3_RX_THR_EVENT_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/rmt/int_raw.rs", "rank": 87, "score": 52317.22318408221 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bit 19\"]\n\n #[inline(always)]\n\n pub fn wakeup_int_raw(&self) -> WAKEUP_INT_RAW_R {\n\n WAKEUP_INT_RAW_R::new(((self.bits >> 19) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 18\"]\n\n #[inline(always)]\n", "file_path": "src/uart/int_raw.rs", "rank": 88, "score": 52316.1623948121 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bit 8\"]\n\n #[inline(always)]\n\n pub fn app_ctrl1_int_raw(&self) -> APP_CTRL1_INT_RAW_R {\n\n APP_CTRL1_INT_RAW_R::new(((self.bits >> 8) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 7\"]\n\n #[inline(always)]\n\n pub fn app_ctrl0_int_raw(&self) -> APP_CTRL0_INT_RAW_R {\n\n APP_CTRL0_INT_RAW_R::new(((self.bits >> 7) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 6\"]\n", "file_path": "src/uhci/int_raw.rs", "rank": 89, "score": 52313.09253957422 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> SLAVE_STRETCH_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);\n\n self.w\n\n }\n\n}\n", "file_path": "src/i2c/int_raw.rs", "rank": 90, "score": 52312.38535683998 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> TXFIFO_WM_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n", "file_path": "src/i2c/int_raw.rs", "rank": 91, "score": 52312.38535683998 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> RX_HUNG_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n", "file_path": "src/uhci/int_raw.rs", "rank": 92, "score": 52312.38535683998 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> APP_CTRL0_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);\n\n self.w\n\n }\n\n}\n", "file_path": "src/uhci/int_raw.rs", "rank": 93, "score": 52312.38535683998 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> RXFIFO_TOUT_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);\n\n self.w\n\n }\n\n}\n", "file_path": "src/uart/int_raw.rs", "rank": 94, "score": 52312.38535683998 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> TXFIFO_OVF_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);\n\n self.w\n\n }\n\n}\n", "file_path": "src/i2c/int_raw.rs", "rank": 95, "score": 52312.38535683998 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> CH3_ERR_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);\n\n self.w\n\n }\n\n}\n", "file_path": "src/rmt/int_raw.rs", "rank": 96, "score": 52312.38535683998 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> FRM_ERR_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n", "file_path": "src/uart/int_raw.rs", "rank": 97, "score": 52312.38535683998 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> CH2_RX_END_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n", "file_path": "src/rmt/int_raw.rs", "rank": 98, "score": 52312.20832489181 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> CH0_TX_LOOP_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);\n\n self.w\n\n }\n\n}\n", "file_path": "src/rmt/int_raw.rs", "rank": 99, "score": 52312.20832489181 } ]
Rust
src/entity/manager.rs
Connicpu/conniecs
b914ff8a5ebc8214869f588aa21e2a11d695f306
use index_pool::IndexPool; use vec_map::VecMap; use std::collections::hash_map::HashMap; use std::marker::PhantomData; use std::mem; use crate::component::ComponentManager; use crate::entity::iter::{EntityIter, IndexedEntityIter}; use crate::entity::{BuildData, Entity, EntityBuilder, EntityData, Id, IndexedEntity}; use crate::services::ServiceManager; use crate::system::SystemManager; enum Event { BuildEntity(Entity), RemoveEntity(Entity), } pub struct EntityManager<C> where C: ComponentManager, { indices: IndexPool, indexed_entities: VecMap<IndexedEntity<C>>, entities: HashMap<Entity, IndexedEntity<C>>, event_queue: Vec<Event>, next_id: Id, } impl<C> EntityManager<C> where C: ComponentManager, { pub fn new() -> Self { EntityManager { indices: IndexPool::new(), indexed_entities: VecMap::new(), entities: HashMap::new(), event_queue: Vec::new(), next_id: 0, } } pub fn flush_queue<M, S>(&mut self, components: &mut C, services: &mut M, systems: &mut S) where M: ServiceManager, S: SystemManager<Components = C, Services = M>, { use self::Event::*; let mut queue = mem::replace(&mut self.event_queue, Vec::new()); for e in queue.drain(..) { match e { BuildEntity(entity) => { systems.activated(EntityData(self.indexed(entity)), components, services); } RemoveEntity(entity) => { systems.deactivated(EntityData(self.indexed(entity)), components, services); } } } self.event_queue = queue; } pub fn create_entity<B, M>( &mut self, builder: B, components: &mut C, services: &mut M, ) -> Entity where B: EntityBuilder<C, M>, M: ServiceManager, { let entity = self.create(); builder.build(BuildData(self.indexed(entity)), components, services); self.event_queue.push(Event::BuildEntity(entity)); entity } pub fn remove_entity(&mut self, entity: Entity) -> bool { if self.entities.contains_key(&entity) { self.event_queue.push(Event::RemoveEntity(entity)); true } else { false } } pub fn iter(&self) -> EntityIter<C> { EntityIter::Indexed(IndexedEntityIter { iter: self.indices.all_indices(), values: &self.indexed_entities, }) } pub fn count(&self) -> usize { self.indices.maximum() } pub fn indexed(&self, entity: Entity) -> &IndexedEntity<C> { &self.entities[&entity] } pub fn create(&mut self) -> Entity { self.next_id += 1; let entity = Entity { id: self.next_id }; let ie = IndexedEntity { index: self.indices.new_id(), entity, _marker: PhantomData, }; self.indexed_entities.insert(ie.index, ie.__clone()); self.entities.insert(entity, ie); entity } #[inline] pub fn is_valid(&self, entity: Entity) -> bool { self.entities.contains_key(&entity) } pub fn remove(&mut self, entity: Entity) { self.entities .remove(&entity) .map(|e| self.indices.return_id(e.index())); } pub fn clear(&mut self) { self.entities.clear(); self.indices = IndexPool::new(); } }
use index_pool::IndexPool; use vec_map::VecMap; use std::collections::hash_map::HashMap; use std::marker::PhantomData; use std::mem; use crate::component::ComponentManager; use crate::entity::iter::{EntityIter, IndexedEntityIter}; use crate::entity::{BuildData, Entity, EntityBuilder, EntityData, Id, IndexedEntity}; use crate::services::ServiceManager; use crate::system::SystemManager; enum Event { BuildEntity(Entity), RemoveEntity(Entity), } pub struct EntityManager<C> where C: ComponentManager, { indices: IndexPool, indexed_entities: VecMap<IndexedEntity<C>>, entities: HashMap<Entity, IndexedEntity<C>>, event_queue: Vec<Event>, next_id: Id, } impl<C> EntityManager<C> where C: ComponentManager, { pub fn new() -> Self { EntityManager { indices: IndexPool::new(), indexed_entities: VecMap::new(), entities: HashMap::new(), event_queue: Vec::new(), next_id: 0, } } pub fn flush_queue<M, S>(&mut self, components: &mut C, services: &mut M, systems: &mut S) where M: ServiceManager, S: SystemManager<Components = C, Services = M>, { use self::Event::*; let mut queue = mem::replace(&mut self.event_queue, Vec::new()); for e in queue.drain(..) { match e { BuildEntity(entity) => { systems.activated(EntityData(self.indexed(entity)), components, services); } RemoveEntity(entity) => { systems.deactivated(EntityData(self.indexed(entity)), components, services); } } } self.event_queue = queue; } pub fn create_entity<B, M>( &mut self, builder: B, components: &mut C, services: &mut M, ) -> Entity where B: EntityBuilder<C, M>, M: ServiceManager, { let entity = self.create(); builder.build(BuildData(self.indexed(entity)), components, services); self.event_queue.push(Event::BuildEntity(entity)); entity } pub fn remove_entity(&mut self, entity: Entity) -> bool {
} pub fn iter(&self) -> EntityIter<C> { EntityIter::Indexed(IndexedEntityIter { iter: self.indices.all_indices(), values: &self.indexed_entities, }) } pub fn count(&self) -> usize { self.indices.maximum() } pub fn indexed(&self, entity: Entity) -> &IndexedEntity<C> { &self.entities[&entity] } pub fn create(&mut self) -> Entity { self.next_id += 1; let entity = Entity { id: self.next_id }; let ie = IndexedEntity { index: self.indices.new_id(), entity, _marker: PhantomData, }; self.indexed_entities.insert(ie.index, ie.__clone()); self.entities.insert(entity, ie); entity } #[inline] pub fn is_valid(&self, entity: Entity) -> bool { self.entities.contains_key(&entity) } pub fn remove(&mut self, entity: Entity) { self.entities .remove(&entity) .map(|e| self.indices.return_id(e.index())); } pub fn clear(&mut self) { self.entities.clear(); self.indices = IndexPool::new(); } }
if self.entities.contains_key(&entity) { self.event_queue.push(Event::RemoveEntity(entity)); true } else { false }
if_condition
[ { "content": "fn activated(_: &mut IVSystem, _: EntityData, _: &Components, _: &mut Services) {\n\n ATOMIC_BOOP.store(true, std::sync::atomic::Ordering::SeqCst);\n\n}\n\n\n", "file_path": "tests/aspects.rs", "rank": 0, "score": 173722.56101871424 }, { "content": "pub trait EntityBuilder<C, M>\n\nwhere\n\n C: ComponentManager,\n\n M: ServiceManager,\n\n{\n\n fn build<'a>(self, entity: BuildData<'a, C>, components: &mut C, services: &mut M);\n\n}\n\n\n\nimpl<C, M> EntityBuilder<C, M> for ()\n\nwhere\n\n C: ComponentManager,\n\n M: ServiceManager,\n\n{\n\n fn build(self, _: BuildData<C>, _: &mut C, _: &mut M) {}\n\n}\n\n\n\nimpl<C, M, F> EntityBuilder<C, M> for F\n\nwhere\n\n C: ComponentManager,\n\n M: ServiceManager,\n\n F: FnOnce(BuildData<C>, &mut C, &mut M),\n\n{\n\n fn build(self, entity: BuildData<C>, components: &mut C, services: &mut M) {\n\n self(entity, components, services)\n\n }\n\n}\n\n\n", "file_path": "src/entity/builder.rs", "rank": 1, "score": 149730.37438659018 }, { "content": "fn eprocess(_: &mut ESystem, entities: EntityIter, data: &mut DataHelper) {\n\n for entity in entities {\n\n data.components.foo[entity].push_str(\"ghjkl\");\n\n }\n\n}\n\n\n\n#[derive(Debug, Default, System)]\n\n#[system_type(Interact)]\n\n#[process = \"iprocess\"]\n\n#[aspect_a(all(bar), none(baz))]\n\n#[aspect_b(all(baz), none(bar))]\n\npub struct ISystem;\n\n\n", "file_path": "tests/aspects.rs", "rank": 2, "score": 144772.27767466046 }, { "content": "pub trait EntityModifier<C, M>\n\nwhere\n\n C: ComponentManager,\n\n M: ServiceManager,\n\n{\n\n fn modify<'a>(self, entity: ModifyData<'a, C>, components: &mut C, services: &mut M);\n\n}\n\n\n\nimpl<C, M> EntityModifier<C, M> for ()\n\nwhere\n\n C: ComponentManager,\n\n M: ServiceManager,\n\n{\n\n fn modify(self, _: ModifyData<C>, _: &mut C, _: &mut M) {}\n\n}\n\n\n\nimpl<C, M, F> EntityModifier<C, M> for F\n\nwhere\n\n C: ComponentManager,\n\n M: ServiceManager,\n\n F: FnOnce(ModifyData<C>, &mut C, &mut M),\n\n{\n\n fn modify(self, entity: ModifyData<C>, components: &mut C, services: &mut M) {\n\n self(entity, components, services)\n\n }\n\n}\n", "file_path": "src/entity/builder.rs", "rank": 3, "score": 140801.4132852698 }, { "content": "/// This trait is implemented automatically when you `#[derive(System)]` with the following:\n\n///\n\n/// ```\n\n/// # #[macro_use] extern crate conniecs_derive; extern crate conniecs;\n\n/// # use conniecs::{EntityIter, DataHelper};\n\n/// # #[derive(ComponentManager)] struct Components {\n\n/// # #[hot] pub foo: conniecs::ComponentList<Components, ()>, }\n\n/// #[derive(System, Default)]\n\n/// #[system_type(entity)]\n\n/// #[aspect = \"some::aspect::UnitStruct\"]\n\n/// #[process(process)]\n\n/// struct MySystem;\n\n/// # fn process(_: &mut MySystem, _: EntityIter<Components>, _:\n\n/// # &mut DataHelper<Components, Services>) {}\n\n/// # mod some { pub mod aspect { #[derive(Aspect, Copy, Clone)]\n\n/// # #[aspect(all(foo))] pub struct UnitStruct; } }\n\n/// # #[derive(ServiceManager, Default)] struct Services {}\n\n/// # #[derive(SystemManager)] struct Systems {\n\n/// # #[passive] sys: conniecs::system::EntitySystem<MySystem> }\n\n/// # fn main() { conniecs::World::<Systems>::new(); }\n\n/// ```\n\n///\n\n/// or\n\n///\n\n/// ```\n\n/// # #[macro_use] extern crate conniecs_derive; extern crate conniecs;\n\n/// # use conniecs::{EntityIter};\n\n/// # #[derive(ComponentManager)] struct Components { #[hot] pub foo:\n\n/// # conniecs::ComponentList<Components, String>, }\n\n/// #[derive(System, Default)]\n\n/// #[system_type(entity)]\n\n/// #[aspect(all(foo))]\n\n/// #[process = \"process\"]\n\n/// struct MySystem;\n\n///\n\n/// type DataHelper = conniecs::DataHelper<Components, Services>;\n\n/// fn process(_: &mut MySystem, entities: EntityIter<Components>, data: &mut DataHelper) {\n\n/// for entity in entities {\n\n/// println!(\"boop the {}\", &data.components.foo[entity]);\n\n/// }\n\n/// }\n\n/// # #[derive(ServiceManager, Default)] struct Services {}\n\n/// # #[derive(SystemManager)] struct Systems {\n\n/// # #[passive] sys: conniecs::system::EntitySystem<MySystem> }\n\n/// # fn main() { conniecs::World::<Systems>::new(); }\n\n/// ```\n\npub trait FilteredEntitySystem: System {\n\n fn create_aspect() -> Aspect<Self::Components>;\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct EntitySystem<T>\n\nwhere\n\n T: EntityProcess,\n\n{\n\n pub inner: T,\n\n pub watcher: Watcher<T::Components>,\n\n}\n\n\n\nimpl<T> Deref for EntitySystem<T>\n\nwhere\n\n T: EntityProcess,\n\n{\n\n type Target = T;\n\n fn deref(&self) -> &T {\n\n &self.inner\n", "file_path": "src/system/entity.rs", "rank": 5, "score": 128674.9644513795 }, { "content": "pub trait EditData<C: ComponentManager> {\n\n #[doc(hidden)]\n\n fn entity(&self) -> &IndexedEntity<C>;\n\n #[doc(hidden)]\n\n fn can_insert_components() -> bool;\n\n}\n\n\n\nimpl<'a, C: ComponentManager> EditData<C> for ModifyData<'a, C> {\n\n #[doc(hidden)]\n\n #[inline]\n\n fn entity(&self) -> &IndexedEntity<C> {\n\n self.0\n\n }\n\n\n\n #[doc(hidden)]\n\n #[inline]\n\n fn can_insert_components() -> bool {\n\n true\n\n }\n\n}\n", "file_path": "src/entity/data.rs", "rank": 6, "score": 128055.03335763628 }, { "content": "fn iprocess(_: &mut ISystem, bars: EntityIter, bazes: EntityIter, data: &mut DataHelper) {\n\n for bar_entity in bars {\n\n for baz_entity in bazes.clone() {\n\n data.components.baz[baz_entity][0] += data.components.bar[bar_entity];\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Default, System)]\n\n#[system_type(Interval)]\n\n#[process = \"interval_process\"]\n\n#[activated = \"activated\"]\n\n#[interval = \"3\"]\n\npub struct IVSystem {\n\n pub booped: bool,\n\n}\n\n\n", "file_path": "tests/aspects.rs", "rank": 7, "score": 117024.93631364533 }, { "content": "pub trait EntityProcess: FilteredEntitySystem {\n\n fn process<'a>(\n\n &mut self,\n\n entities: EntityIter<'a, Self::Components>,\n\n data: &mut DataHelper<Self::Components, Self::Services>,\n\n );\n\n}\n\n\n", "file_path": "src/system/entity.rs", "rank": 8, "score": 104954.29049761867 }, { "content": "fn interval_process(iv: &mut IVSystem, _: &mut DataHelper) {\n\n iv.booped = !iv.booped;\n\n}\n\n\n\nstatic ATOMIC_BOOP: std::sync::atomic::AtomicBool = std::sync::atomic::ATOMIC_BOOL_INIT;\n\n\n", "file_path": "tests/aspects.rs", "rank": 9, "score": 104759.24728308315 }, { "content": "pub fn read_data_items<'a>(\n\n mut items: impl Iterator<Item = &'a NestedMeta>,\n\n) -> (proc_macro2::TokenStream, proc_macro2::TokenStream) {\n\n let (item0, item1) = match (items.next(), items.next(), items.next()) {\n\n (Some(item0), Some(item1), None) => (item0, item1),\n\n _ => improper_data_fmt(),\n\n };\n\n\n\n let comps = match item0 {\n\n NestedMeta::Literal(Lit::Str(path)) => path.value(),\n\n NestedMeta::Meta(Meta::Word(word)) => word.to_string(),\n\n NestedMeta::Meta(Meta::NameValue(MetaNameValue {\n\n ident,\n\n lit: Lit::Str(path),\n\n ..\n\n })) if ident == \"components\" => path.value(),\n\n _ => improper_data_fmt(),\n\n };\n\n\n\n let servs = match item1 {\n", "file_path": "conniecs-derive/src/system.rs", "rank": 10, "score": 103335.42093760139 }, { "content": "pub trait AspectFilter<C: ComponentManager>: 'static {\n\n fn check<'a>(&self, entity: EntityData<'a, C>, components: &C) -> bool;\n\n}\n\n\n\nimpl<F, C> AspectFilter<C> for F\n\nwhere\n\n C: ComponentManager,\n\n F: Fn(EntityData<C>, &C) -> bool + 'static,\n\n{\n\n #[inline]\n\n fn check<'a>(&self, entity: EntityData<'a, C>, components: &C) -> bool {\n\n (*self)(entity, components)\n\n }\n\n}\n\n\n", "file_path": "src/aspect.rs", "rank": 11, "score": 103098.15760177544 }, { "content": "fn is_passive(attrs: &[Attribute]) -> bool {\n\n for attr in attrs {\n\n let meta = attr.parse_meta().unwrap();\n\n if meta.name() == \"passive\" {\n\n return true;\n\n }\n\n }\n\n\n\n false\n\n}\n", "file_path": "conniecs-derive/src/systems.rs", "rank": 12, "score": 103084.62574952564 }, { "content": "pub trait System {\n\n type Components: ComponentManager;\n\n type Services: ServiceManager;\n\n\n\n fn build_system() -> Self;\n\n\n\n #[inline]\n\n fn activated(\n\n &mut self,\n\n entity: EntityData<Self::Components>,\n\n components: &Self::Components,\n\n services: &mut Self::Services,\n\n ) {\n\n let (_, _, _) = (entity, components, services);\n\n }\n\n\n\n #[inline]\n\n fn reactivated(\n\n &mut self,\n\n entity: EntityData<Self::Components>,\n", "file_path": "src/system/mod.rs", "rank": 13, "score": 102998.58634926792 }, { "content": "pub trait SystemInterval: System {\n\n fn create_interval() -> TickerState;\n\n}\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub struct IntervalSystem<T>\n\nwhere\n\n T: SystemInterval,\n\n{\n\n pub inner: T,\n\n pub ticker: TickerState,\n\n}\n\n\n\nimpl<T> Deref for IntervalSystem<T>\n\nwhere\n\n T: SystemInterval,\n\n{\n\n type Target = T;\n\n fn deref(&self) -> &T {\n\n &self.inner\n", "file_path": "src/system/interval.rs", "rank": 14, "score": 98823.3750660349 }, { "content": "pub trait InteractSystemFilter: System {\n\n fn create_filter_a() -> Aspect<Self::Components>;\n\n fn create_filter_b() -> Aspect<Self::Components>;\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct InteractSystem<T>\n\nwhere\n\n T: InteractProcess,\n\n{\n\n pub inner: T,\n\n pub watcher_a: Watcher<T::Components>,\n\n pub watcher_b: Watcher<T::Components>,\n\n}\n\n\n\nimpl<T> Deref for InteractSystem<T>\n\nwhere\n\n T: InteractProcess,\n\n{\n\n type Target = T;\n", "file_path": "src/system/interact.rs", "rank": 15, "score": 97138.98978068968 }, { "content": "pub trait Process: System {\n\n fn process(&mut self, data: &mut DataHelper<Self::Components, Self::Services>);\n\n}\n\n\n", "file_path": "src/system/mod.rs", "rank": 16, "score": 96840.81473837285 }, { "content": "#[test]\n\npub fn simulate() {\n\n let mut world = conniecs::World::<Systems>::new();\n\n assert_eq!(world.systems.ivsystem.booped, false);\n\n assert_eq!(ATOMIC_BOOP.load(std::sync::atomic::Ordering::SeqCst), false);\n\n\n\n let asdf = world.data.create_entity(|e, c, _| {\n\n // We need a foo!\n\n c.foo.add(e, \"asdf\".to_string());\n\n });\n\n\n\n let bar = world.data.create_entity(|e, c, _| {\n\n // We need a bar\n\n c.bar.add(e, 0.25);\n\n });\n\n let _bar1 = world.data.create_entity(|e, c, _| {\n\n // We need another bar\n\n c.bar.add(e, 0.25);\n\n });\n\n\n\n let baz = world.data.create_entity(|e, c, _| {\n", "file_path": "tests/aspects.rs", "rank": 17, "score": 95164.16475673037 }, { "content": "#[proc_macro_derive(ServiceManager)]\n\npub fn derive_services(input: TokenStream) -> TokenStream {\n\n // Parse the string representation\n\n let ast = parse_macro_input!(input as DeriveInput);\n\n\n\n // Build the impl\n\n let result = services::impl_services(ast);\n\n\n\n // Return the generated impl\n\n result.into()\n\n}\n\n\n\n#[proc_macro_derive(\n\n System,\n\n attributes(\n\n data,\n\n system_type,\n\n process,\n\n aspect,\n\n aspect_a,\n\n aspect_b,\n\n interval,\n\n timed_interval,\n\n activated,\n\n reactivated,\n\n deactivated\n\n )\n\n)]\n", "file_path": "conniecs-derive/src/lib.rs", "rank": 18, "score": 91208.66804811859 }, { "content": "#[proc_macro_derive(ComponentManager, attributes(hot, cold))]\n\npub fn derive_components(input: TokenStream) -> TokenStream {\n\n // Parse the string representation\n\n let ast = parse_macro_input!(input as DeriveInput);\n\n\n\n // Build the impl\n\n let result = components::impl_components(ast);\n\n\n\n // Return the generated impl\n\n result.into()\n\n}\n\n\n", "file_path": "conniecs-derive/src/lib.rs", "rank": 19, "score": 90828.21923434854 }, { "content": "pub fn impl_services(ast: syn::DeriveInput) -> proc_macro2::TokenStream {\n\n let name = ast.ident;\n\n\n\n quote!{\n\n impl ::conniecs::services::ServiceManager for #name {\n\n #[doc(hidden)]\n\n fn __please_use_the_derive_attribute() {}\n\n }\n\n }\n\n}\n", "file_path": "conniecs-derive/src/services.rs", "rank": 20, "score": 90227.76621705634 }, { "content": "pub fn impl_components(ast: syn::DeriveInput) -> proc_macro2::TokenStream {\n\n if ast.generics != Default::default() {\n\n panic!(\"There may not be generics attached to the Components struct\");\n\n }\n\n\n\n let name = ast.ident;\n\n let fields = match &ast.data {\n\n Data::Struct(data) => match &data.fields {\n\n Fields::Named(fields) => Some(&fields.named),\n\n Fields::Unit => None,\n\n Fields::Unnamed(_) => {\n\n panic!(\"Components may not be represented by a tuple struct.\");\n\n }\n\n },\n\n Data::Union(_) => {\n\n panic!(\"Components may not be represented by a union. Structs only.\");\n\n }\n\n Data::Enum(_) => {\n\n panic!(\"Components may not be represented by an enum. Structs only.\");\n\n }\n", "file_path": "conniecs-derive/src/components.rs", "rank": 21, "score": 89720.5495584287 }, { "content": "fn panicker_update(_: &mut Panicker, _: &mut DataHelper) {\n\n panic!(\"this shouldn't get called\");\n\n}\n\n\n\n#[derive(Debug, Default, System)]\n\n#[system_type(Entity)]\n\n#[process = \"eprocess\"]\n\n#[aspect(all(foo), none(qux))]\n\npub struct ESystem;\n\n\n", "file_path": "tests/aspects.rs", "rank": 22, "score": 89545.712135905 }, { "content": "#[proc_macro_derive(SystemManager, attributes(data, passive))]\n\npub fn derive_systems(input: TokenStream) -> TokenStream {\n\n // Parse the string representation\n\n let ast = parse_macro_input!(input as DeriveInput);\n\n\n\n // Build the impl\n\n let result = systems::impl_systems(ast);\n\n\n\n // Return the generated impl\n\n result.into()\n\n}\n\n\n", "file_path": "conniecs-derive/src/lib.rs", "rank": 23, "score": 88461.00995124456 }, { "content": "pub fn derive_system(input: TokenStream) -> TokenStream {\n\n // Parse the string representation\n\n let ast = parse_macro_input!(input as DeriveInput);\n\n\n\n // Build the impl\n\n let result = system::impl_system(ast);\n\n\n\n // Return the generated impl\n\n result.into()\n\n}\n\n\n", "file_path": "conniecs-derive/src/lib.rs", "rank": 24, "score": 88458.30032049588 }, { "content": "pub fn quote_aspect(\n\n ty: &Ident,\n\n cty: &proc_macro2::TokenStream,\n\n all_filters: &[Ident],\n\n none_filters: &[Ident],\n\n) -> proc_macro2::TokenStream {\n\n quote! {\n\n impl ::conniecs::aspect::AspectFilter<#cty> for #ty {\n\n fn check<'a>(&self, entity: ::conniecs::EntityData<'a, #cty >, components: & #cty ) -> bool {\n\n #(\n\n if !components.#all_filters.has(entity) {\n\n return false;\n\n }\n\n )*\n\n #(\n\n if components.#none_filters.has(entity) {\n\n return false;\n\n }\n\n )*\n\n true\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "conniecs-derive/src/aspect.rs", "rank": 25, "score": 88232.96993270525 }, { "content": "fn process_update(_: &mut Update, data: &mut DataHelper) {\n\n for entity in data.entities() {\n\n if data.components.foo.has(entity) {\n\n println!(\"{}\", data.components.foo[entity]);\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Default, System)]\n\n#[process(panicker_update)]\n\npub struct Panicker;\n\n\n", "file_path": "tests/aspects.rs", "rank": 26, "score": 87891.5438859821 }, { "content": "pub fn impl_systems(ast: syn::DeriveInput) -> proc_macro2::TokenStream {\n\n let name = &ast.ident;\n\n let mut cs_data = None;\n\n\n\n for attr in &ast.attrs {\n\n let meta = attr.parse_meta().unwrap();\n\n match meta.name().to_string().as_str() {\n\n \"data\" => cs_data = Some(read_data(&meta)),\n\n _ => (),\n\n }\n\n }\n\n\n\n let fields = match &ast.data {\n\n Data::Struct(data) => match &data.fields {\n\n Fields::Named(fields) => Some(&fields.named),\n\n Fields::Unit => None,\n\n Fields::Unnamed(_) => {\n\n panic!(\"Components may not be represented by a tuple struct.\")\n\n }\n\n }\n", "file_path": "conniecs-derive/src/systems.rs", "rank": 27, "score": 86559.36300630885 }, { "content": "pub fn impl_system(ast: syn::DeriveInput) -> proc_macro2::TokenStream {\n\n let mut kind = SystemType::Basic;\n\n\n\n for attr in &ast.attrs {\n\n let meta = attr.parse_meta().unwrap();\n\n match meta.name().to_string().as_str() {\n\n \"system_type\" => kind = read_systy(&meta),\n\n _ => (),\n\n }\n\n }\n\n\n\n match kind {\n\n SystemType::Basic => impl_basic_system(&ast),\n\n SystemType::Entity => impl_entity_system(&ast),\n\n SystemType::Lazy => impl_lazy_system(&ast),\n\n SystemType::Interval => impl_interval_system(&ast),\n\n SystemType::Interact => impl_interact_system(&ast),\n\n }\n\n}\n\n\n", "file_path": "conniecs-derive/src/system.rs", "rank": 28, "score": 86559.36300630885 }, { "content": "pub fn read_aspect<'a>(\n\n items: impl IntoIterator<Item = &'a NestedMeta>,\n\n all: &mut Vec<Ident>,\n\n none: &mut Vec<Ident>,\n\n) {\n\n for item in items {\n\n let item = unwrap_meta(item);\n\n let items = unwrap_list(item);\n\n match item.name().to_string().as_str() {\n\n \"all\" => {\n\n for item in items {\n\n let item = unwrap_meta(item);\n\n let component = unwrap_word(item);\n\n all.push(component.clone());\n\n }\n\n }\n\n \"none\" => {\n\n for item in items {\n\n let item = unwrap_meta(item);\n\n let component = unwrap_word(item);\n\n none.push(component.clone());\n\n }\n\n }\n\n _ => improper_format(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "conniecs-derive/src/aspect.rs", "rank": 29, "score": 84607.61276766294 }, { "content": "enum SystemType {\n\n Basic,\n\n Entity,\n\n Lazy,\n\n Interval,\n\n Interact,\n\n}\n\n\n", "file_path": "conniecs-derive/src/system.rs", "rank": 30, "score": 82817.91342342037 }, { "content": "pub fn read_aspect_meta<'a>(\n\n attr: &'a Meta,\n\n all: &mut Vec<Ident>,\n\n none: &mut Vec<Ident>,\n\n) -> Option<proc_macro2::TokenStream> {\n\n match attr {\n\n Meta::List(list) => {\n\n read_aspect(list.nested.iter(), all, none);\n\n None\n\n }\n\n Meta::NameValue(MetaNameValue {\n\n lit: Lit::Str(path),\n\n ..\n\n }) => Some(quote_path(&path.value())),\n\n _ => improper_format(),\n\n }\n\n}\n\n\n", "file_path": "conniecs-derive/src/aspect.rs", "rank": 31, "score": 82672.37799603821 }, { "content": "/// Marks types which are suitable for being components. It is implemented for all\n\n/// types which are `'static`.\n\npub trait Component: 'static {}\n\nimpl<T: 'static> Component for T {}\n\n\n", "file_path": "src/component.rs", "rank": 32, "score": 81323.40175766646 }, { "content": "type EntityData<'a> = conniecs::EntityData<'a, Components>;\n\n\n\n#[derive(Aspect)]\n\n#[aspect(all(foo, bar))]\n\npub struct FooBarAspect;\n\n\n\n#[derive(Debug, Default, ServiceManager)]\n\npub struct Services;\n\n\n\n#[derive(Debug, ComponentManager)]\n\npub struct Components {\n\n #[cold]\n\n pub foo: Comps<String>,\n\n\n\n #[hot]\n\n pub bar: Comps<f32>,\n\n #[hot]\n\n pub baz: Comps<[f32; 3]>,\n\n\n\n #[cold]\n", "file_path": "tests/aspects.rs", "rank": 33, "score": 79793.31173080736 }, { "content": "pub trait ServiceManager: 'static {\n\n #[doc(hidden)]\n\n fn __please_use_the_derive_attribute();\n\n}\n\n\n\nimpl ServiceManager for () {\n\n #[doc(hidden)]\n\n fn __please_use_the_derive_attribute() {}\n\n}\n", "file_path": "src/services.rs", "rank": 34, "score": 79759.77470232907 }, { "content": "/// This is the trait implemented for your struct containing all of your\n\n/// component lists. You should not try to implement this manually. Use\n\n/// `#[derive(Components)]` instead. See the module documentation for more\n\n/// information.\n\npub trait ComponentManager: 'static {\n\n fn build_manager() -> Self;\n\n\n\n #[doc(hidden)]\n\n fn __wipe_all(&mut self);\n\n\n\n #[doc(hidden)]\n\n fn __please_use_the_derive_attribute();\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ComponentList<C, T>\n\nwhere\n\n C: ComponentManager,\n\n T: Component,\n\n{\n\n pub(crate) inner: InnerComponentList<T>,\n\n _marker: PhantomData<C>,\n\n}\n\n\n", "file_path": "src/component.rs", "rank": 35, "score": 79125.51502380779 }, { "content": "pub trait SystemManager {\n\n type Components: ComponentManager;\n\n type Services: ServiceManager;\n\n\n\n fn build_manager() -> Self;\n\n\n\n fn activated(\n\n &mut self,\n\n entity: EntityData<Self::Components>,\n\n components: &Self::Components,\n\n services: &mut Self::Services,\n\n );\n\n\n\n fn reactivated(\n\n &mut self,\n\n entity: EntityData<Self::Components>,\n\n components: &Self::Components,\n\n services: &mut Self::Services,\n\n );\n\n\n", "file_path": "src/system/mod.rs", "rank": 36, "score": 77061.21282814152 }, { "content": "pub fn read_data(item: &Meta) -> (proc_macro2::TokenStream, proc_macro2::TokenStream) {\n\n match item {\n\n Meta::List(items) => read_data_items(items.nested.iter()),\n\n _ => improper_data_fmt(),\n\n }\n\n}\n\n\n", "file_path": "conniecs-derive/src/system.rs", "rank": 37, "score": 74159.29632420116 }, { "content": "fn impl_entity_system(ast: &syn::DeriveInput) -> proc_macro2::TokenStream {\n\n let name = &ast.ident;\n\n let mut cs_data = None;\n\n let mut init_func = None;\n\n let mut process_func = None;\n\n let mut aspect_all = vec![];\n\n let mut aspect_none = vec![];\n\n\n\n let aspect_id = Ident::new(&format!(\"{}EntityAspect\", name), Span::call_site());\n\n let mut aspect_path = None;\n\n\n\n for attr in &ast.attrs {\n\n let meta = attr.parse_meta().unwrap();\n\n match meta.name().to_string().as_str() {\n\n \"data\" => cs_data = Some(read_data(&meta)),\n\n \"init\" => init_func = Some(read_path_item(&meta, || improper_init_fmt())),\n\n \"process\" => process_func = Some(read_path_item(&meta, || improper_process_fmt())),\n\n \"aspect\" => aspect_path = read_aspect_meta(&meta, &mut aspect_all, &mut aspect_none),\n\n _ => (),\n\n }\n", "file_path": "conniecs-derive/src/system.rs", "rank": 38, "score": 72653.85847000366 }, { "content": "#[proc_macro_derive(Aspect, attributes(aspect, components))]\n\npub fn derive_aspect(input: TokenStream) -> TokenStream {\n\n // Parse the string representation\n\n let ast = parse_macro_input!(input as DeriveInput);\n\n\n\n // Build the impl\n\n let result = aspect::impl_aspect(ast);\n\n\n\n //panic!(\"{}\", result);\n\n\n\n result.into()\n\n}\n\n\n", "file_path": "conniecs-derive/src/lib.rs", "rank": 39, "score": 70684.21195401519 }, { "content": "pub trait InteractProcess: InteractSystemFilter {\n\n fn process<'a>(\n\n &mut self,\n\n entities_a: EntityIter<'a, Self::Components>,\n\n entities_b: EntityIter<'a, Self::Components>,\n\n data: &mut DataHelper<Self::Components, Self::Services>,\n\n );\n\n}\n\n\n", "file_path": "src/system/interact.rs", "rank": 40, "score": 69716.6293217522 }, { "content": "fn improper_interval_fmt() -> ! {\n\n improper_attr_format(\"#[interval = ...]\", \"conniecs::system\");\n\n}\n\n\n", "file_path": "conniecs-derive/src/system.rs", "rank": 41, "score": 65864.24787402073 }, { "content": "fn improper_process_fmt() -> ! {\n\n improper_attr_format(\"#[process(...)]\", \"conniecs::system\");\n\n}\n\n\n", "file_path": "conniecs-derive/src/system.rs", "rank": 42, "score": 65864.24787402073 }, { "content": "fn improper_reactivated_fmt() -> ! {\n\n improper_attr_format(\"#[reactivated = ...]\", \"conniecs::system\");\n\n}\n\n\n", "file_path": "conniecs-derive/src/system.rs", "rank": 43, "score": 65864.24787402073 }, { "content": "fn improper_deactivated_fmt() -> ! {\n\n improper_attr_format(\"#[deactivated = ...]\", \"conniecs::system\");\n\n}\n", "file_path": "conniecs-derive/src/system.rs", "rank": 44, "score": 65864.24787402073 }, { "content": "fn improper_activated_fmt() -> ! {\n\n improper_attr_format(\"#[activated = ...]\", \"conniecs::system\");\n\n}\n\n\n", "file_path": "conniecs-derive/src/system.rs", "rank": 45, "score": 65864.24787402073 }, { "content": "fn improper_init_fmt() -> ! {\n\n improper_attr_format(\"#[init(...)]\", \"conniecs::system\");\n\n}\n\n\n", "file_path": "conniecs-derive/src/system.rs", "rank": 46, "score": 65864.24787402073 }, { "content": "fn improper_systy_fmt() -> ! {\n\n improper_attr_format(\"#[system_type(...)]\", \"conniecs::system\");\n\n}\n\n\n", "file_path": "conniecs-derive/src/system.rs", "rank": 47, "score": 65864.24787402073 }, { "content": "fn improper_data_fmt() -> ! {\n\n improper_attr_format(\"#[data(...)]\", \"conniecs::system\");\n\n}\n\n\n", "file_path": "conniecs-derive/src/system.rs", "rank": 48, "score": 65864.24787402073 }, { "content": "fn parse_explicit_iv<'a>(\n\n mut items: impl Iterator<Item = &'a NestedMeta>,\n\n) -> proc_macro2::TokenStream {\n\n let item = items.next();\n\n if item.is_none() || items.next().is_some() {\n\n improper_interval_fmt();\n\n }\n\n\n\n match item.unwrap() {\n\n NestedMeta::Literal(Lit::Str(time)) => parse_iv_time(&time.value()),\n\n NestedMeta::Literal(Lit::Int(time)) => frame_iv(time.value()),\n\n NestedMeta::Meta(Meta::NameValue(mnv)) => {\n\n let iv = match &mnv.lit {\n\n Lit::Str(time) => parse_u64(&time.value()),\n\n Lit::Int(iv) => iv.value(),\n\n _ => improper_interval_fmt(),\n\n };\n\n let ns = match mnv.ident.to_string().as_str() {\n\n \"ticks\" => return frame_iv(iv),\n\n\n", "file_path": "conniecs-derive/src/system.rs", "rank": 49, "score": 63816.95845070253 }, { "content": "fn read_systy(attr: &Meta) -> SystemType {\n\n let systy = read_path_item(attr, || improper_systy_fmt());\n\n match &systy[..] {\n\n \"Basic\" | \"basic\" => SystemType::Basic,\n\n \"Entity\" | \"entity\" => SystemType::Entity,\n\n \"Lazy\" | \"lazy\" => SystemType::Lazy,\n\n \"Interval\" | \"interval\" => SystemType::Interval,\n\n \"Interact\" | \"interact\" => SystemType::Interact,\n\n _ => improper_systy_fmt(),\n\n }\n\n}\n\n\n", "file_path": "conniecs-derive/src/system.rs", "rank": 50, "score": 63416.73393206665 }, { "content": "pub fn impl_aspect(ast: syn::DeriveInput) -> proc_macro2::TokenStream {\n\n let ty = &ast.ident;\n\n let mut all_filters = vec![];\n\n let mut none_filters = vec![];\n\n let mut components_ty = None;\n\n\n\n for attr in &ast.attrs {\n\n let meta = attr.parse_meta().unwrap();\n\n\n\n match (attr.path.segments[0].ident.to_string().as_str(), &meta) {\n\n (\"components\", meta) => {\n\n let word = read_path_item(meta, || improper_comp_format());\n\n components_ty = Some(word);\n\n }\n\n (\"aspect\", Meta::List(list)) => {\n\n read_aspect(list.nested.iter(), &mut all_filters, &mut none_filters);\n\n }\n\n _ => continue,\n\n }\n\n }\n\n\n\n let cty = match components_ty {\n\n Some(ty) => quote_path(&ty),\n\n None => quote_path(\"crate::Components\"),\n\n };\n\n\n\n quote_aspect(ty, &cty, &all_filters, &none_filters)\n\n}\n\n\n", "file_path": "conniecs-derive/src/aspect.rs", "rank": 51, "score": 62824.10493988923 }, { "content": "use crate::component::ComponentManager;\n\nuse crate::entity::{BuildData, ModifyData};\n\nuse crate::services::ServiceManager;\n\n\n", "file_path": "src/entity/builder.rs", "rank": 52, "score": 58872.30554334964 }, { "content": "struct All;\n", "file_path": "src/aspect.rs", "rank": 53, "score": 58382.3852751766 }, { "content": "fn parse_u64(time: &str) -> u64 {\n\n time.parse().map_err(|_| improper_interval_fmt()).unwrap()\n\n}\n\n\n", "file_path": "conniecs-derive/src/system.rs", "rank": 54, "score": 57218.71902429292 }, { "content": "fn field_info(field: &Field) -> (&Ident, Ident) {\n\n let kind_attr = field\n\n .attrs\n\n .iter()\n\n .filter_map(|a| a.parse_meta().ok())\n\n .filter(|m| m.name() == \"hot\" || m.name() == \"cold\")\n\n .nth(0);\n\n\n\n match kind_attr {\n\n Some(Meta::Word(kind)) => (field.ident.as_ref().unwrap(), kind),\n\n _ => panic!(\"All component lists must be marked with either #[hot] or #[cold]\"),\n\n }\n\n}\n", "file_path": "conniecs-derive/src/components.rs", "rank": 55, "score": 57013.80092252123 }, { "content": "struct None;\n\n\n\nimpl<C> AspectFilter<C> for All\n\nwhere\n\n C: ComponentManager,\n\n{\n\n #[inline]\n\n fn check<'a>(&self, _: EntityData<'a, C>, _: &C) -> bool {\n\n true\n\n }\n\n}\n\n\n\nimpl<C> AspectFilter<C> for None\n\nwhere\n\n C: ComponentManager,\n\n{\n\n #[inline]\n\n fn check<'a>(&self, _: EntityData<'a, C>, _: &C) -> bool {\n\n false\n\n }\n\n}\n", "file_path": "src/aspect.rs", "rank": 56, "score": 56767.607737633705 }, { "content": "type EntityIter<'a> = conniecs::EntityIter<'a, Components>;\n", "file_path": "tests/aspects.rs", "rank": 57, "score": 54929.330668913644 }, { "content": "fn impl_basic_system(ast: &syn::DeriveInput) -> proc_macro2::TokenStream {\n\n let name = &ast.ident;\n\n let mut cs_data = None;\n\n let mut init_func = None;\n\n let mut process_func = None;\n\n\n\n for attr in &ast.attrs {\n\n let meta = attr.parse_meta().unwrap();\n\n match meta.name().to_string().as_str() {\n\n \"data\" => cs_data = Some(read_data(&meta)),\n\n \"init\" => init_func = Some(read_path_item(&meta, || improper_init_fmt())),\n\n \"process\" => process_func = Some(read_path_item(&meta, || improper_process_fmt())),\n\n _ => (),\n\n }\n\n }\n\n\n\n let (components, services) = match cs_data {\n\n Some((c, s)) => (c, s),\n\n None => (quote_path(\"crate::Components\"), quote_path(\"crate::Services\")),\n\n };\n", "file_path": "conniecs-derive/src/system.rs", "rank": 58, "score": 54776.470619773725 }, { "content": "fn impl_lazy_system(ast: &syn::DeriveInput) -> proc_macro2::TokenStream {\n\n let name = &ast.ident;\n\n let mut cs_data = None;\n\n let mut process_func = None;\n\n\n\n for attr in &ast.attrs {\n\n let meta = attr.parse_meta().unwrap();\n\n match meta.name().to_string().as_str() {\n\n \"data\" => cs_data = Some(read_data(&meta)),\n\n \"process\" => process_func = Some(read_path_item(&meta, || improper_process_fmt())),\n\n _ => (),\n\n }\n\n }\n\n\n\n let (components, services) = match cs_data {\n\n Some((c, s)) => (c, s),\n\n None => (quote_path(\"crate::Components\"), quote_path(\"crate::Services\")),\n\n };\n\n\n\n let process = if let Some(proc_func) = process_func {\n", "file_path": "conniecs-derive/src/system.rs", "rank": 59, "score": 54776.470619773725 }, { "content": "fn impl_interval_system(ast: &syn::DeriveInput) -> proc_macro2::TokenStream {\n\n let name = &ast.ident;\n\n let mut cs_data = None;\n\n let mut init_func = None;\n\n let mut process_func = None;\n\n let mut interval = None;\n\n\n\n for attr in &ast.attrs {\n\n let meta = attr.parse_meta().unwrap();\n\n match meta.name().to_string().as_str() {\n\n \"data\" => cs_data = Some(read_data(&meta)),\n\n \"init\" => init_func = Some(read_path_item(&meta, || improper_init_fmt())),\n\n \"process\" => process_func = Some(read_path_item(&meta, || improper_process_fmt())),\n\n \"interval\" => interval = Some(parse_interval(&meta)),\n\n _ => (),\n\n }\n\n }\n\n\n\n let (components, services) = match cs_data {\n\n Some((c, s)) => (c, s),\n", "file_path": "conniecs-derive/src/system.rs", "rank": 60, "score": 54776.470619773725 }, { "content": "fn impl_interact_system(ast: &syn::DeriveInput) -> proc_macro2::TokenStream {\n\n let name = &ast.ident;\n\n let mut cs_data = None;\n\n let mut init_func = None;\n\n let mut process_func = None;\n\n let mut aspect_all_a = vec![];\n\n let mut aspect_none_a = vec![];\n\n let mut aspect_all_b = vec![];\n\n let mut aspect_none_b = vec![];\n\n\n\n let aspect_id_a = Ident::new(&format!(\"{}EntityAspectA\", name), Span::call_site());\n\n let mut aspect_path_a = None;\n\n let aspect_id_b = Ident::new(&format!(\"{}EntityAspectB\", name), Span::call_site());\n\n let mut aspect_path_b = None;\n\n\n\n for attr in &ast.attrs {\n\n let meta = attr.parse_meta().unwrap();\n\n match meta.name().to_string().as_str() {\n\n \"data\" => cs_data = Some(read_data(&meta)),\n\n \"init\" => init_func = Some(read_path_item(&meta, || improper_init_fmt())),\n", "file_path": "conniecs-derive/src/system.rs", "rank": 61, "score": 54776.470619773725 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl<T> System for EntitySystem<T>\n\nwhere\n\n T: EntityProcess,\n\n{\n\n type Components = T::Components;\n\n type Services = T::Services;\n\n\n\n fn build_system() -> Self {\n\n EntitySystem::new()\n\n }\n\n\n\n fn activated(\n\n &mut self,\n\n entity: EntityData<T::Components>,\n\n components: &T::Components,\n\n services: &mut T::Services,\n", "file_path": "src/system/entity.rs", "rank": 62, "score": 54747.74391829918 }, { "content": " }\n\n}\n\n\n\nimpl<T> DerefMut for EntitySystem<T>\n\nwhere\n\n T: EntityProcess,\n\n{\n\n fn deref_mut(&mut self) -> &mut T {\n\n &mut self.inner\n\n }\n\n}\n\n\n\nimpl<T> EntitySystem<T>\n\nwhere\n\n T: EntityProcess,\n\n{\n\n pub fn new() -> EntitySystem<T> {\n\n EntitySystem {\n\n inner: T::build_system(),\n\n watcher: Watcher::new(T::create_aspect()),\n", "file_path": "src/system/entity.rs", "rank": 63, "score": 54742.905328109075 }, { "content": " ) {\n\n self.watcher\n\n .activated(entity, components, services, &mut self.inner);\n\n }\n\n\n\n fn reactivated(\n\n &mut self,\n\n entity: EntityData<T::Components>,\n\n components: &T::Components,\n\n services: &mut T::Services,\n\n ) {\n\n self.watcher\n\n .reactivated(entity, components, services, &mut self.inner);\n\n }\n\n\n\n fn deactivated(\n\n &mut self,\n\n entity: EntityData<T::Components>,\n\n components: &T::Components,\n\n services: &mut T::Services,\n", "file_path": "src/system/entity.rs", "rank": 64, "score": 54741.993639543354 }, { "content": " ) {\n\n self.watcher\n\n .deactivated(entity, components, services, &mut self.inner);\n\n }\n\n}\n\n\n\nimpl<T> Process for EntitySystem<T>\n\nwhere\n\n T: EntityProcess,\n\n{\n\n fn process(&mut self, data: &mut DataHelper<T::Components, T::Services>) {\n\n self.inner.process(self.watcher.iter(), data);\n\n }\n\n}\n", "file_path": "src/system/entity.rs", "rank": 65, "score": 54741.62745973646 }, { "content": "//! TODO: Add documentation including describing how the derive macros work\n\n\n\nuse std::ops::{Deref, DerefMut};\n\n\n\nuse crate::aspect::Aspect;\n\nuse crate::entity::{EntityData, EntityIter};\n\nuse crate::system::watcher::Watcher;\n\nuse crate::system::{Process, System};\n\nuse crate::world::DataHelper;\n\n\n", "file_path": "src/system/entity.rs", "rank": 66, "score": 54739.82934168165 }, { "content": "fn frame_ns(ns: u64) -> proc_macro2::TokenStream {\n\n quote! {\n\n ::conniecs::system::interval::TickerState::Timed {\n\n interval: #ns,\n\n next_tick: None,\n\n }\n\n }\n\n}\n\n\n", "file_path": "conniecs-derive/src/system.rs", "rank": 67, "score": 51678.95167196641 }, { "content": "fn frame_iv(iv: u64) -> proc_macro2::TokenStream {\n\n quote! {\n\n ::conniecs::system::interval::TickerState::Frames {\n\n interval: #iv,\n\n ticks: 0,\n\n }\n\n }\n\n}\n\n\n", "file_path": "conniecs-derive/src/system.rs", "rank": 68, "score": 51678.95167196641 }, { "content": "fn parse_interval(attr: &Meta) -> proc_macro2::TokenStream {\n\n match attr {\n\n Meta::NameValue(mnv) => match &mnv.lit {\n\n Lit::Str(time) => parse_iv_time(&time.value()),\n\n Lit::Int(time) => frame_iv(time.value()),\n\n _ => improper_interval_fmt(),\n\n },\n\n Meta::List(list) => parse_explicit_iv(list.nested.iter()),\n\n _ => improper_interval_fmt(),\n\n }\n\n}\n\n\n", "file_path": "conniecs-derive/src/system.rs", "rank": 69, "score": 51678.95167196641 }, { "content": "fn read_activations(attrs: &[Attribute]) -> proc_macro2::TokenStream {\n\n let mut activated = None;\n\n let mut reactivated = None;\n\n let mut deactivated = None;\n\n\n\n for attr in attrs {\n\n let meta = attr.parse_meta().unwrap();\n\n match meta.name().to_string().as_str() {\n\n \"activated\" => activated = Some(read_path_item(&meta, || improper_activated_fmt())),\n\n \"reactivated\" => {\n\n reactivated = Some(read_path_item(&meta, || improper_reactivated_fmt()))\n\n }\n\n \"deactivated\" => {\n\n deactivated = Some(read_path_item(&meta, || improper_deactivated_fmt()))\n\n }\n\n _ => (),\n\n }\n\n }\n\n\n\n let activated = activation_fn(\n", "file_path": "conniecs-derive/src/system.rs", "rank": 70, "score": 51678.95167196641 }, { "content": "fn parse_iv_time(time: &str) -> proc_macro2::TokenStream {\n\n let len = time.len();\n\n\n\n let ns = if time.ends_with(\"ms\") && len > 2 {\n\n let iv = parse_u64(&time[..len - 2]);\n\n iv * 1_000_000\n\n } else if (time.ends_with(\"us\") || time.ends_with(\"μs\")) && len > 2 {\n\n let iv = parse_u64(&time[..len - 2]);\n\n iv * 1_000\n\n } else if time.ends_with(\"ns\") && len > 2 {\n\n let iv = parse_u64(&time[..len - 2]);\n\n iv\n\n } else if time.ends_with(\"s\") && len > 1 {\n\n let iv = parse_u64(&time[..len - 1]);\n\n iv * 1_000_000_000\n\n } else {\n\n let iv = parse_u64(time);\n\n return frame_iv(iv);\n\n };\n\n\n\n frame_ns(ns)\n\n}\n\n\n", "file_path": "conniecs-derive/src/system.rs", "rank": 71, "score": 50425.40414368996 }, { "content": "fn activation_fn(name: Ident, item: Option<proc_macro2::TokenStream>) -> proc_macro2::TokenStream {\n\n if let Some(item) = item {\n\n quote! {\n\n fn #name (\n\n &mut self,\n\n entity: ::conniecs::entity::EntityData<Self::Components>,\n\n components: &Self::Components,\n\n services: &mut Self::Services,\n\n ) {\n\n #item (self, entity, components, services)\n\n }\n\n }\n\n } else {\n\n quote! {}\n\n }\n\n}\n\n\n", "file_path": "conniecs-derive/src/system.rs", "rank": 72, "score": 50019.769853784645 }, { "content": "type DataHelper = conniecs::DataHelper<Components, Services>;\n", "file_path": "tests/aspects.rs", "rank": 73, "score": 45427.5395728787 }, { "content": "fn improper_format() -> ! {\n\n improper_attr_format(\"#[aspect(...)]\", \"conniecs::aspect\")\n\n}\n\n\n", "file_path": "conniecs-derive/src/aspect.rs", "rank": 74, "score": 45363.79111462626 }, { "content": "fn improper_comp_format() -> ! {\n\n improper_attr_format(\"#[components(...)]\", \"conniecs::aspect\")\n\n}\n", "file_path": "conniecs-derive/src/aspect.rs", "rank": 75, "score": 44326.89363386683 }, { "content": "fn unwrap_word(item: &Meta) -> &Ident {\n\n match item {\n\n Meta::Word(ident) => ident,\n\n _ => improper_format(),\n\n }\n\n}\n\n\n", "file_path": "conniecs-derive/src/aspect.rs", "rank": 76, "score": 37361.77109944514 }, { "content": "fn unwrap_meta(item: &NestedMeta) -> &Meta {\n\n match item {\n\n NestedMeta::Meta(item) => item,\n\n NestedMeta::Literal(_) => improper_format(),\n\n }\n\n}\n\n\n", "file_path": "conniecs-derive/src/aspect.rs", "rank": 77, "score": 36549.35323839393 }, { "content": "fn improper_attr_format(attr: &str, module: &str) -> ! {\n\n panic!(\n\n \"{} was not in the correct format. Please refer to the {} \\\n\n module documentation for more information.\",\n\n attr, module\n\n )\n\n}\n\n\n", "file_path": "conniecs-derive/src/lib.rs", "rank": 78, "score": 34603.44238246091 }, { "content": "fn quote_path(path: &str) -> proc_macro2::TokenStream {\n\n let mut tokens = proc_macro2::TokenStream::new();\n\n for (i, part) in path.split(\"::\").enumerate() {\n\n use proc_macro2::{Ident, Punct, Spacing, Span};\n\n if i != 0 {\n\n tokens.append_all(&[\n\n Punct::new(':', Spacing::Joint),\n\n Punct::new(':', Spacing::Joint),\n\n ]);\n\n }\n\n if part.len() > 0 {\n\n tokens.append(Ident::new(part, Span::call_site()));\n\n }\n\n }\n\n tokens\n\n}\n", "file_path": "conniecs-derive/src/lib.rs", "rank": 79, "score": 33902.462909842674 }, { "content": "type Comps<T> = ComponentList<Components, T>;\n", "file_path": "tests/aspects.rs", "rank": 80, "score": 32142.33031583885 }, { "content": "//! TODO: Add documentation including describing how the derive macros work\n\n\n", "file_path": "src/services.rs", "rank": 81, "score": 31538.4649034213 }, { "content": " let result = self.inner.insert(entity.entity().index(), component);\n\n if result.is_none() && !E::can_insert_components() {\n\n panic!(\"ComponentList::set was used to insert a new component when modification of activated components was not allowed\");\n\n }\n\n result\n\n }\n\n\n\n pub fn get<E>(&self, entity: E) -> Option<T>\n\n where\n\n E: EditData<C>,\n\n T: Clone,\n\n {\n\n self.inner.get(entity.entity().index()).cloned()\n\n }\n\n\n\n pub fn has<E>(&self, entity: E) -> bool\n\n where\n\n E: EditData<C>,\n\n {\n\n self.inner.contains_key(entity.entity().index())\n", "file_path": "src/component.rs", "rank": 82, "score": 30972.6200604517 }, { "content": "\n\n pub fn cold() -> Self {\n\n ComponentList {\n\n inner: Cold(HashMap::with_hasher(Default::default())),\n\n _marker: PhantomData,\n\n }\n\n }\n\n\n\n pub fn add(&mut self, entity: BuildData<C>, component: T) -> Option<T> {\n\n self.inner.insert(entity.0.index(), component)\n\n }\n\n\n\n pub fn remove(&mut self, entity: ModifyData<C>) -> Option<T> {\n\n self.inner.remove(entity.0.index())\n\n }\n\n\n\n pub fn set<E>(&mut self, entity: E, component: T) -> Option<T>\n\n where\n\n E: EditData<C>,\n\n {\n", "file_path": "src/component.rs", "rank": 83, "score": 30972.02563237945 }, { "content": "impl<C, T, E> Index<E> for ComponentList<C, T>\n\nwhere\n\n C: ComponentManager,\n\n T: Component,\n\n E: EditData<C>,\n\n{\n\n type Output = T;\n\n fn index(&self, entity: E) -> &T {\n\n self.inner.index(entity.entity().index())\n\n }\n\n}\n\n\n\nimpl<C, T, E> IndexMut<E> for ComponentList<C, T>\n\nwhere\n\n C: ComponentManager,\n\n T: Component,\n\n E: EditData<C>,\n\n{\n\n fn index_mut(&mut self, entity: E) -> &mut T {\n\n self.inner.index_mut(entity.entity().index())\n", "file_path": "src/component.rs", "rank": 84, "score": 30970.256787293725 }, { "content": " }\n\n\n\n pub fn borrow<E>(&mut self, entity: E) -> Option<&mut T>\n\n where\n\n E: EditData<C>,\n\n {\n\n self.inner.get_mut(entity.entity().index())\n\n }\n\n\n\n #[doc(hidden)]\n\n pub fn __clear(&mut self, entity: &IndexedEntity<C>) {\n\n self.inner.remove(entity.index());\n\n }\n\n\n\n #[doc(hidden)]\n\n pub fn __wipe(&mut self) {\n\n self.inner.clear();\n\n }\n\n}\n\n\n", "file_path": "src/component.rs", "rank": 85, "score": 30969.78832680097 }, { "content": "\n\n pub(crate) fn contains_key(&self, index: usize) -> bool {\n\n match *self {\n\n Hot(ref map) => map.contains_key(index),\n\n Cold(ref map) => map.contains_key(&index),\n\n }\n\n }\n\n\n\n pub(crate) fn index(&self, index: usize) -> &T {\n\n self.get(index)\n\n .unwrap_or_else(|| panic!(\"Entity at index {} does not have this component attached\"))\n\n }\n\n\n\n pub(crate) fn index_mut(&mut self, index: usize) -> &mut T {\n\n self.get_mut(index)\n\n .unwrap_or_else(|| panic!(\"Entity at index {} does not have this component attached\"))\n\n }\n\n\n\n pub(crate) fn get(&self, index: usize) -> Option<&T> {\n\n match *self {\n", "file_path": "src/component.rs", "rank": 86, "score": 30967.106565232276 }, { "content": "#[derive(Debug)]\n\npub(crate) enum InnerComponentList<T>\n\nwhere\n\n T: Component,\n\n{\n\n Hot(VecMap<T>),\n\n Cold(FnvHashMap<usize, T>),\n\n}\n\n\n\nimpl<C, T> ComponentList<C, T>\n\nwhere\n\n C: ComponentManager,\n\n T: Component,\n\n{\n\n pub fn hot() -> Self {\n\n ComponentList {\n\n inner: Hot(VecMap::new()),\n\n _marker: PhantomData,\n\n }\n\n }\n", "file_path": "src/component.rs", "rank": 87, "score": 30966.16892555551 }, { "content": " }\n\n}\n\n\n\nimpl<T> InnerComponentList<T>\n\nwhere\n\n T: Component,\n\n{\n\n pub(crate) fn insert(&mut self, index: usize, component: T) -> Option<T> {\n\n match *self {\n\n Hot(ref mut map) => map.insert(index, component),\n\n Cold(ref mut map) => map.insert(index, component),\n\n }\n\n }\n\n\n\n pub(crate) fn remove(&mut self, index: usize) -> Option<T> {\n\n match *self {\n\n Hot(ref mut map) => map.remove(index),\n\n Cold(ref mut map) => map.remove(&index),\n\n }\n\n }\n", "file_path": "src/component.rs", "rank": 88, "score": 30964.754039773223 }, { "content": "//! TODO: Add documentation including describing how the derive macros work\n\n\n\nuse fnv::FnvHashMap;\n\nuse vec_map::VecMap;\n\n\n\nuse std::collections::HashMap;\n\nuse std::marker::PhantomData;\n\nuse std::ops::{Index, IndexMut};\n\n\n\nuse self::InnerComponentList::{Cold, Hot};\n\nuse crate::entity::{BuildData, EditData, IndexedEntity, ModifyData};\n\n\n\n/// Marks types which are suitable for being components. It is implemented for all\n\n/// types which are `'static`.\n", "file_path": "src/component.rs", "rank": 89, "score": 30963.35577841149 }, { "content": " Hot(ref map) => map.get(index),\n\n Cold(ref map) => map.get(&index),\n\n }\n\n }\n\n\n\n pub(crate) fn get_mut(&mut self, index: usize) -> Option<&mut T> {\n\n match *self {\n\n Hot(ref mut map) => map.get_mut(index),\n\n Cold(ref mut map) => map.get_mut(&index),\n\n }\n\n }\n\n\n\n pub(crate) fn clear(&mut self) {\n\n match *self {\n\n Hot(ref mut map) => map.clear(),\n\n Cold(ref mut map) => map.clear(),\n\n }\n\n }\n\n}\n", "file_path": "src/component.rs", "rank": 90, "score": 30962.622932916696 }, { "content": "fn read_path_item<F>(attr: &syn::Meta, fail: F) -> String\n\nwhere\n\n F: FnOnce(),\n\n{\n\n match attr {\n\n syn::Meta::Word(word) => word.to_string(),\n\n syn::Meta::List(list) => {\n\n let items = &list.nested;\n\n if items.len() != 1 {\n\n fail();\n\n unreachable!();\n\n }\n\n\n\n match items[0] {\n\n syn::NestedMeta::Literal(syn::Lit::Str(ref value)) => value.value(),\n\n syn::NestedMeta::Meta(syn::Meta::Word(ref word)) => word.to_string(),\n\n _ => {\n\n fail();\n\n unreachable!();\n\n }\n", "file_path": "conniecs-derive/src/lib.rs", "rank": 91, "score": 29944.744622344497 }, { "content": "fn unwrap_list<'a>(item: &'a Meta) -> impl Iterator<Item = &'a NestedMeta> {\n\n match item {\n\n Meta::List(list) => list.nested.iter(),\n\n _ => improper_format(),\n\n }\n\n}\n\n\n", "file_path": "conniecs-derive/src/aspect.rs", "rank": 92, "score": 29253.91949896077 }, { "content": " }\n\n }\n\n}\n\n\n\npub struct WatchedEntityIter<'a, C>\n\nwhere\n\n C: ComponentManager,\n\n{\n\n pub(crate) current_range: Range,\n\n pub(crate) indices: BIter<'a, Range>,\n\n pub(crate) entities: &'a FnvHashMap<usize, IndexedEntity<C>>,\n\n}\n\n\n\nimpl<'a, C> Clone for WatchedEntityIter<'a, C>\n\nwhere\n\n C: ComponentManager,\n\n{\n\n fn clone(&self) -> Self {\n\n WatchedEntityIter {\n\n current_range: self.current_range,\n", "file_path": "src/entity/iter.rs", "rank": 97, "score": 28820.32873995822 } ]
Rust
tests/integration.rs
saltyrtc/saltyrtc-task-relayed-data-rs
c290b4ae7e1cbed145b16c34b17e1f6bf8380465
extern crate env_logger; #[macro_use] extern crate log; extern crate saltyrtc_client; extern crate saltyrtc_task_relayed_data; extern crate tokio_core; extern crate tokio_timer; use std::boxed::Box; use std::env; use std::fs::File; use std::io::Read; use std::path::Path; use std::sync::{Arc, RwLock}; use std::time::Duration; use saltyrtc_client::{WsClient, SaltyClient, CloseCode, BoxedFuture}; use saltyrtc_client::crypto::{PublicKey, KeyPair, AuthToken}; use saltyrtc_client::dep::futures::{Future, Stream, Sink}; use saltyrtc_client::dep::futures::future; use saltyrtc_client::dep::futures::sync::mpsc; use saltyrtc_client::dep::native_tls::{Certificate, TlsConnector, Protocol}; use saltyrtc_client::dep::rmpv::Value; use saltyrtc_client::tasks::Task; use saltyrtc_task_relayed_data::{RelayedDataTask, MessageEvent, OutgoingMessage, RelayedDataError}; use tokio_core::reactor::{Core, Remote}; use tokio_timer::Timer; macro_rules! boxed { ($future:expr) => {{ Box::new($future) as BoxedFuture<_, _> }} } fn setup_initiator( keypair: KeyPair, remote: Remote, ) -> (SaltyClient, mpsc::UnboundedReceiver<MessageEvent>) { let (tx, rx) = mpsc::unbounded(); let task = RelayedDataTask::new(remote, tx); let salty = SaltyClient::build(keypair) .add_task(Box::new(task)) .initiator() .expect("Could not create initiator"); (salty, rx) } fn setup_responder( keypair: KeyPair, remote: Remote, pubkey: PublicKey, auth_token: AuthToken, ) -> (SaltyClient, mpsc::UnboundedReceiver<MessageEvent>) { let (tx, rx) = mpsc::unbounded(); let task = RelayedDataTask::new(remote, tx); let salty = SaltyClient::build(keypair) .add_task(Box::new(task)) .responder(pubkey, auth_token) .expect("Could not create initiator"); (salty, rx) } #[test] fn integration_test() { env::set_var("RUST_LOG", "saltyrtc_client=debug,saltyrtc_task_relayed_data=debug,integration=trace"); env_logger::init(); let mut core = Core::new().unwrap(); let mut server_cert_bytes: Vec<u8> = vec![]; File::open(&Path::new("saltyrtc.der")) .expect("Could not open saltyrtc.der") .read_to_end(&mut server_cert_bytes) .expect("Could not read saltyrtc.der"); let server_cert = Certificate::from_der(&server_cert_bytes) .unwrap_or_else(|e| { panic!("Problem with CA cert: {}", e); }); let tls_connector = TlsConnector::builder() .min_protocol_version(Some(Protocol::Tlsv11)) .add_root_certificate(server_cert) .build() .unwrap_or_else(|e| panic!("Could not initialize TlsConnector: {}", e)); let initiator_keypair = KeyPair::new(); let responder_keypair = KeyPair::new(); let pubkey = initiator_keypair.public_key().clone(); let (initiator, rx_initiator) = setup_initiator(initiator_keypair, core.remote()); let (responder, rx_responder) = setup_responder(responder_keypair, core.remote(), pubkey, initiator.auth_token().cloned().unwrap()); let initiator = Arc::new(RwLock::new(initiator)); let responder = Arc::new(RwLock::new(responder)); let timeout = Some(Duration::from_secs(2)); let (connect_initiator, event_channel_initiator) = saltyrtc_client::connect( "localhost", 8765, Some(tls_connector.clone()), &core.handle(), initiator.clone(), ) .unwrap(); let handshake_initiator = connect_initiator .and_then(|client| saltyrtc_client::do_handshake( client, initiator.clone(), event_channel_initiator.clone_tx(), timeout, )); let (connect_responder, event_channel_responder) = saltyrtc_client::connect( "localhost", 8765, Some(tls_connector.clone()), &core.handle(), responder.clone(), ) .unwrap(); let handshake_responder = connect_responder .and_then(|client| saltyrtc_client::do_handshake( client, responder.clone(), event_channel_responder.clone_tx(), timeout, )); let (client_initiator, client_responder): (WsClient, WsClient) = core.run( handshake_initiator.join(handshake_responder) ).unwrap(); let (task_initiator, initiator_task_loop) = saltyrtc_client::task_loop( client_initiator, initiator.clone(), event_channel_initiator.clone_tx(), ).unwrap(); let (task_responder, responder_task_loop) = saltyrtc_client::task_loop( client_responder, responder.clone(), event_channel_responder.clone_tx(), ).unwrap(); let (tx_initiator, tx_responder) = { let mut t_initiator = task_initiator.lock().expect("Could not lock task mutex"); let mut t_responder = task_responder.lock().expect("Could not lock task mutex"); let rd_task_initiator: &mut RelayedDataTask = (&mut **t_initiator as &mut dyn Task) .downcast_mut::<RelayedDataTask>() .expect("Chosen task is not a RelayedDataTask"); let rd_task_responder: &mut RelayedDataTask = (&mut **t_responder as &mut dyn Task) .downcast_mut::<RelayedDataTask>() .expect("Chosen task is not a RelayedDataTask"); let tx_initiator = rd_task_initiator.get_sender().unwrap(); let tx_responder = rd_task_responder.get_sender().unwrap(); (tx_initiator, tx_responder) }; let rx_loop_responder = rx_responder .map_err(|_| Err(RelayedDataError::Channel(("Could not read from rx_responder").into()))) .for_each(move |ev: MessageEvent| match ev { MessageEvent::Data(data) => { assert_eq!(data.as_i64(), Some(1)); debug!("R: Received 1"); let future = tx_responder .clone() .send(OutgoingMessage::Data(Value::Integer(2.into()))) .map(|tx| { debug!("R: Sent 2"); tx }) .and_then(|tx| tx.send(OutgoingMessage::Data(Value::Integer(3.into())))) .map(|_tx| { debug!("R: Sent 3"); () }) .map_err(|e| Err(RelayedDataError::Channel(format!("Could not send message to tx_responder: {}", e)))); boxed!(future) }, MessageEvent::Application(data) => { assert_eq!(data.as_i64(), Some(4)); debug!("R: Received 4 (application)"); let future = tx_responder .clone() .send(OutgoingMessage::Application(Value::Integer(5.into()))) .map(|_tx| { debug!("R: Sent 5 (application)"); () }) .map_err(|e| Err(RelayedDataError::Channel(format!("Could not send message to tx_responder: {}", e)))); boxed!(future) }, MessageEvent::Close(reason) => { assert_eq!(reason, CloseCode::WsGoingAway); boxed!(future::err(Ok(()))) }, }) .or_else(|e| e) .then(|f| { debug!("† rx_loop_responder done"); f }); let tx_initiator_clone = tx_initiator.clone(); let rx_loop_initiator = rx_initiator .map_err(|_| RelayedDataError::Channel(("Could not read from rx_initiator").into())) .for_each(move |ev: MessageEvent| match ev { MessageEvent::Data(data) => { match data.as_i64() { Some(2) => { debug!("I: Received 2"); /* Ok, wait for 3 */ boxed!(future::ok(())) }, Some(3) => { debug!("I: Received 3"); boxed!( tx_initiator_clone .clone() .send(OutgoingMessage::Application(Value::Integer(4.into()))) .map(|_| debug!("I: Sent 4 (application)")) .map_err(|e| RelayedDataError::Channel(e.to_string())) ) }, _ => panic!("I: Received invalid value: {}", data), } }, MessageEvent::Application(data) => match data.as_i64() { Some(5) => { debug!("I: Received 5 (application)"); debug!("Done, disconnecting"); task_initiator.lock().unwrap().close(CloseCode::WsGoingAway); boxed!(future::ok(())) }, _ => panic!("I: Received invalid application value: {}", data), }, MessageEvent::Close(_) => panic!("Initiator should disconnect first!"), }) .then(|f| { debug!("† rx_loop_initiator done"); f }); let start = tx_initiator .send(OutgoingMessage::Data(Value::Integer(1.into()))) .map(|_| debug!("I: Sent 1")) .map_err(|e| RelayedDataError::Channel(e.to_string())); let test_future = start .join(initiator_task_loop.from_err().select(rx_loop_initiator).map_err(|(e, _)| e)) .join(responder_task_loop.from_err().select(rx_loop_responder).map_err(|(e, _)| e)); let timer = Timer::default(); let timeout = timer.sleep(Duration::from_secs(3)); match core.run(test_future.select2(timeout)) { Ok(res) => match res { future::Either::A(_) => debug!("Success"), future::Either::B(_) => panic!("The test timed out"), }, Err(e) => match e { future::Either::A((task_error, _)) => panic!("A task error occurred: {}", task_error), future::Either::B(_) => panic!("The timeout failed"), }, }; }
extern crate env_logger; #[macro_use] extern crate log; extern crate saltyrtc_client; extern crate saltyrtc_task_relayed_data; extern crate tokio_core; extern crate tokio_timer; use std::boxed::Box; use std::env; use std::fs::File; use std::io::Read; use std::path::Path; use std::sync::{Arc, RwLock}; use std::time::Duration; use saltyrtc_client::{WsClient, SaltyClient, CloseCode, BoxedFuture}; use saltyrtc_client::crypto::{PublicKey, KeyPair, AuthToken}; use saltyrtc_client::dep::futures::{Future, Stream, Sink}; use saltyrtc_client::dep::futures::future; use saltyrtc_client::dep::futures::sync::mpsc; use saltyrtc_client::dep::native_tls::{Certificate, TlsConnector, Protocol}; use saltyrtc_client::dep::rmpv::Value; use saltyrtc_client::tasks::Task; use saltyrtc_task_relayed_data::{RelayedDataTask, MessageEvent, OutgoingMessage, RelayedDataError}; use tokio_core::reactor::{Core, Remote}; use tokio_timer::Timer; macro_rules! boxed { ($future:expr) => {{ Box::new($future) as BoxedFuture<_, _> }} } fn setup_initiator( keypair: KeyPair, remote: Remote, ) -> (SaltyClient, mpsc::UnboundedReceiver<MessageEvent>) { let (tx, rx) = mpsc::unbounded(); let task = RelayedDataTask::new(remote, tx); let salty = SaltyClient::build(keypair) .add_task(Box::new(task)) .initiator() .expect("Could not create initiator"); (salty, rx) } fn setup_responder( keypair: KeyPair, remote: Remote, pubkey: PublicKey, auth_token: AuthToken, ) -> (SaltyClient, mpsc::UnboundedReceiver<MessageEvent>) { let (tx, rx) = mpsc::unbounded(); let task = RelayedDataTask::new(remote, tx); let salty = SaltyClient::build(keypair) .add_task(Box::new(task)) .responder(pubkey, auth_token) .expect("Could not create initiator"); (salty, rx) } #[test] fn integration_test() { env::set_var("RUST_LOG", "saltyrtc_client=debug,saltyrtc_task_relayed_data=debug,integration=trace"); env_logger::init(); let mut core = Core::new().unwrap(); let mut server_cert_bytes: Vec<u8> = vec![]; File::open(&Path::new("saltyrtc.der")) .expect("Could not open saltyrtc.der") .read_to_end(&mut server_cert_bytes) .expect("Could not read saltyrtc.der"); let server_cert = Certificate::from_der(&server_cert_bytes) .unwrap_or_else(|e| { panic!("Problem with CA cert: {}", e); }); let tls_connector = TlsConnector::builder() .min_protocol_version(Some(Protocol::Tlsv11)) .add_root_certificate(server_cert) .build() .unwrap_or_else(|e| panic!("Could not initialize TlsConnector: {}", e)); let initiator_keypair = KeyPair::new(); let responder_keypair = KeyPair::new(); let pubkey = initiator_keypair.public_key().clone(); let (initiator, rx_initiator) = setup_initiator(initiator_keypair, core.remote()); let (responder, rx_responder) = setup_responder(responder_keypair, core.remote(), pubkey, initiator.auth_token().cloned().unwrap()); let initiator = Arc::new(RwLock::new(initiator)); let responder = Arc::new(RwLock::new(responder)); let timeout = Some(Duration::from_secs(2)); let (connect_initiator, event_channel_initiator) = saltyrtc_client::connect( "localhost", 8765, Some(tls_connector.clone()), &core.handle(), initiator.clone(), ) .unwrap(); let handshake_initiator = connect_initiator .and_then(|client| saltyrtc_client::do_handshake( client, initiator.clone(), event_channel_initiator.clone_tx(), timeout, )); let (connect_responder, event_channel_responder) = saltyrtc_client::connect( "localhost", 8765, Some(tls_connector.clone()), &core.handle(), responder.clone(), ) .unwrap(); let handshake_responder = connect_responder .and_then(|client| saltyrtc_client::do_handshake( client, responder.clone(), event_channel_responder.clone_tx(), timeout, )); let (client_initiator, client_responder): (WsClient, WsClient) = core.run( handshake_initiator.join(handshake_responder) ).unwrap(); let (task_initiator, initiator_task_loop) = saltyrtc_client::task_loop( client_initiator, initiator.clone(), event_channel_initiator.clone_tx(), ).unwrap(); let (task_responder, responder_task_loop) = saltyrtc_client::task_loop( client_responder, responder.clone(), event_channel_responder.clone_tx(), ).unwrap(); let (tx_initiator, tx_responder) = { let mut t_initiator = task_initiator.lock().expect("Could not lock task mutex"); let mut t_responder = task_responder.lock().expect("Could not lock task mutex");
let rd_task_responder: &mut RelayedDataTask = (&mut **t_responder as &mut dyn Task) .downcast_mut::<RelayedDataTask>() .expect("Chosen task is not a RelayedDataTask"); let tx_initiator = rd_task_initiator.get_sender().unwrap(); let tx_responder = rd_task_responder.get_sender().unwrap(); (tx_initiator, tx_responder) }; let rx_loop_responder = rx_responder .map_err(|_| Err(RelayedDataError::Channel(("Could not read from rx_responder").into()))) .for_each(move |ev: MessageEvent| match ev { MessageEvent::Data(data) => { assert_eq!(data.as_i64(), Some(1)); debug!("R: Received 1"); let future = tx_responder .clone() .send(OutgoingMessage::Data(Value::Integer(2.into()))) .map(|tx| { debug!("R: Sent 2"); tx }) .and_then(|tx| tx.send(OutgoingMessage::Data(Value::Integer(3.into())))) .map(|_tx| { debug!("R: Sent 3"); () }) .map_err(|e| Err(RelayedDataError::Channel(format!("Could not send message to tx_responder: {}", e)))); boxed!(future) }, MessageEvent::Application(data) => { assert_eq!(data.as_i64(), Some(4)); debug!("R: Received 4 (application)"); let future = tx_responder .clone() .send(OutgoingMessage::Application(Value::Integer(5.into()))) .map(|_tx| { debug!("R: Sent 5 (application)"); () }) .map_err(|e| Err(RelayedDataError::Channel(format!("Could not send message to tx_responder: {}", e)))); boxed!(future) }, MessageEvent::Close(reason) => { assert_eq!(reason, CloseCode::WsGoingAway); boxed!(future::err(Ok(()))) }, }) .or_else(|e| e) .then(|f| { debug!("† rx_loop_responder done"); f }); let tx_initiator_clone = tx_initiator.clone(); let rx_loop_initiator = rx_initiator .map_err(|_| RelayedDataError::Channel(("Could not read from rx_initiator").into())) .for_each(move |ev: MessageEvent| match ev { MessageEvent::Data(data) => { match data.as_i64() { Some(2) => { debug!("I: Received 2"); /* Ok, wait for 3 */ boxed!(future::ok(())) }, Some(3) => { debug!("I: Received 3"); boxed!( tx_initiator_clone .clone() .send(OutgoingMessage::Application(Value::Integer(4.into()))) .map(|_| debug!("I: Sent 4 (application)")) .map_err(|e| RelayedDataError::Channel(e.to_string())) ) }, _ => panic!("I: Received invalid value: {}", data), } }, MessageEvent::Application(data) => match data.as_i64() { Some(5) => { debug!("I: Received 5 (application)"); debug!("Done, disconnecting"); task_initiator.lock().unwrap().close(CloseCode::WsGoingAway); boxed!(future::ok(())) }, _ => panic!("I: Received invalid application value: {}", data), }, MessageEvent::Close(_) => panic!("Initiator should disconnect first!"), }) .then(|f| { debug!("† rx_loop_initiator done"); f }); let start = tx_initiator .send(OutgoingMessage::Data(Value::Integer(1.into()))) .map(|_| debug!("I: Sent 1")) .map_err(|e| RelayedDataError::Channel(e.to_string())); let test_future = start .join(initiator_task_loop.from_err().select(rx_loop_initiator).map_err(|(e, _)| e)) .join(responder_task_loop.from_err().select(rx_loop_responder).map_err(|(e, _)| e)); let timer = Timer::default(); let timeout = timer.sleep(Duration::from_secs(3)); match core.run(test_future.select2(timeout)) { Ok(res) => match res { future::Either::A(_) => debug!("Success"), future::Either::B(_) => panic!("The test timed out"), }, Err(e) => match e { future::Either::A((task_error, _)) => panic!("A task error occurred: {}", task_error), future::Either::B(_) => panic!("The timeout failed"), }, }; }
let rd_task_initiator: &mut RelayedDataTask = (&mut **t_initiator as &mut dyn Task) .downcast_mut::<RelayedDataTask>() .expect("Chosen task is not a RelayedDataTask");
assignment_statement
[ { "content": "fn build_tests() -> (MutexGuard<'static, ()>, PathBuf) {\n\n let guard = match C_TEST_MUTEX.lock() {\n\n Ok(guard) => guard,\n\n Err(poisoned) => poisoned.into_inner(),\n\n };\n\n\n\n let out_dir = env!(\"OUT_DIR\");\n\n let build_dir = Path::new(out_dir).join(\"build\");\n\n\n\n println!(\"Running meson...\");\n\n Command::new(\"meson\")\n\n .arg(build_dir.to_str().unwrap())\n\n .env(\"CC\", \"clang\")\n\n .output()\n\n .expect(\"Could not run meson to build C tests\");\n\n\n\n println!(\"Running ninja...\");\n\n let output = Command::new(\"ninja\")\n\n .current_dir(&build_dir)\n\n .output()\n\n .expect(\"Could not run ninja to build C tests\");\n\n assert_output_success(output);\n\n\n\n println!(\"Copying test certificate...\");\n\n copy(\"../saltyrtc.der\", build_dir.join(\"saltyrtc.der\"))\n\n .expect(\"Could not copy test certificate (saltyrtc.der)\");\n\n\n\n (guard, build_dir)\n\n}\n\n\n", "file_path": "ffi/tests/integration.rs", "rank": 0, "score": 113786.40864564222 }, { "content": "/// Helper function to return error values when creating a client instance.\n\nfn make_client_create_error(reason: salty_relayed_data_success_t) -> salty_relayed_data_client_ret_t {\n\n salty_relayed_data_client_ret_t {\n\n success: reason,\n\n client: ptr::null(),\n\n receiver_rx: ptr::null(),\n\n sender_tx: ptr::null(),\n\n sender_rx: ptr::null(),\n\n disconnect_tx: ptr::null(),\n\n disconnect_rx: ptr::null(),\n\n }\n\n}\n\n\n", "file_path": "ffi/src/lib.rs", "rank": 4, "score": 103000.50390243322 }, { "content": " const salty_keypair_t *keypair;\n", "file_path": "ffi/tests/integration.c", "rank": 5, "score": 94362.86782556414 }, { "content": " const uint8_t *ca_cert;\n", "file_path": "ffi/tests/integration.c", "rank": 6, "score": 90280.40058090781 }, { "content": " const uint8_t *initiator_pubkey;\n", "file_path": "ffi/tests/integration.c", "rank": 7, "score": 90134.67763318452 }, { "content": " long ca_cert_len;\n", "file_path": "ffi/tests/integration.c", "rank": 8, "score": 87140.45251682721 }, { "content": "fn main() {\n\n let crate_dir = env::var(\"CARGO_MANIFEST_DIR\").unwrap();\n\n if env::var(\"SKIP_CBINDGEN\").unwrap_or(\"0\".to_string()) == \"1\" {\n\n return;\n\n }\n\n cbindgen::generate(&crate_dir)\n\n .expect(\"Unable to generate C bindings\")\n\n .write_to_file(\"saltyrtc_task_relayed_data_ffi.h\");\n\n}\n", "file_path": "ffi/build.rs", "rank": 9, "score": 82182.4263534448 }, { "content": "// #[test] Disabled for now due to false errors, see\n\n// https://bugs.kde.org/show_bug.cgi?id=381289 and\n\n// https://bugzilla.redhat.com/show_bug.cgi?id=1462258\n\n// Additionally, the log4rs logger initializes global memory that cannot\n\n// currently be freed.\n\nfn c_tests_no_memory_leaks() {\n\n let (_guard, build_dir) = build_tests();\n\n\n\n let output = Command::new(\"valgrind\")\n\n .arg(\"--error-exitcode=23\")\n\n .arg(\"--leak-check=full\")\n\n .arg(\"--track-fds=yes\")\n\n .arg(\"./integration\")\n\n .current_dir(&build_dir)\n\n .output()\n\n .expect(\"Could not run valgrind\");\n\n if !output.status.success() {\n\n let stdout = String::from_utf8_lossy(&output.stdout);\n\n let stderr = String::from_utf8_lossy(&output.stderr);\n\n println!(\"Stdout:\\n{}\\nStderr:\\n{}\\n\", stdout, stderr);\n\n panic!(\"Running valgrind failed with non-zero return code\");\n\n }\n\n}\n", "file_path": "ffi/tests/integration.rs", "rank": 10, "score": 81721.57598514242 }, { "content": "#[test]\n\nfn c_tests_disconnect_run() {\n\n c_tests_run(\"./disconnect\", None);\n\n}\n\n\n", "file_path": "ffi/tests/integration.rs", "rank": 11, "score": 81720.0765061444 }, { "content": "fn make_log_config(config: LogConfig) -> Result<Config, String> {\n\n // Log format\n\n let format = \"{d(%Y-%m-%dT%H:%M:%S%.3f)} [{l:<5}] {m} (({f}:{L})){n}\";\n\n\n\n // Appender\n\n let (appender, level) = match config {\n\n LogConfig::Console(level) => {\n\n let console = ConsoleAppender::builder()\n\n .encoder(Box::new(PatternEncoder::new(format)))\n\n .build();\n\n (Box::new(console) as Box<dyn Append>, level)\n\n }\n\n LogConfig::Callback(func, level) => {\n\n (Box::new(CallbackAppender::new(func)) as Box<dyn Append>, level)\n\n }\n\n };\n\n\n\n // Create logging config object\n\n let config_res = Config::builder()\n\n .appender(Appender::builder().build(\"appender\", appender))\n", "file_path": "ffi/src/saltyrtc_client_ffi.rs", "rank": 12, "score": 78723.15720419362 }, { "content": "#[test]\n\nfn c_tests_integration_run_console_logger() {\n\n c_tests_run(\"./integration\", Some(\"console\"));\n\n}\n\n\n", "file_path": "ffi/tests/integration.rs", "rank": 13, "score": 77447.75853365683 }, { "content": "#[test]\n\nfn c_tests_integration_run_callback_logger() {\n\n c_tests_run(\"./integration\", Some(\"callback\"));\n\n}\n\n\n", "file_path": "ffi/tests/integration.rs", "rank": 14, "score": 77447.75853365683 }, { "content": " const salty_client_t *client;\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 15, "score": 75028.00427325869 }, { "content": "/// Create the QR code payload\n\nfn make_qrcode_payload(version: u16, permanent: bool, host: &str, port: u16, pubkey: &[u8], auth_token: &[u8], server_pubkey: &[u8]) -> Vec<u8> {\n\n let mut data: Vec<u8> = Vec::with_capacity(101 + host.as_bytes().len());\n\n\n\n data.write_u16::<BigEndian>(version).unwrap();\n\n data.push(if permanent { 0x02 } else { 0x00 });\n\n data.write_all(&pubkey).unwrap();\n\n data.write_all(&auth_token).unwrap();\n\n data.write_all(&server_pubkey).unwrap();\n\n data.write_u16::<BigEndian>(port).unwrap();\n\n data.write_all(host.as_bytes()).unwrap();\n\n\n\n data\n\n}\n\n\n", "file_path": "examples/testclient.rs", "rank": 16, "score": 70051.73912628788 }, { "content": "static sem_t initialized;\n", "file_path": "ffi/tests/disconnect.c", "rank": 17, "score": 69450.29215047648 }, { "content": "salty_client_send_success_t salty_client_send_task_bytes(const salty_channel_sender_tx_t *sender_tx,\n\n const uint8_t *msg,\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 18, "score": 69048.64458025097 }, { "content": "static uint8_t *auth_token = NULL;\n", "file_path": "ffi/tests/integration.c", "rank": 19, "score": 68704.58762942362 }, { "content": "void salty_keypair_free(const salty_keypair_t *ptr);\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 20, "score": 67837.07321804513 }, { "content": "const salty_keypair_t *salty_keypair_new(void);\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 21, "score": 67836.56183290292 }, { "content": "const salty_keypair_t *salty_keypair_restore(const uint8_t *ptr);\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 22, "score": 67834.72565376054 }, { "content": "salty_client_init_ret_t salty_client_init(const char *host,\n\n uint16_t port,\n\n const salty_client_t *client,\n\n const salty_event_loop_t *event_loop,\n\n uint16_t timeout_s,\n\n const uint8_t *ca_cert,\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 23, "score": 67364.32065424637 }, { "content": "salty_client_connect_success_t salty_client_connect(const salty_handshake_future_t *handshake_future,\n\n const salty_client_t *client,\n\n const salty_event_loop_t *event_loop,\n\n const salty_channel_event_tx_t *event_tx,\n\n const salty_channel_sender_rx_t *sender_rx,\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 24, "score": 67363.39081803856 }, { "content": "salty_client_disconnect_success_t salty_client_disconnect(const salty_channel_disconnect_tx_t *disconnect_tx,\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 25, "score": 67359.39267344215 }, { "content": " uint16_t timeout_seconds;\n", "file_path": "ffi/tests/integration.c", "rank": 26, "score": 67109.43770223096 }, { "content": "static void log_callback(uint8_t level, const char *target, const char *message) {\n\n printf(\"****** [%d] %s: %s\\n\", level, target, message);\n", "file_path": "ffi/tests/integration.c", "rank": 27, "score": 67034.98804308508 }, { "content": "static const salty_channel_disconnect_tx_t *disconnect_tx = NULL;\n", "file_path": "ffi/tests/disconnect.c", "rank": 28, "score": 67020.52256748466 }, { "content": "void *connect_responder(void *threadarg) {\n\n struct thread_data *data = (struct thread_data *) threadarg;\n\n printf(\" THREAD: Started responder thread\\n\");\n\n\n\n printf(\" RESPONDER: Creating event loop\\n\");\n\n const salty_event_loop_t *loop = salty_event_loop_new();\n\n\n\n printf(\" RESPONDER: Getting event loop remote handle\\n\");\n\n const salty_remote_t *remote = salty_event_loop_get_remote(loop);\n\n\n\n printf(\" RESPONDER: Waiting for auth token semaphore...\\n\");\n\n sem_wait(&auth_token_set);\n\n\n\n printf(\" RESPONDER: Creating client instance\\n\");\n\n salty_relayed_data_client_ret_t client_ret = salty_relayed_data_responder_new(\n\n data->keypair,\n\n remote,\n\n data->interval_seconds,\n\n data->initiator_pubkey,\n\n auth_token,\n\n NULL\n\n );\n\n if (client_ret.success != OK) {\n\n printf(\" RESPONDER ERROR: Could not create client: %d\", client_ret.success);\n\n pthread_exit(NULL);\n\n }\n\n\n\n responder_sender = client_ret.sender_tx;\n\n responder_receiver = client_ret.receiver_rx;\n\n responder_disconnect = client_ret.disconnect_tx;\n\n printf(\" RESPONDER: Notifying main thread that the channels are ready\\n\");\n\n sem_post(&responder_channels_ready);\n\n\n\n printf(\" RESPONDER: Initializing\\n\");\n\n salty_client_init_ret_t init_ret = salty_client_init(\n\n // Host, port\n\n \"localhost\",\n\n 8765,\n\n // Client\n\n client_ret.client,\n\n // Event loop\n\n loop,\n\n // Timeout seconds\n\n data->timeout_seconds,\n\n // CA certificate\n\n data->ca_cert,\n\n (uint32_t)data->ca_cert_len\n\n );\n\n if (init_ret.success != INIT_OK) {\n\n printf(\" RESPONDER ERROR: Could not initialize connection: %d\", init_ret.success);\n\n pthread_exit(NULL);\n\n }\n\n\n\n printf(\" RESPONDER: Connecting\\n\");\n\n salty_client_connect_success_t connect_success = salty_client_connect(\n\n // Handshake future\n\n init_ret.handshake_future,\n\n // Client\n\n client_ret.client,\n\n // Event loop\n\n loop,\n\n // Event channel, sending end\n\n init_ret.event_tx,\n\n // Sender channel, receiving end\n\n client_ret.sender_rx,\n\n // Disconnect channel, receiving end\n\n client_ret.disconnect_rx\n\n );\n\n\n\n drain_events(init_ret.event_rx, \"RESPONDER\");\n\n\n\n printf(\" RESPONDER: Connection ended with exit code %d\\n\", connect_success);\n\n salty_client_connect_success_t* connect_success_copy = malloc(sizeof(connect_success));\n\n if (connect_success_copy == NULL) {\n\n printf(\" RESPONDER ERROR: Could not malloc %ld bytes\\n\", sizeof(connect_success));\n\n pthread_exit(NULL);\n\n }\n\n memcpy(connect_success_copy, &connect_success, sizeof(connect_success));\n\n\n\n printf(\" RESPONDER: Freeing client instance\\n\");\n\n salty_relayed_data_client_free(client_ret.client);\n\n\n\n printf(\" RESPONDER: Freeing channel instances\\n\");\n\n salty_channel_receiver_rx_free(client_ret.receiver_rx);\n\n salty_channel_sender_tx_free(client_ret.sender_tx);\n\n salty_channel_event_rx_free(init_ret.event_rx);\n\n\n\n printf(\" RESPONDER: Freeing event loop\\n\");\n\n salty_event_loop_free(loop);\n\n\n\n printf(\" THREAD: Stopping responder thread\\n\");\n\n pthread_exit((void *)connect_success_copy);\n", "file_path": "ffi/tests/integration.c", "rank": 29, "score": 67013.63749701824 }, { "content": "static const salty_channel_disconnect_tx_t *responder_disconnect = NULL;\n", "file_path": "ffi/tests/integration.c", "rank": 30, "score": 67006.0404750313 }, { "content": "static const salty_channel_receiver_rx_t *responder_receiver = NULL;\n", "file_path": "ffi/tests/integration.c", "rank": 31, "score": 67006.0404750313 }, { "content": "static const salty_channel_sender_tx_t *responder_sender = NULL;\n", "file_path": "ffi/tests/integration.c", "rank": 32, "score": 67006.0404750313 }, { "content": "static const salty_channel_event_rx_t *event_rx = NULL;\n", "file_path": "ffi/tests/disconnect.c", "rank": 33, "score": 66991.5307617441 }, { "content": "void *connect_initiator(void *threadarg) {\n\n struct thread_data *data = (struct thread_data *) threadarg;\n\n printf(\" THREAD: Started initiator thread\\n\");\n\n\n\n printf(\" INITIATOR: Creating event loop\\n\");\n\n const salty_event_loop_t *loop = salty_event_loop_new();\n\n\n\n printf(\" INITIATOR: Getting event loop remote handle\\n\");\n\n const salty_remote_t *remote = salty_event_loop_get_remote(loop);\n\n const salty_remote_t *unused_remote = salty_event_loop_get_remote(loop);\n\n\n\n printf(\" INITIATOR: Creating client instance\\n\");\n\n salty_relayed_data_client_ret_t client_ret = salty_relayed_data_initiator_new(\n\n data->keypair,\n\n remote,\n\n data->interval_seconds,\n\n NULL,\n\n NULL\n\n );\n\n if (client_ret.success != OK) {\n\n printf(\" INITIATOR ERROR: Could not create client: %d\", client_ret.success);\n\n pthread_exit(NULL);\n\n }\n\n\n\n initiator_sender = client_ret.sender_tx;\n\n initiator_receiver = client_ret.receiver_rx;\n\n initiator_disconnect = client_ret.disconnect_tx;\n\n printf(\" INITIATOR: Notifying main thread that the channels are ready\\n\");\n\n sem_post(&initiator_channels_ready);\n\n\n\n printf(\" INITIATOR: Copying auth token to static variable\\n\");\n\n auth_token = malloc(32 * sizeof(uint8_t));\n\n if (auth_token == NULL) {\n\n printf(\" INITIATOR ERROR: Could not allocate memory for auth token\");\n\n pthread_exit(NULL);\n\n }\n\n const uint8_t *auth_token_ref = salty_relayed_data_client_auth_token(client_ret.client);\n\n memcpy(auth_token, auth_token_ref, 32 * sizeof(uint8_t));\n\n\n\n printf(\" INITIATOR: Notifying responder that the auth token is ready\\n\");\n\n sem_post(&auth_token_set);\n\n\n\n printf(\" INITIATOR: Initializing\\n\");\n\n salty_client_init_ret_t init_ret = salty_client_init(\n\n // Host, port\n\n \"localhost\",\n\n 8765,\n\n // Client\n\n client_ret.client,\n\n // Event loop\n\n loop,\n\n // Timeout seconds\n\n data->timeout_seconds,\n\n // CA certificate\n\n data->ca_cert,\n\n (uint32_t)data->ca_cert_len\n\n );\n\n if (init_ret.success != INIT_OK) {\n\n printf(\" INITIATOR ERROR: Could not initialize connection: %d\", init_ret.success);\n\n pthread_exit(NULL);\n\n }\n\n\n\n printf(\" INITIATOR: Connecting\\n\");\n\n salty_client_connect_success_t connect_success = salty_client_connect(\n\n // Handshake future\n\n init_ret.handshake_future,\n\n // Client\n\n client_ret.client,\n\n // Event loop\n\n loop,\n\n // Event channel, sending end\n\n init_ret.event_tx,\n\n // Sender channel, receiving end\n\n client_ret.sender_rx,\n\n // Disconnect channel, receiving end\n\n client_ret.disconnect_rx\n\n );\n\n\n\n drain_events(init_ret.event_rx, \"INITIATOR\");\n\n\n\n printf(\" INITIATOR: Connection ended with exit code %d\\n\", connect_success);\n\n salty_client_connect_success_t* connect_success_copy = malloc(sizeof(connect_success));\n\n if (connect_success_copy == NULL) {\n\n printf(\" INITIATOR ERROR: Could not malloc %ld bytes\\n\", sizeof(connect_success));\n\n pthread_exit(NULL);\n\n }\n\n memcpy(connect_success_copy, &connect_success, sizeof(connect_success));\n\n\n\n printf(\" INITIATOR: Freeing unused event loop remote handle\\n\");\n\n salty_event_loop_free_remote(unused_remote);\n\n\n\n printf(\" INITIATOR: Freeing client instance\\n\");\n\n salty_relayed_data_client_free(client_ret.client);\n\n\n\n printf(\" INITIATOR: Freeing channel instances\\n\");\n\n salty_channel_receiver_rx_free(client_ret.receiver_rx);\n\n salty_channel_sender_tx_free(client_ret.sender_tx);\n\n salty_channel_event_rx_free(init_ret.event_rx);\n\n\n\n printf(\" INITIATOR: Freeing event loop\\n\");\n\n salty_event_loop_free(loop);\n\n\n\n printf(\" THREAD: Stopping initiator thread\\n\");\n\n pthread_exit((void *)connect_success_copy);\n", "file_path": "ffi/tests/integration.c", "rank": 34, "score": 66955.31248760308 }, { "content": "void *connect_initiator(void *threadarg) {\n\n if (threadarg == NULL) { /* get rid of unused variable warning */ }\n\n\n\n printf(\" Reading DER formatted test CA certificate\\n\");\n\n\n\n // Open file\n\n const char *const ca_cert_name = \"saltyrtc.der\";\n\n FILE *fd = fopen(ca_cert_name, \"rb\");\n\n if (fd == NULL) {\n\n printf(\" ERROR: Could not open `%s`\\n\", ca_cert_name);\n\n pthread_exit((void *)1);\n\n }\n\n\n\n // Get file size\n\n if (fseek(fd, 0, SEEK_END) != 0) {\n\n printf(\" ERROR: Could not fseek `%s`\\n\", ca_cert_name);\n\n pthread_exit((void *)1);\n\n }\n\n long ca_cert_len = ftell(fd);\n\n if (ca_cert_len < 0) {\n\n printf(\" ERROR: Could not ftell `%s`\\n\", ca_cert_name);\n\n pthread_exit((void *)1);\n\n } else if (ca_cert_len >= (1L << 32)) {\n\n printf(\" ERROR: ca_cert_len is larger than 2**32\\n\");\n\n pthread_exit((void *)1);\n\n }\n\n if (fseek(fd, 0, SEEK_SET) != 0) {\n\n printf(\" ERROR: Could not fseek `%s`\\n\", ca_cert_name);\n\n pthread_exit((void *)1);\n\n }\n\n\n\n // Prepare buffer\n\n uint8_t *ca_cert = malloc((size_t)ca_cert_len);\n\n if (ca_cert == NULL) {\n\n printf(\" ERROR: Could not malloc %ld bytes\\n\", ca_cert_len);\n\n pthread_exit((void *)1);\n\n }\n\n size_t read_bytes = fread(ca_cert, (size_t)ca_cert_len, 1, fd);\n\n if (read_bytes != 1) {\n\n printf(\" ERROR: Could not read file\\n\");\n\n pthread_exit((void *)1);\n\n }\n\n if (fclose(fd) != 0) { printf(\"Warning: Closing ca cert file descriptor failed\"); }\n\n\n\n printf(\" Initializing console logger (level WARN)\\n\");\n\n if (!salty_log_init_console(LEVEL_WARN)) {\n\n pthread_exit((void *)1);\n\n }\n\n\n\n printf(\" Creating key pair\\n\");\n\n const salty_keypair_t *keypair = salty_keypair_new();\n\n\n\n printf(\" Creating event loop\\n\");\n\n const salty_event_loop_t *loop = salty_event_loop_new();\n\n\n\n printf(\" Getting event loop remote handle\\n\");\n\n const salty_remote_t *remote = salty_event_loop_get_remote(loop);\n\n\n\n printf(\" Creating client instance\\n\");\n\n salty_relayed_data_client_ret_t client_ret = salty_relayed_data_initiator_new(\n\n keypair,\n\n remote,\n\n 0, // interval seconds\n\n NULL,\n\n NULL\n\n );\n\n if (client_ret.success != OK) {\n\n printf(\" ERROR: Could not create client: %d\", client_ret.success);\n\n pthread_exit((void *)1);\n\n }\n\n\n\n printf(\" Initializing\\n\");\n\n salty_client_init_ret_t init_ret = salty_client_init(\n\n // Host, port\n\n \"localhost\",\n\n 8765,\n\n // Client\n\n client_ret.client,\n\n // Event loop\n\n loop,\n\n // Timeout seconds\n\n 5,\n\n // CA certificate\n\n ca_cert,\n\n (uint32_t)ca_cert_len\n\n );\n\n if (init_ret.success != INIT_OK) {\n\n printf(\" ERROR: Could not initialize connection: %d\", init_ret.success);\n\n pthread_exit((void *)1);\n\n }\n\n\n\n // Assign event_rx to static\n\n event_rx = init_ret.event_rx;\n\n disconnect_tx = client_ret.disconnect_tx;\n\n sem_post(&initialized);\n\n\n\n printf(\" Connecting...\\n\");\n\n salty_client_connect_success_t connect_success = salty_client_connect(\n\n // Handshake future\n\n init_ret.handshake_future,\n\n // Client\n\n client_ret.client,\n\n // Event loop\n\n loop,\n\n // Event channel, sending end\n\n init_ret.event_tx,\n\n // Sender channel, receiving end\n\n client_ret.sender_rx,\n\n // Disconnect channel, receiving end\n\n client_ret.disconnect_rx\n\n );\n\n\n\n printf(\" Connection ended with exit code %d\\n\", connect_success);\n\n\n\n printf(\" Freeing client instance\\n\");\n\n salty_relayed_data_client_free(client_ret.client);\n\n\n\n printf(\" Freeing channel instances\\n\");\n\n salty_channel_receiver_rx_free(client_ret.receiver_rx);\n\n salty_channel_sender_tx_free(client_ret.sender_tx);\n\n salty_channel_event_rx_free(init_ret.event_rx);\n\n\n\n printf(\" Freeing event loop\\n\");\n\n salty_event_loop_free(loop);\n\n\n\n printf(\" Freeing CA cert bytes\\n\");\n\n free(ca_cert);\n\n\n\n pthread_exit((void *)0);\n", "file_path": "ffi/tests/disconnect.c", "rank": 35, "score": 66955.31248760308 }, { "content": "static const salty_channel_receiver_rx_t *initiator_receiver = NULL;\n", "file_path": "ffi/tests/integration.c", "rank": 36, "score": 66947.77691112067 }, { "content": "static const salty_channel_disconnect_tx_t *initiator_disconnect = NULL;\n", "file_path": "ffi/tests/integration.c", "rank": 37, "score": 66947.77691112067 }, { "content": "static const salty_channel_sender_tx_t *initiator_sender = NULL;\n", "file_path": "ffi/tests/integration.c", "rank": 38, "score": 66947.77691112067 }, { "content": "bool salty_log_init_console(uint8_t level);\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 39, "score": 65981.12666574852 }, { "content": "const uint8_t *salty_keypair_private_key(const salty_keypair_t *ptr);\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 40, "score": 65980.78619451911 }, { "content": "const uint8_t *salty_keypair_public_key(const salty_keypair_t *ptr);\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 41, "score": 65980.78619451911 }, { "content": "bool salty_log_init_callback(LogFunction callback,\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 42, "score": 65980.49520487343 }, { "content": "salty_client_recv_event_ret_t salty_client_recv_event(const salty_channel_event_rx_t *event_rx,\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 43, "score": 65518.43092213048 }, { "content": "salty_client_recv_msg_ret_t salty_client_recv_msg(const salty_channel_receiver_rx_t *receiver_rx,\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 44, "score": 65518.35593067606 }, { "content": "static sem_t responder_channels_ready;\n", "file_path": "ffi/tests/integration.c", "rank": 45, "score": 64675.57355510744 }, { "content": "static sem_t initiator_channels_ready;\n", "file_path": "ffi/tests/integration.c", "rank": 46, "score": 64619.33639519514 }, { "content": "const salty_remote_t *salty_event_loop_get_remote(const salty_event_loop_t *ptr);\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 47, "score": 64285.49141668977 }, { "content": "void salty_event_loop_free_remote(const salty_remote_t *ptr);\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 48, "score": 64273.78710059435 }, { "content": "bool salty_log_change_level_console(uint8_t level);\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 49, "score": 64225.91004071958 }, { "content": "void salty_channel_disconnect_tx_free(const salty_channel_disconnect_tx_t *ptr);\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 50, "score": 64216.91631927949 }, { "content": "void salty_channel_event_tx_free(const salty_channel_event_tx_t *ptr);\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 51, "score": 64216.91631927949 }, { "content": "void salty_channel_sender_tx_free(const salty_channel_sender_tx_t *ptr);\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 52, "score": 64216.91631927949 }, { "content": "salty_relayed_data_client_ret_t salty_relayed_data_responder_new(const salty_keypair_t *keypair,\n\n const salty_remote_t *remote,\n\n uint32_t ping_interval_seconds,\n\n const uint8_t *initiator_pubkey,\n\n const uint8_t *auth_token,\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 53, "score": 64216.62858990432 }, { "content": "void salty_channel_event_rx_free(const salty_channel_event_rx_t *ptr);\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 54, "score": 64193.09207768845 }, { "content": "void salty_channel_disconnect_rx_free(const salty_channel_disconnect_rx_t *ptr);\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 55, "score": 64193.09207768845 }, { "content": "void salty_channel_receiver_rx_free(const salty_channel_receiver_rx_t *ptr);\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 56, "score": 64193.09207768845 }, { "content": "void salty_channel_sender_rx_free(const salty_channel_sender_rx_t *ptr);\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 57, "score": 64193.09207768845 }, { "content": "salty_relayed_data_client_ret_t salty_relayed_data_initiator_new(const salty_keypair_t *keypair,\n\n const salty_remote_t *remote,\n\n uint32_t ping_interval_seconds,\n\n const uint8_t *trusted_responder_key,\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 58, "score": 64167.8423515639 }, { "content": "void salty_relayed_data_client_free(const salty_client_t *ptr);\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 59, "score": 63779.02258664978 }, { "content": "salty_client_encrypt_decrypt_ret_t salty_client_decrypt_with_session_keys(const salty_client_t *client,\n\n const uint8_t *data,\n\n size_t data_len,\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 60, "score": 63778.80477014282 }, { "content": "salty_client_encrypt_decrypt_ret_t salty_client_encrypt_with_session_keys(const salty_client_t *client,\n\n const uint8_t *data,\n\n size_t data_len,\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 61, "score": 63778.80477014282 }, { "content": "void salty_client_encrypt_decrypt_free(const uint8_t *data,\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 62, "score": 63775.3474073709 }, { "content": "salty_client_send_success_t salty_client_send_application_bytes(const salty_channel_sender_tx_t *sender_tx,\n\n const uint8_t *msg,\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 63, "score": 63774.78667023897 }, { "content": "fn c_tests_run(bin: &str, logger: Option<&str>) {\n\n let (_guard, build_dir) = build_tests();\n\n\n\n // Event loop\n\n let mut core = Core::new().unwrap();\n\n\n\n // Timer\n\n let timer = Timer::default();\n\n\n\n // Create a command future\n\n let mut cmd = Command::new(bin);\n\n if let Some(l) = logger {\n\n cmd.arg(\"-l\").arg(l);\n\n }\n\n let c_tests = cmd\n\n .current_dir(&build_dir)\n\n .output_async(&core.handle());\n\n\n\n // Run command with timeout\n\n let timeout_seconds = 3;\n", "file_path": "ffi/tests/integration.rs", "rank": 64, "score": 63471.24426431905 }, { "content": "const uint8_t *salty_relayed_data_client_auth_token(const salty_client_t *ptr);\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 65, "score": 62129.24029792171 }, { "content": "void salty_client_recv_msg_ret_free(salty_client_recv_msg_ret_t recv_ret);\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 66, "score": 62128.53621471766 }, { "content": "void salty_client_recv_event_ret_free(salty_client_recv_event_ret_t recv_ret);\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 67, "score": 62128.53621471766 }, { "content": "fn assert_output_success(output: Output) {\n\n if !output.status.success() {\n\n let stdout = String::from_utf8_lossy(&output.stdout);\n\n let stderr = String::from_utf8_lossy(&output.stderr);\n\n println!(\"Stdout:\\n{}\\nStderr:\\n{}\\n\", stdout, stderr);\n\n panic!(\"Running C tests failed with non-zero return code\");\n\n }\n\n}\n\n\n", "file_path": "ffi/tests/integration.rs", "rank": 68, "score": 61812.863528911024 }, { "content": "fn level_to_u8(level: Level) -> u8 {\n\n match level {\n\n Level::Trace => LEVEL_TRACE,\n\n Level::Debug => LEVEL_DEBUG,\n\n Level::Info => LEVEL_INFO,\n\n Level::Warn => LEVEL_WARN,\n\n Level::Error => LEVEL_ERROR,\n\n }\n\n}\n\n\n\npub type LogFunction = unsafe extern \"C\" fn(level: u8, target: *const c_char, message: *const c_char);\n\n\n", "file_path": "ffi/src/saltyrtc_client_ffi.rs", "rank": 69, "score": 56577.1834433491 }, { "content": "pub fn new<'a, T>(inner: &'a mut T) -> NonBlocking<'a, T> {\n\n NonBlocking { inner }\n\n}\n\n\n\nimpl<'a, T> Future for NonBlocking<'a, T> where T: Future {\n\n type Item = Option<T::Item>;\n\n type Error = T::Error;\n\n\n\n fn poll(&mut self) -> Poll<Self::Item, Self::Error> {\n\n match self.inner.poll()? {\n\n Async::Ready(v) => Ok(Async::Ready(Some(v))),\n\n Async::NotReady => Ok(Async::Ready(None)),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::time::Duration;\n\n\n", "file_path": "ffi/src/nonblocking.rs", "rank": 70, "score": 53628.086782400984 }, { "content": "enum LogConfig {\n\n Console(LevelFilter),\n\n Callback(LogFunction, LevelFilter),\n\n}\n\n\n", "file_path": "ffi/src/saltyrtc_client_ffi.rs", "rank": 71, "score": 53402.8784737876 }, { "content": "fn u8_to_levelfilter(level: u8) -> Option<LevelFilter> {\n\n Some(match level {\n\n LEVEL_TRACE => LevelFilter::Trace,\n\n LEVEL_DEBUG => LevelFilter::Debug,\n\n LEVEL_INFO => LevelFilter::Info,\n\n LEVEL_WARN => LevelFilter::Warn,\n\n LEVEL_ERROR => LevelFilter::Error,\n\n LEVEL_OFF => LevelFilter::Off,\n\n _ => return None\n\n })\n\n}\n\n\n", "file_path": "ffi/src/saltyrtc_client_ffi.rs", "rank": 72, "score": 52043.31916016601 }, { "content": "fn main() {\n\n\n\n // Set up CLI arguments\n\n let arg_srv_host = Arg::with_name(ARG_SRV_HOST)\n\n .short(\"h\")\n\n .takes_value(true)\n\n .value_name(\"SRV_HOST\")\n\n .required(true)\n\n .default_value(\"server.saltyrtc.org\")\n\n .help(\"The SaltyRTC server hostname\");\n\n let arg_srv_port = Arg::with_name(ARG_SRV_PORT)\n\n .short(\"p\")\n\n .takes_value(true)\n\n .value_name(\"SRV_PORT\")\n\n .required(true)\n\n .default_value(\"443\")\n\n .help(\"The SaltyRTC server port\");\n\n let arg_srv_pubkey = Arg::with_name(ARG_SRV_PUBKEY)\n\n .short(\"s\")\n\n .takes_value(true)\n", "file_path": "examples/testclient.rs", "rank": 73, "score": 49625.37379428631 }, { "content": " const salty_channel_event_tx_t *event_tx;\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 74, "score": 49312.96797912666 }, { "content": " const salty_channel_disconnect_tx_t *disconnect_tx;\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 75, "score": 49312.96797912666 }, { "content": " const salty_channel_sender_tx_t *sender_tx;\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 76, "score": 49312.96797912666 }, { "content": " const salty_channel_sender_rx_t *sender_rx;\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 77, "score": 49286.804440305685 }, { "content": " const salty_channel_disconnect_rx_t *disconnect_rx;\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 78, "score": 49286.804440305685 }, { "content": " const salty_channel_receiver_rx_t *receiver_rx;\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 79, "score": 49286.804440305685 }, { "content": " const salty_channel_event_rx_t *event_rx;\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 80, "score": 49286.804440305685 }, { "content": "const salty_event_loop_t *salty_event_loop_new(void);\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 81, "score": 46386.17514410299 }, { "content": "void salty_event_loop_free(const salty_event_loop_t *ptr);\n", "file_path": "ffi/saltyrtc_task_relayed_data_ffi.h", "rank": 82, "score": 46376.35781714266 }, { "content": "int main() {\n\n printf(\"START C TEST\\n\");\n\n\n\n printf(\" START THREAD\\n\");\n\n pthread_t thread;\n\n pthread_create(&thread, NULL, connect_initiator, NULL);\n\n\n\n printf(\" WAITING FOR INIT\\n\");\n\n sem_wait(&initialized);\n\n\n\n printf(\" WAITING FOR SERVER HANDSHAKE COMPLETION\\n\");\n\n bool success = wait_for_server_handshake_completion(5000);\n\n if (!success) {\n\n printf(\" Waiting for server handshake completion failed!\");\n\n return EXIT_FAILURE;\n\n }\n\n printf(\" SERVER HANDSHAKE DONE\\n\");\n\n printf(\" DISCONNECT\\n\");\n\n salty_client_disconnect_success_t disconnect_success = salty_client_disconnect(disconnect_tx, 1001);\n\n if (disconnect_success != DISCONNECT_OK) {\n\n printf(\" Disconnect failed with code %d\\n\", disconnect_success);\n\n return EXIT_FAILURE;\n\n }\n\n\n\n printf(\" JOIN THREAD\\n\");\n\n int *result;\n\n pthread_join(thread, (void*)&result);\n\n if (result != 0) {\n\n printf(\" Thread failed\\n\");\n\n return EXIT_FAILURE;\n\n }\n\n\n\n printf(\"END C TEST\\n\");\n\n\n\n // Close stdout / stderr to please valgrind\n\n if (fclose(stdin) != 0) { printf(\"Warning: Closing stdin failed\"); }\n\n if (fclose(stdout) != 0) { printf(\"Warning: Closing stdout failed\"); }\n\n if (fclose(stderr) != 0) { printf(\"Warning: Closing stderr failed\"); }\n\n\n\n return EXIT_SUCCESS;\n", "file_path": "ffi/tests/disconnect.c", "rank": 83, "score": 45547.83262529388 }, { "content": "int main(int argc, char *argv[]) {\n\n // Parse arguments\n\n int opt;\n\n enum { LOGGER_CONSOLE, LOGGER_CALLBACK } logger = LOGGER_CONSOLE;\n\n while ((opt = getopt(argc, argv, \"l:\")) != -1) {\n\n switch (opt) {\n\n case 'l':\n\n if (strcmp(optarg, \"console\") == 0) {\n\n logger = LOGGER_CONSOLE;\n\n break;\n\n }\n\n if (strcmp(optarg, \"callback\") == 0) {\n\n logger = LOGGER_CALLBACK;\n\n break;\n\n }\n\n fprintf(stderr, \"Invalid logger mode: %s\\n\", optarg);\n\n return EXIT_FAILURE;\n\n default:\n\n fprintf(stderr, \"Usage: %s [-l LOGGER_MODE]\\n\\n\", argv[0]);\n\n fprintf(stderr, \"Note: The logger mode may be either 'console' or 'callback'.\\n\");\n\n fprintf(stderr, \" The default value is 'console'.\\n\");\n\n return EXIT_FAILURE;\n\n }\n\n }\n\n printf(\"Logger: %d\\n\", logger);\n\n\n\n printf(\"START C TEST\\n\");\n\n\n\n printf(\" Reading DER formatted test CA certificate\\n\");\n\n\n\n // Open file\n\n const char *const ca_cert_name = \"saltyrtc.der\";\n\n FILE *fd = fopen(ca_cert_name, \"rb\");\n\n if (fd == NULL) {\n\n printf(\" ERROR: Could not open `%s`\\n\", ca_cert_name);\n\n return EXIT_FAILURE;\n\n }\n\n\n\n // Get file size\n\n if (fseek(fd, 0, SEEK_END) != 0) {\n\n printf(\" ERROR: Could not fseek `%s`\\n\", ca_cert_name);\n\n return EXIT_FAILURE;\n\n }\n\n long ca_cert_len = ftell(fd);\n\n if (ca_cert_len < 0) {\n\n printf(\" ERROR: Could not ftell `%s`\\n\", ca_cert_name);\n\n return EXIT_FAILURE;\n\n } else if (ca_cert_len >= (1L << 32)) {\n\n printf(\" ERROR: ca_cert_len is larger than 2**32\\n\");\n\n return EXIT_FAILURE;\n\n }\n\n if (fseek(fd, 0, SEEK_SET) != 0) {\n\n printf(\" ERROR: Could not fseek `%s`\\n\", ca_cert_name);\n\n return EXIT_FAILURE;\n\n }\n\n\n\n // Prepare buffer\n\n uint8_t *ca_cert = malloc((size_t)ca_cert_len);\n\n if (ca_cert == NULL) {\n\n printf(\" ERROR: Could not malloc %ld bytes\\n\", ca_cert_len);\n\n return EXIT_FAILURE;\n\n }\n\n size_t read_bytes = fread(ca_cert, (size_t)ca_cert_len, 1, fd);\n\n if (read_bytes != 1) {\n\n printf(\" ERROR: Could not read file\\n\");\n\n return EXIT_FAILURE;\n\n }\n\n if (fclose(fd) != 0) printf(\"Warning: Closing ca cert file descriptor failed\");\n\n\n\n if (logger == LOGGER_CONSOLE) {\n\n printf(\" Initializing console logger (level DEBUG)\\n\");\n\n if (!salty_log_init_console(LEVEL_DEBUG)) {\n\n return EXIT_FAILURE;\n\n }\n\n printf(\" Updating logger (level WARN)\\n\");\n\n if (!salty_log_change_level_console(LEVEL_WARN)) {\n\n return EXIT_FAILURE;\n\n }\n\n } else if (logger == LOGGER_CALLBACK) {\n\n printf(\" Initializing callback logger (level DEBUG)\\n\");\n\n if (!salty_log_init_callback(log_callback, LEVEL_DEBUG)) {\n\n return EXIT_FAILURE;\n\n }\n\n }\n\n\n\n printf(\" Creating key pairs\\n\");\n\n const salty_keypair_t *i_keypair = salty_keypair_new();\n\n const salty_keypair_t *r_keypair = salty_keypair_new();\n\n const salty_keypair_t *unused_keypair = salty_keypair_new();\n\n\n\n printf(\" Restoring keypair from existing key\\n\");\n\n uint8_t *private_key_ptr = malloc(32);\n\n if (private_key_ptr == NULL) {\n\n printf(\" ERROR: Could not malloc 32 bytes\\n\");\n\n return EXIT_FAILURE;\n\n }\n\n memset(private_key_ptr, 42, 32);\n\n const salty_keypair_t *restored_keypair = salty_keypair_restore(private_key_ptr);\n\n\n\n printf(\" Extracting private key of existing keypair\\n\");\n\n const uint8_t *extracted_private_key = salty_keypair_private_key(restored_keypair);\n\n if (memcmp(private_key_ptr, extracted_private_key, 32) != 0) {\n\n printf(\" ERROR: Extracted private key does not match original private key\\n\");\n\n free(private_key_ptr);\n\n return EXIT_FAILURE;\n\n }\n\n free(private_key_ptr);\n\n\n\n printf(\" Copying public key from initiator\\n\");\n\n uint8_t *i_pubkey = malloc(32 * sizeof(uint8_t));\n\n if (i_pubkey == NULL) {\n\n printf(\" ERROR: Could not allocate memory for public key\");\n\n return EXIT_FAILURE;\n\n }\n\n const uint8_t *i_pubkey_ref = salty_keypair_public_key(i_keypair);\n\n memcpy(i_pubkey, i_pubkey_ref, 32 * sizeof(uint8_t));\n\n\n\n printf(\" Initiating semaphores\\n\");\n\n sem_init(&auth_token_set, 0, 0);\n\n sem_init(&initiator_channels_ready, 0, 0);\n\n sem_init(&responder_channels_ready, 0, 0);\n\n\n\n // Start initiator thread\n\n pthread_t i_thread;\n\n struct thread_data i_data = {\n\n .interval_seconds = 0,\n\n .timeout_seconds = 5,\n\n .keypair = i_keypair,\n\n .initiator_pubkey = NULL,\n\n .ca_cert = ca_cert,\n\n .ca_cert_len = ca_cert_len\n\n };\n\n pthread_create(&i_thread, NULL, connect_initiator, (void*)&i_data);\n\n\n\n // Start responder thread\n\n pthread_t r_thread;\n\n struct thread_data r_data = {\n\n .interval_seconds = 0,\n\n .timeout_seconds = 5,\n\n .keypair = r_keypair,\n\n .initiator_pubkey = i_pubkey,\n\n .ca_cert = ca_cert,\n\n .ca_cert_len = ca_cert_len\n\n };\n\n pthread_create(&r_thread, NULL, connect_responder, (void*)&r_data);\n\n\n\n // Waiting for connection event\n\n printf(\" Waiting for initiator tx channel...\\n\");\n\n sem_wait(&initiator_channels_ready);\n\n printf(\" Waiting for responder tx channel...\\n\");\n\n sem_wait(&responder_channels_ready);\n\n printf(\" Both outgoing channels are ready\\n\");\n\n\n\n // Send message\n\n printf(\" Sending message from initiator to responder\\n\");\n\n const uint8_t msg[] = { 0x93, 0x01, 0x02, 0x03 };\n\n if (SEND_OK != salty_client_send_task_bytes(initiator_sender, msg, 4)) {\n\n printf(\" ERROR: Sending message from initiator to responder failed\\n\");\n\n return EXIT_FAILURE;\n\n }\n\n\n\n // Receive message\n\n printf(\" Waiting for message to arrive...\\n\");\n\n uint32_t timeout_ms = 10000;\n\n const salty_client_recv_msg_ret_t recv_msg_ret = salty_client_recv_msg(responder_receiver, &timeout_ms);\n\n switch (recv_msg_ret.success) {\n\n case RECV_OK:\n\n printf(\" OK: Message (%lu bytes) from initiator arrived!\\n\", recv_msg_ret.msg->msg_bytes_len);\n\n if (recv_msg_ret.msg->msg_bytes_len != 4 ||\n\n recv_msg_ret.msg->msg_bytes[0] != 0x93 ||\n\n recv_msg_ret.msg->msg_bytes[1] != 0x01 ||\n\n recv_msg_ret.msg->msg_bytes[2] != 0x02 ||\n\n recv_msg_ret.msg->msg_bytes[3] != 0x03) {\n\n printf(\" ERROR: Invalid message received\\n\");\n\n return EXIT_FAILURE;\n\n } else {\n\n printf(\" OK: Message is valid!\\n\");\n\n }\n\n break;\n\n case RECV_NO_DATA:\n\n printf(\" ERROR: Waiting for message timed out!\\n\");\n\n return EXIT_FAILURE;\n\n case RECV_STREAM_ENDED:\n\n printf(\" ERROR: The incoming event stream has ended!\\n\");\n\n return EXIT_FAILURE;\n\n default:\n\n printf(\" ERROR: Error while waiting for incoming message\\n\");\n\n return EXIT_FAILURE;\n\n }\n\n printf(\" Freeing received event\\n\");\n\n salty_client_recv_msg_ret_free(recv_msg_ret);\n\n\n\n // Disconnect\n\n printf(\" Disconnecting initiator\\n\");\n\n salty_client_disconnect(initiator_disconnect, 1001);\n\n printf(\" Disconnecting responder\\n\");\n\n salty_client_disconnect(responder_disconnect, 1001);\n\n\n\n // Joining client threads\n\n printf(\" Waiting for client threads to terminate...\\n\");\n\n salty_client_connect_success_t *i_success;\n\n salty_client_connect_success_t *r_success;\n\n pthread_join(i_thread, (void*)&i_success);\n\n pthread_join(r_thread, (void*)&r_success);\n\n\n\n bool success = true;\n\n if (*i_success != CONNECT_OK) {\n\n printf(\"ERROR: Connecting initiator was not successful\\n\");\n\n success = false;\n\n } else {\n\n printf(\"OK: Connection initiator was successful\\n\");\n\n }\n\n free(i_success);\n\n if (*r_success != CONNECT_OK) {\n\n printf(\"ERROR: Connecting responder was not successful\\n\");\n\n success = false;\n\n } else {\n\n printf(\"OK: Connection responder was successful\\n\");\n\n }\n\n free(r_success);\n\n if (!success) {\n\n return EXIT_FAILURE;\n\n }\n\n\n\n printf(\"CLEANUP\\n\");\n\n\n\n printf(\" Freeing CA cert bytes\\n\");\n\n free(ca_cert);\n\n\n\n printf(\" Freeing public key copy\\n\");\n\n free(i_pubkey);\n\n\n\n printf(\" Freeing unused keypairs\\n\");\n\n salty_keypair_free(unused_keypair);\n\n salty_keypair_free(restored_keypair);\n\n\n\n printf(\" Destroying semaphores\\n\");\n\n sem_destroy(&auth_token_set);\n\n sem_destroy(&initiator_channels_ready);\n\n sem_destroy(&responder_channels_ready);\n\n\n\n printf(\"END C TEST\\n\");\n\n\n\n // Close stdout / stderr to please valgrind\n\n if (fclose(stdin) != 0) printf(\"Warning: Closing stdin failed\");\n\n if (fclose(stdout) != 0) printf(\"Warning: Closing stdout failed\");\n\n if (fclose(stderr) != 0) printf(\"Warning: Closing stderr failed\");\n\n\n\n return EXIT_SUCCESS;\n", "file_path": "ffi/tests/integration.c", "rank": 84, "score": 45547.83262529388 }, { "content": "struct thread_data {\n\n uint32_t interval_seconds;\n\n uint16_t timeout_seconds;\n\n const salty_keypair_t *keypair;\n\n const uint8_t *initiator_pubkey;\n\n const uint8_t *ca_cert;\n\n long ca_cert_len;\n", "file_path": "ffi/tests/integration.c", "rank": 85, "score": 43913.60530962124 }, { "content": "void drain_events(const salty_channel_event_rx_t *event_rx, char *role);\n", "file_path": "ffi/tests/integration.c", "rank": 86, "score": 43906.59913678013 }, { "content": " uint32_t interval_seconds;\n", "file_path": "ffi/tests/integration.c", "rank": 87, "score": 43906.59913678013 }, { "content": "static sem_t auth_token_set;\n", "file_path": "ffi/tests/integration.c", "rank": 88, "score": 42379.52969454451 }, { "content": "bool wait_for_server_handshake_completion(uint32_t timeout_ms);\n", "file_path": "ffi/tests/disconnect.c", "rank": 89, "score": 40955.11280731241 }, { "content": "/// Print the QR code payload to the terminal\n\nfn print_qrcode(payload: &[u8]) {\n\n let base64 = BASE64.encode(&payload);\n\n let qr = QrCode::encode_text(&base64, QrCodeEcc::Low).unwrap();\n\n let border = 1;\n\n for y in -border .. qr.size() + border {\n\n for x in -border .. qr.size() + border {\n\n let c: char = if qr.get_module(x, y) { '█' } else { ' ' };\n\n print!(\"{0}{0}\", c);\n\n }\n\n println!();\n\n }\n\n println!();\n\n}\n\n\n", "file_path": "examples/testclient.rs", "rank": 90, "score": 39041.966618046165 }, { "content": "extern crate cbindgen;\n\n\n\nuse std::env;\n\n\n", "file_path": "ffi/build.rs", "rank": 91, "score": 34304.71690351369 }, { "content": "\n\n // Set up client instance\n\n let client = Arc::new(RwLock::new({\n\n let builder = SaltyClient::build(keypair)\n\n .add_task(Box::new(task))\n\n .with_ping_interval(Some(ping_interval));\n\n match role {\n\n Role::Initiator => builder\n\n .initiator()\n\n .expect(\"Could not create SaltyClient instance\"),\n\n Role::Responder => {\n\n let auth_token_hex = args.value_of(ARG_AUTHTOKEN).expect(\"Auth token not supplied\").to_string();\n\n let auth_token = AuthToken::from_hex_str(&auth_token_hex).expect(\"Invalid auth token hex string\");\n\n let initiator_pubkey = public_key_from_hex_str(&path).unwrap();\n\n builder\n\n .responder(initiator_pubkey, auth_token)\n\n .expect(\"Could not create SaltyClient instance\")\n\n }\n\n }\n\n }));\n", "file_path": "examples/testclient.rs", "rank": 95, "score": 32.19749947519772 }, { "content": " Box::from_raw(rx_ptr as *mut salty_channel_receiver_rx_t);\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_free_channels() {\n\n let keypair = salty_keypair_new();\n\n let event_loop = salty_event_loop_new();\n\n let remote = unsafe { salty_event_loop_get_remote(event_loop) };\n\n let client_ret = unsafe { salty_relayed_data_initiator_new(keypair, remote, 0, ptr::null(), ptr::null()) };\n\n unsafe {\n\n salty_channel_receiver_rx_free(client_ret.receiver_rx);\n\n salty_channel_sender_tx_free(client_ret.sender_tx);\n\n salty_channel_sender_rx_free(client_ret.sender_rx);\n\n }\n\n }\n\n\n\n /// Using zero bytes as trusted key should fail.\n\n #[test]\n\n fn test_initiator_trusted_key_validation() {\n", "file_path": "ffi/src/lib.rs", "rank": 96, "score": 31.667142297369175 }, { "content": " // Set up logging\n\n env::set_var(\"RUST_LOG\", \"saltyrtc_client=debug,saltyrtc_task_relayed_data=debug,testclient=trace\");\n\n env_logger::init();\n\n\n\n // Tokio reactor core\n\n let mut core = Core::new().unwrap();\n\n\n\n // Create TLS connector instance\n\n let tls_connector = TlsConnector::builder()\n\n .min_protocol_version(Some(Protocol::Tlsv11))\n\n .build()\n\n .unwrap_or_else(|e| panic!(\"Could not initialize TlsConnector: {}\", e));\n\n\n\n // Create new public permanent keypair\n\n let keypair = KeyPair::new();\n\n let pubkey = keypair.public_key().clone();\n\n\n\n // Determine websocket path\n\n let path: String = match role {\n\n Role::Initiator => keypair.public_key_hex(),\n", "file_path": "examples/testclient.rs", "rank": 97, "score": 31.246124564887424 }, { "content": "#[macro_use] extern crate failure;\n\n#[macro_use] extern crate log;\n\nextern crate saltyrtc_client;\n\nextern crate tokio_core;\n\n\n\nuse std::borrow::Cow;\n\nuse std::collections::HashMap;\n\nuse std::mem;\n\n\n\nuse saltyrtc_client::{CloseCode, BoxedFuture};\n\nuse saltyrtc_client::dep::futures::future;\n\nuse saltyrtc_client::dep::futures::{Stream, Sink, Future};\n\nuse saltyrtc_client::dep::futures::sync::mpsc::{self, UnboundedSender, UnboundedReceiver};\n\nuse saltyrtc_client::dep::futures::sync::oneshot::Sender as OneshotSender;\n\nuse saltyrtc_client::dep::rmpv::Value;\n\nuse saltyrtc_client::errors::Error;\n\nuse saltyrtc_client::tasks::{Task, TaskMessage};\n\nuse tokio_core::reactor::Remote;\n\n\n\nmod errors;\n", "file_path": "src/lib.rs", "rank": 98, "score": 29.49416208562604 } ]
Rust
src/test/instruction_tests/instr_vdbpsadbw.rs
ftilde/rust-x86asm
f6584b8cfe8e75d978bf7b83a67c69444fd3f161
use instruction_def::*; use test::run_test; use Operand::*; use Reg::*; use RegScale::*; use RegType::*; use {BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode}; #[test] fn vdbpsadbw_1() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(XMM1)), operand2: Some(Direct(XMM7)), operand3: Some(Direct(XMM2)), operand4: Some(Literal8(93)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None, }, &[98, 243, 69, 142, 66, 202, 93], OperandSize::Dword, ) } #[test] fn vdbpsadbw_2() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(XMM0)), operand2: Some(Direct(XMM3)), operand3: Some(IndirectScaledDisplaced( EDI, Four, 1875484248, Some(OperandSize::Xmmword), None, )), operand4: Some(Literal8(97)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None, }, &[98, 243, 101, 140, 66, 4, 189, 88, 158, 201, 111, 97], OperandSize::Dword, ) } #[test] fn vdbpsadbw_3() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(XMM13)), operand2: Some(Direct(XMM21)), operand3: Some(Direct(XMM21)), operand4: Some(Literal8(109)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None, }, &[98, 51, 85, 132, 66, 237, 109], OperandSize::Qword, ) } #[test] fn vdbpsadbw_4() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(XMM22)), operand2: Some(Direct(XMM6)), operand3: Some(IndirectScaledDisplaced( RCX, Four, 341913354, Some(OperandSize::Xmmword), None, )), operand4: Some(Literal8(3)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None, }, &[98, 227, 77, 139, 66, 52, 141, 10, 47, 97, 20, 3], OperandSize::Qword, ) } #[test] fn vdbpsadbw_5() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(YMM7)), operand2: Some(Direct(YMM0)), operand3: Some(Direct(YMM0)), operand4: Some(Literal8(89)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None, }, &[98, 243, 125, 175, 66, 248, 89], OperandSize::Dword, ) } #[test] fn vdbpsadbw_6() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(YMM6)), operand2: Some(Direct(YMM0)), operand3: Some(IndirectScaledDisplaced( ESI, Four, 1441455171, Some(OperandSize::Ymmword), None, )), operand4: Some(Literal8(82)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None, }, &[98, 243, 125, 170, 66, 52, 181, 67, 220, 234, 85, 82], OperandSize::Dword, ) } #[test] fn vdbpsadbw_7() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(YMM13)), operand2: Some(Direct(YMM29)), operand3: Some(Direct(YMM22)), operand4: Some(Literal8(51)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None, }, &[98, 51, 21, 162, 66, 238, 51], OperandSize::Qword, ) } #[test] fn vdbpsadbw_8() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(YMM28)), operand2: Some(Direct(YMM11)), operand3: Some(IndirectScaledIndexed( RDX, RDX, Eight, Some(OperandSize::Ymmword), None, )), operand4: Some(Literal8(93)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: None, }, &[98, 99, 37, 173, 66, 36, 210, 93], OperandSize::Qword, ) } #[test] fn vdbpsadbw_9() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(ZMM1)), operand2: Some(Direct(ZMM1)), operand3: Some(Direct(ZMM4)), operand4: Some(Literal8(36)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None, }, &[98, 243, 117, 203, 66, 204, 36], OperandSize::Dword, ) } #[test] fn vdbpsadbw_10() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(ZMM2)), operand2: Some(Direct(ZMM0)), operand3: Some(IndirectScaledIndexed( ESI, EAX, Eight, Some(OperandSize::Zmmword), None, )), operand4: Some(Literal8(73)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None, }, &[98, 243, 125, 207, 66, 20, 198, 73], OperandSize::Dword, ) } #[test] fn vdbpsadbw_11() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(ZMM26)), operand2: Some(Direct(ZMM7)), operand3: Some(Direct(ZMM27)), operand4: Some(Literal8(123)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None, }, &[98, 3, 69, 203, 66, 211, 123], OperandSize::Qword, ) } #[test] fn vdbpsadbw_12() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(ZMM12)), operand2: Some(Direct(ZMM25)), operand3: Some(IndirectScaledIndexedDisplaced( RDX, RBX, Eight, 938432062, Some(OperandSize::Zmmword), None, )), operand4: Some(Literal8(83)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None, }, &[98, 115, 53, 198, 66, 164, 218, 62, 86, 239, 55, 83], OperandSize::Qword, ) }
use instruction_def::*; use test::run_test; use Operand::*; use Reg::*; use RegScale::*; use RegType::*; use {BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode}; #[test] fn vdbpsadbw_1() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(XMM1)), operand2: Some(Direct(XMM7)), operand3: So
mask: Some(MaskReg::K7), broadcast: None, }, &[98, 243, 125, 207, 66, 20, 198, 73], OperandSize::Dword, ) } #[test] fn vdbpsadbw_11() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(ZMM26)), operand2: Some(Direct(ZMM7)), operand3: Some(Direct(ZMM27)), operand4: Some(Literal8(123)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None, }, &[98, 3, 69, 203, 66, 211, 123], OperandSize::Qword, ) } #[test] fn vdbpsadbw_12() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(ZMM12)), operand2: Some(Direct(ZMM25)), operand3: Some(IndirectScaledIndexedDisplaced( RDX, RBX, Eight, 938432062, Some(OperandSize::Zmmword), None, )), operand4: Some(Literal8(83)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None, }, &[98, 115, 53, 198, 66, 164, 218, 62, 86, 239, 55, 83], OperandSize::Qword, ) }
me(Direct(XMM2)), operand4: Some(Literal8(93)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None, }, &[98, 243, 69, 142, 66, 202, 93], OperandSize::Dword, ) } #[test] fn vdbpsadbw_2() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(XMM0)), operand2: Some(Direct(XMM3)), operand3: Some(IndirectScaledDisplaced( EDI, Four, 1875484248, Some(OperandSize::Xmmword), None, )), operand4: Some(Literal8(97)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None, }, &[98, 243, 101, 140, 66, 4, 189, 88, 158, 201, 111, 97], OperandSize::Dword, ) } #[test] fn vdbpsadbw_3() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(XMM13)), operand2: Some(Direct(XMM21)), operand3: Some(Direct(XMM21)), operand4: Some(Literal8(109)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None, }, &[98, 51, 85, 132, 66, 237, 109], OperandSize::Qword, ) } #[test] fn vdbpsadbw_4() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(XMM22)), operand2: Some(Direct(XMM6)), operand3: Some(IndirectScaledDisplaced( RCX, Four, 341913354, Some(OperandSize::Xmmword), None, )), operand4: Some(Literal8(3)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None, }, &[98, 227, 77, 139, 66, 52, 141, 10, 47, 97, 20, 3], OperandSize::Qword, ) } #[test] fn vdbpsadbw_5() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(YMM7)), operand2: Some(Direct(YMM0)), operand3: Some(Direct(YMM0)), operand4: Some(Literal8(89)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None, }, &[98, 243, 125, 175, 66, 248, 89], OperandSize::Dword, ) } #[test] fn vdbpsadbw_6() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(YMM6)), operand2: Some(Direct(YMM0)), operand3: Some(IndirectScaledDisplaced( ESI, Four, 1441455171, Some(OperandSize::Ymmword), None, )), operand4: Some(Literal8(82)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None, }, &[98, 243, 125, 170, 66, 52, 181, 67, 220, 234, 85, 82], OperandSize::Dword, ) } #[test] fn vdbpsadbw_7() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(YMM13)), operand2: Some(Direct(YMM29)), operand3: Some(Direct(YMM22)), operand4: Some(Literal8(51)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None, }, &[98, 51, 21, 162, 66, 238, 51], OperandSize::Qword, ) } #[test] fn vdbpsadbw_8() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(YMM28)), operand2: Some(Direct(YMM11)), operand3: Some(IndirectScaledIndexed( RDX, RDX, Eight, Some(OperandSize::Ymmword), None, )), operand4: Some(Literal8(93)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: None, }, &[98, 99, 37, 173, 66, 36, 210, 93], OperandSize::Qword, ) } #[test] fn vdbpsadbw_9() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(ZMM1)), operand2: Some(Direct(ZMM1)), operand3: Some(Direct(ZMM4)), operand4: Some(Literal8(36)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None, }, &[98, 243, 117, 203, 66, 204, 36], OperandSize::Dword, ) } #[test] fn vdbpsadbw_10() { run_test( &Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(ZMM2)), operand2: Some(Direct(ZMM0)), operand3: Some(IndirectScaledIndexed( ESI, EAX, Eight, Some(OperandSize::Zmmword), None, )), operand4: Some(Literal8(73)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false,
random
[ { "content": "fn encode32_helper2(mnemonic: Mnemonic, operand1: Operand, operand2: Operand, expected: &Vec<u8>) {\n\n let instr = Instruction {\n\n mnemonic: mnemonic,\n\n operand1: Some(operand1),\n\n operand2: Some(operand2),\n\n operand3: None,\n\n operand4: None,\n\n ..Default::default()\n\n };\n\n encode32_helper(&instr, expected);\n\n}\n\n\n", "file_path": "src/test/mod.rs", "rank": 0, "score": 606330.1135968915 }, { "content": "fn encode16_helper2(mnemonic: Mnemonic, operand1: Operand, operand2: Operand, expected: &Vec<u8>) {\n\n let instr = Instruction {\n\n mnemonic: mnemonic,\n\n operand1: Some(operand1),\n\n operand2: Some(operand2),\n\n operand3: None,\n\n operand4: None,\n\n ..Default::default()\n\n };\n\n encode16_helper(&instr, expected);\n\n}\n\n\n", "file_path": "src/test/mod.rs", "rank": 1, "score": 606330.1135968915 }, { "content": "fn encode64_helper2(mnemonic: Mnemonic, operand1: Operand, operand2: Operand, expected: &Vec<u8>) {\n\n let instr = Instruction {\n\n mnemonic: mnemonic,\n\n operand1: Some(operand1),\n\n operand2: Some(operand2),\n\n operand3: None,\n\n operand4: None,\n\n ..Default::default()\n\n };\n\n encode64_helper(&instr, expected);\n\n}\n\n\n", "file_path": "src/test/mod.rs", "rank": 2, "score": 606330.1135968915 }, { "content": "fn random_reg_of_size(size: OperandSize) -> Reg {\n\n match size {\n\n OperandSize::Byte => random_reg_8(),\n\n OperandSize::Word => random_reg_16(),\n\n OperandSize::Dword => random_reg_32(),\n\n OperandSize::Qword => random_reg_64(),\n\n _ => panic!(\"Invalid general register size: {:?}.\", size)\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 3, "score": 405074.69505944534 }, { "content": "fn random_reg_of_size_no_stack(size: OperandSize) -> Reg {\n\n match size {\n\n OperandSize::Byte => random_reg_8(),\n\n OperandSize::Word => random_reg_16_no_stack(),\n\n OperandSize::Dword => random_reg_32_no_stack(),\n\n OperandSize::Qword => random_reg_64_no_stack(),\n\n _ => panic!(\"Invalid general register size: {:?}.\", size)\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 4, "score": 400092.43049772026 }, { "content": "#[allow(dead_code)]\n\nfn encode32_helper1(mnemonic: Mnemonic, op1: Operand, expected: &Vec<u8>) {\n\n let instr = Instruction {\n\n mnemonic: mnemonic,\n\n operand1: Some(op1),\n\n operand2: None,\n\n operand3: None,\n\n operand4: None,\n\n ..Default::default()\n\n };\n\n encode32_helper(&instr, expected);\n\n}\n", "file_path": "src/test/mod.rs", "rank": 5, "score": 383637.69327454804 }, { "content": "#[test]\n\nfn operand_type_mask_reg() {\n\n decode_helper(\n\n &vec![0x62, 0xF1, 0xED, 0x28, 0xC2, 0xDB, 0x05],\n\n Mode::Long,\n\n &Instruction::new4(\n\n Mnemonic::VCMPPD,\n\n Operand::Direct(Reg::K3),\n\n Operand::Direct(Reg::YMM2),\n\n Operand::Direct(Reg::YMM3),\n\n Operand::Literal8(5),\n\n ),\n\n ); // VCMPPD K3, YMM2, YMM3, 5\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 6, "score": 379026.03268035897 }, { "content": "fn random_imm(size: OperandSize) -> Operand {\n\n let mut gen = rand::thread_rng();\n\n match size {\n\n OperandSize::Byte => Operand::Literal8(gen.gen_range::<u8>(0, 128)),\n\n OperandSize::Word => Operand::Literal16(\n\n gen.gen_range::<u16>(u8::max_value() as u16 + 1, i16::max_value() as u16)),\n\n OperandSize::Dword => Operand::Literal32(\n\n gen.gen_range::<u32>(u16::max_value() as u32 + 1, i32::max_value() as u32)),\n\n OperandSize::Qword => Operand::Literal64(\n\n gen.gen_range::<u64>(u32::max_value() as u64 + 1, i64::max_value() as u64)),\n\n OperandSize::Far16 => Operand::MemoryAndSegment16(rand::random(), rand::random()),\n\n OperandSize::Far32 => Operand::MemoryAndSegment32(rand::random(), rand::random()),\n\n _ => panic!(\"Invalid immediate value size: {:?}.\", size)\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 7, "score": 364239.84979508806 }, { "content": "fn random_reg(reg_type: RegType, size: OperandSize, addr_size: OperandSize, \n\n def: &InstructionDefinition) -> Reg {\n\n match reg_type {\n\n RegType::General => {\n\n match size {\n\n OperandSize::Byte => random_reg_8(),\n\n OperandSize::Word => random_reg_16(),\n\n OperandSize::Dword => random_reg_32(),\n\n OperandSize::Qword => random_reg_64(),\n\n OperandSize::Unsized => random_reg_of_size(addr_size),\n\n _ => panic!(\"Invalid general register size: {:?}.\", size)\n\n }\n\n },\n\n RegType::Avx => {\n\n let allow_all = if let Some(CompositePrefix::Evex {..}) = def.composite_prefix {\n\n addr_size == OperandSize::Qword } else { false };\n\n match size {\n\n OperandSize::Xmmword => random_xmm_reg(allow_all),\n\n OperandSize::Ymmword => random_ymm_reg(allow_all),\n\n OperandSize::Zmmword => random_zmm_reg(allow_all),\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 8, "score": 362089.76032153267 }, { "content": "fn random_fixed(fixed_op: FixedOperand) -> Operand {\n\n match fixed_op {\n\n FixedOperand::Reg(reg) => Operand::Direct(reg),\n\n FixedOperand::Constant(c) => Operand::Literal8(c as u8)\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 9, "score": 360428.80400271865 }, { "content": "fn run_test(instr: &Instruction, expected: &[u8], addr_size: OperandSize) {\n\n let mut buffer = Cursor::new(Vec::new());\n\n instr\n\n .encode(&mut buffer, Mode::from_size(addr_size).unwrap())\n\n .expect(\"Encoding failed\");\n\n if &buffer.get_ref()[..] != expected {\n\n println!(\"Test failed.\");\n\n print!(\"Output: [\");\n\n output_hex_array(buffer.get_ref());\n\n println!(\"]\");\n\n print!(\"Expected: [\");\n\n output_hex_array(expected);\n\n println!(\"]\");\n\n panic!(\n\n \"Failure. Mode: {:?}.\\nInstruction: {:?}.\\n\",\n\n addr_size, instr\n\n );\n\n }\n\n}\n\n\n", "file_path": "src/test/mod.rs", "rank": 10, "score": 358425.5942229312 }, { "content": "fn random_mib(size: OperandSize, addr_size: OperandSize) -> Operand {\n\n Operand::IndirectScaledIndexed(\n\n random_reg_of_size_no_stack(addr_size),\n\n random_reg_of_size_no_stack(addr_size),\n\n RegScale::One,\n\n Some(size), None)\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 11, "score": 353851.0840731592 }, { "content": "fn random_mem(size: OperandSize, addr_size: OperandSize) -> Operand {\n\n if addr_size != OperandSize::Word {\n\n match rand::random::<u32>() % 5 { // Select addressing mode\n\n 0 => { // Indirect - [EAX]\n\n Operand::Indirect(\n\n random_reg_of_size_no_stack(addr_size),\n\n Some(size), None)\n\n },\n\n 1 => { // Indirect Displaced - [EAX+5]\n\n Operand::IndirectDisplaced(\n\n random_reg_of_size_no_stack(addr_size),\n\n (rand::random::<u32>() as u64) & 0x7FFFFFFF,\n\n Some(size), None)\n\n },\n\n 2 => { // Indirect Scaled Indexed - [EAX+EBX*2]\n\n Operand::IndirectScaledIndexed(\n\n random_reg_of_size_no_stack(addr_size),\n\n random_reg_of_size_no_stack(addr_size),\n\n random_reg_scale(),\n\n Some(size), None)\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 12, "score": 353851.0840731592 }, { "content": "fn write_operand<W: Write>(op: &Operand, instr_def: &InstructionDefinition, f: &mut W)\n\n -> io::Result<()> { \n\n match *op {\n\n Operand::Direct(reg) => write!(f, \"{}\", reg),\n\n Operand::Indirect(reg, size, seg) => \n\n write_indirect(f, Some(reg), None, None, None, size, seg, instr_def),\n\n Operand::IndirectDisplaced(reg, dsp, size, seg) =>\n\n write_indirect(f, Some(reg), None, None, Some(dsp), size, seg, instr_def),\n\n Operand::IndirectScaledIndexed(base, index, scale, size, seg) => \n\n write_indirect(f, Some(base), Some(index), Some(scale), None, size, seg, instr_def),\n\n Operand::IndirectScaledIndexedDisplaced(base, index, scale, dsp, size, seg) =>\n\n write_indirect(f, Some(base), Some(index), Some(scale), Some(dsp), size, seg,\n\n instr_def),\n\n Operand::IndirectScaledDisplaced(reg, scale, dsp, size, seg) =>\n\n write_indirect(f, Some(reg), None, Some(scale), Some(dsp), size, seg, instr_def),\n\n Operand::Memory(addr, size, seg) |\n\n Operand::Offset(addr, size, seg) => size_seg_helper(f, size, seg, |fmt| write!(fmt, \"[{}]\", addr)), // TODO Is this correct?\n\n Operand::Literal8(v) => write!(f, \"0x{:X}\", v),\n\n Operand::Literal16(v) => write!(f, \"0x{:X}\", v),\n\n Operand::Literal32(v) => write!(f, \"0x{:X}\", v),\n\n Operand::Literal64(v) => write!(f, \"0x{:X}\", v),\n\n Operand::MemoryAndSegment16(seg, addr) => write!(f, \"0x{:X}:0x{:X}\", seg, addr),\n\n Operand::MemoryAndSegment32(seg, addr) => write!(f, \"0x{:X}:0x{:X}\", seg, addr),\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 13, "score": 348761.8677862118 }, { "content": "fn random_ymm_reg(use_all: bool) -> Reg { \n\n if use_all { random_of(&[\n\n Reg::YMM0, Reg::YMM1, Reg::YMM2, Reg::YMM3, Reg::YMM4, Reg::YMM5, Reg::YMM6, Reg::YMM7,\n\n Reg::YMM8, Reg::YMM9, Reg::YMM10, Reg::YMM11, Reg::YMM12, Reg::YMM13, Reg::YMM14, Reg::YMM15,\n\n Reg::YMM16, Reg::YMM17, Reg::YMM18, Reg::YMM19, Reg::YMM20, Reg::YMM21, Reg::YMM22, Reg::YMM23,\n\n Reg::YMM24, Reg::YMM25, Reg::YMM26, Reg::YMM27, Reg::YMM28, Reg::YMM29, Reg::YMM30, Reg::YMM31\n\n ]) } else { random_of(&[\n\n Reg::YMM0, Reg::YMM1, Reg::YMM2, Reg::YMM3, Reg::YMM4, Reg::YMM5, Reg::YMM6, Reg::YMM7,\n\n ]) }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 14, "score": 348038.44714981 }, { "content": "fn random_zmm_reg(use_all: bool) -> Reg { \n\n if use_all { random_of(&[\n\n Reg::ZMM0, Reg::ZMM1, Reg::ZMM2, Reg::ZMM3, Reg::ZMM4, Reg::ZMM5, Reg::ZMM6, Reg::ZMM7,\n\n Reg::ZMM8, Reg::ZMM9, Reg::ZMM10, Reg::ZMM11, Reg::ZMM12, Reg::ZMM13, Reg::ZMM14, Reg::ZMM15,\n\n Reg::ZMM16, Reg::ZMM17, Reg::ZMM18, Reg::ZMM19, Reg::ZMM20, Reg::ZMM21, Reg::ZMM22, Reg::ZMM23,\n\n Reg::ZMM24, Reg::ZMM25, Reg::ZMM26, Reg::ZMM27, Reg::ZMM28, Reg::ZMM29, Reg::ZMM30, Reg::ZMM31\n\n ]) } else { random_of(&[\n\n Reg::ZMM0, Reg::ZMM1, Reg::ZMM2, Reg::ZMM3, Reg::ZMM4, Reg::ZMM5, Reg::ZMM6, Reg::ZMM7,\n\n ]) }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 15, "score": 348038.44714981 }, { "content": "fn random_xmm_reg(use_all: bool) -> Reg { \n\n if use_all { random_of(&[\n\n Reg::XMM0, Reg::XMM1, Reg::XMM2, Reg::XMM3, Reg::XMM4, Reg::XMM5, Reg::XMM6, Reg::XMM7,\n\n Reg::XMM8, Reg::XMM9, Reg::XMM10, Reg::XMM11, Reg::XMM12, Reg::XMM13, Reg::XMM14, Reg::XMM15,\n\n Reg::XMM16, Reg::XMM17, Reg::XMM18, Reg::XMM19, Reg::XMM20, Reg::XMM21, Reg::XMM22, Reg::XMM23,\n\n Reg::XMM24, Reg::XMM25, Reg::XMM26, Reg::XMM27, Reg::XMM28, Reg::XMM29, Reg::XMM30, Reg::XMM31\n\n ]) } else { random_of(&[\n\n Reg::XMM0, Reg::XMM1, Reg::XMM2, Reg::XMM3, Reg::XMM4, Reg::XMM5, Reg::XMM6, Reg::XMM7,\n\n ]) }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 16, "score": 348038.44714981 }, { "content": "fn make_operand_combinations(instr: &InstructionDefinition) -> Vec<Vec<OperandDefinition>> {\n\n let set_parts = instr.operands.iter().by_ref().filter_map(\n\n |maybe_op| maybe_op.as_ref().and_then(\n\n |op| if let OperandType::Set(ref items) = op.op_type {\n\n if instr.mnemonic.find(\"CVT\").is_none() {\n\n Some(items.clone())\n\n } else {\n\n Some(items.iter().filter(|i| if let OperandType::Bcst(_) = **i { false }\n\n else { true }).cloned().collect())\n\n }\n\n } else { None }\n\n )\n\n ).next();\n\n if let Some(parts) = set_parts { \n\n parts.iter().map(|part| instr.operands.iter().filter_map(\n\n |maybe_op| maybe_op.as_ref().map(|op| if let OperandType::Set(_) = op.op_type {\n\n OperandDefinition {\n\n encoding: op.encoding,\n\n access: op.access,\n\n size: op.size,\n\n op_type: part.clone()\n\n }\n\n } else { op.clone() }\n\n )).collect()).collect()\n\n } else {\n\n vec![instr.operands.iter().filter_map(|x| x.as_ref()).cloned().collect()]\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 17, "score": 328989.55420121795 }, { "content": "#[test]\n\nfn operand_type_a() {\n\n decode_helper(\n\n &vec![0x66, 0x62, 0x00],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::BOUND,\n\n Operand::Direct(Reg::AX),\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Unsized), None),\n\n ),\n\n ); // BOUND AX, [EAX]\n\n decode_helper(\n\n &vec![0x62, 0x00],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::BOUND,\n\n Operand::Direct(Reg::EAX),\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Unsized), None),\n\n ),\n\n ); // BOUND EAX, [EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 18, "score": 322405.83004577464 }, { "content": "#[test]\n\nfn operand_type_q() {\n\n decode_helper(\n\n &vec![0xFF, 0x20],\n\n Mode::Long,\n\n &Instruction::new1(\n\n Mnemonic::JMP,\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Qword), None),\n\n ),\n\n ); // JMP QWORD PTR [RAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 19, "score": 322405.83004577464 }, { "content": "#[test]\n\nfn operand_type_v() {\n\n decode_helper(\n\n &vec![0x40],\n\n Mode::Real,\n\n &Instruction::new1(Mnemonic::INC, Operand::Direct(Reg::AX)),\n\n ); // INC AX\n\n decode_helper(\n\n &vec![0x66, 0x40],\n\n Mode::Real,\n\n &Instruction::new1(Mnemonic::INC, Operand::Direct(Reg::EAX)),\n\n ); // INC EAX\n\n decode_helper(\n\n &vec![0x66, 0x40],\n\n Mode::Protected,\n\n &Instruction::new1(Mnemonic::INC, Operand::Direct(Reg::AX)),\n\n ); // INC AX\n\n decode_helper(\n\n &vec![0x40],\n\n Mode::Protected,\n\n &Instruction::new1(Mnemonic::INC, Operand::Direct(Reg::EAX)),\n\n ); // INC EAX\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 20, "score": 322405.83004577464 }, { "content": "#[test]\n\nfn operand_type_d() {\n\n decode_helper(\n\n &vec![0x0F, 0x6E, 0xD0],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::MOVD,\n\n Operand::Direct(Reg::MM2),\n\n Operand::Direct(Reg::EAX),\n\n ),\n\n ); // MOVD MM2, EAX\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 21, "score": 322405.83004577464 }, { "content": "#[test]\n\nfn operand_type_b() {\n\n decode_helper(\n\n &vec![0xC4, 0xE3, 0x79, 0x32, 0xCA, 0x05],\n\n Mode::Protected,\n\n &Instruction::new3(\n\n Mnemonic::KSHIFTLB,\n\n Operand::Direct(Reg::K1),\n\n Operand::Direct(Reg::K2),\n\n Operand::Literal8(5),\n\n ),\n\n ); // KSHIFTLB K1, K2, 5\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 22, "score": 322405.83004577464 }, { "content": "#[test]\n\nfn operand_type_w() {\n\n decode_helper(\n\n &vec![0xC8, 0x05, 0x00, 0x06],\n\n Mode::Real,\n\n &Instruction::new2(\n\n Mnemonic::ENTER,\n\n Operand::Literal16(0x5),\n\n Operand::Literal8(0x06),\n\n ),\n\n ); // ENTER 5, 6\n\n decode_helper(\n\n &vec![0xC8, 0x05, 0x00, 0x06],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::ENTER,\n\n Operand::Literal16(0x5),\n\n Operand::Literal8(0x06),\n\n ),\n\n ); // ENTER 5, 6\n\n decode_helper(\n\n &vec![0xC8, 0x05, 0x00, 0x06],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ENTER,\n\n Operand::Literal16(0x5),\n\n Operand::Literal8(0x06),\n\n ),\n\n ); // ENTER 5, 6\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 23, "score": 322405.83004577464 }, { "content": "#[test]\n\nfn operand_type_p() {\n\n decode_helper(\n\n &vec![0x9A, 0xAB, 0x89, 0x67, 0x45, 0x23, 0x01],\n\n Mode::Protected,\n\n &Instruction::new1(\n\n Mnemonic::CALL,\n\n Operand::MemoryAndSegment32(0x0123, 0x456789AB),\n\n ),\n\n ); // CALL 0x0123:0x456789AB\n\n decode_helper(\n\n &vec![0x66, 0x9A, 0x67, 0x45, 0x23, 0x01],\n\n Mode::Protected,\n\n &Instruction::new1(Mnemonic::CALL, Operand::MemoryAndSegment16(0x0123, 0x4567)),\n\n ); // CALL 0x0123:0x4567\n\n decode_helper(\n\n &vec![0x9A, 0x67, 0x45, 0x23, 0x01],\n\n Mode::Real,\n\n &Instruction::new1(Mnemonic::CALL, Operand::MemoryAndSegment16(0x0123, 0x4567)),\n\n ); // CALL 0x0123:0x4567\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 24, "score": 322405.83004577464 }, { "content": "fn build_test_operand(instr: &mut Instruction, instr_def: &InstructionDefinition,\n\n def: &OperandDefinition, addr_size: OperandSize) -> Operand {\n\n match def.op_type {\n\n OperandType::Reg(reg_type) =>\n\n Operand::Direct(random_reg(reg_type, def.size, addr_size, instr_def)),\n\n OperandType::Mem(size) => random_mem(size.unwrap_or(def.size), addr_size),\n\n OperandType::Imm => random_imm(def.size),\n\n OperandType::Offset => Operand::Offset(rand_value_of_size(def.size), Some(def.size), None),\n\n OperandType::Rel(op_size) => random_imm(op_size), // TODO Is this correct?\n\n OperandType::Mib => random_mib(def.size, addr_size),\n\n OperandType::Bcst(bcst_size) => random_mem(bcst_size, addr_size),\n\n OperandType::Fixed(fixed_op) => random_fixed(fixed_op),\n\n OperandType::Constant => unimplemented!(), // TODO What is this?\n\n _ => unreachable!() // Set(_) should be split apart already\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 25, "score": 320714.5979397588 }, { "content": "fn make_rm(size: OperandSize, reg_type: RegType) -> InstructionToken {\n\n let vec = vec![InstructionToken::Reg(reg_type, size), InstructionToken::Mem(size)];\n\n InstructionToken::Set(vec)\n\n}\n\n\n", "file_path": "gen_defs/src/main.rs", "rank": 26, "score": 318056.8734751269 }, { "content": "// Test decoding of the operand size prefix.\n\nfn operand_size_prefix() {\n\n decode_helper(\n\n &vec![0xC5, 0xE9, 0x58, 0x08],\n\n Mode::Protected,\n\n &Instruction::new3(\n\n Mnemonic::VADDPD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Xmmword), None),\n\n ),\n\n ); // VADDPD XMM1, XMM2, [EAX]\n\n}\n\n\n\n#[test]\n", "file_path": "src/test/decode.rs", "rank": 27, "score": 317057.37213678606 }, { "content": "#[test]\n\nfn operand_type_vs() {\n\n decode_helper(\n\n &vec![0x66, 0x68, 0x34, 0x12],\n\n Mode::Protected,\n\n &Instruction::new1(Mnemonic::PUSH, Operand::Literal16(0x1234)),\n\n ); // PUSH 0x1234\n\n decode_helper(\n\n &vec![0x68, 0x78, 0x56, 0x34, 0x12],\n\n Mode::Protected,\n\n &Instruction::new1(Mnemonic::PUSH, Operand::Literal32(0x12345678)),\n\n ); // PUSH 0x12345678\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 28, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_vqp() {\n\n decode_helper(\n\n &vec![0x66, 0x01, 0xC3],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADD,\n\n Operand::Direct(Reg::BX),\n\n Operand::Direct(Reg::AX),\n\n ),\n\n ); // ADD BX, AX\n\n decode_helper(\n\n &vec![0x01, 0xC3],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADD,\n\n Operand::Direct(Reg::EBX),\n\n Operand::Direct(Reg::EAX),\n\n ),\n\n ); // ADD EBX, EAX\n\n decode_helper(\n\n &vec![0x48, 0x01, 0xC3],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADD,\n\n Operand::Direct(Reg::RBX),\n\n Operand::Direct(Reg::RAX),\n\n ),\n\n ); // ADD RBX, RAX\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 29, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_ptp() {\n\n decode_helper(\n\n &vec![0xFF, 0x10],\n\n Mode::Protected,\n\n &Instruction::new1(\n\n Mnemonic::CALL,\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Dword), None),\n\n ),\n\n ); // CALL DWORD PTR [EAX]\n\n decode_helper(\n\n &vec![0xFF, 0x18],\n\n Mode::Protected,\n\n &Instruction::new1(\n\n Mnemonic::CALL,\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Fword), None),\n\n ),\n\n ); // CALL FWORD PTR [EAX]\n\n\n\n // TODO I'm not 100% sure this is correct. It seems to be from the Intel docs, but GCC won't\n\n // seem to accept this form?\n\n decode_helper(\n\n &vec![0x48, 0xFF, 0x18],\n\n Mode::Long,\n\n &Instruction::new1(\n\n Mnemonic::CALL,\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Tbyte), None),\n\n ),\n\n ); // CALL TBYTE PTR [EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 30, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_ps() {\n\n decode_helper(\n\n &vec![0x0F, 0x58, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDPS,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n ),\n\n ); // ADDPS XMM1, XMM2\n\n decode_helper(\n\n &vec![0x0F, 0x58, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDPS,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Xmmword), None),\n\n ),\n\n ); // ADDPS XMM1, [EAX]1\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 31, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_ds() {\n\n decode_helper(\n\n &vec![0xE8, 0x78, 0x56, 0x34, 0x12],\n\n Mode::Protected,\n\n &Instruction::new1(Mnemonic::CALL, Operand::Offset(0x12345678, None, None)),\n\n ); // CALL 0x12345678\n\n}\n\n\n\n// I've temporarily disabled this test as the decoding logic will need to lookahead in order to\n\n// distiguish between a standalone FWAIT instruction and an instruction prefixed with 0x9B.\n\n// #[test]\n\n// fn operand_type_e() {\n\n// decode_helper(&vec![0x9B, 0xD9, 0x30], Mode::Protected, &Instruction::new1(Mnemonic::FSTENV, Operand::Indirect(Reg::EAX, None, None))); // FSTENV [EAX]\n\n// }\n\n\n", "file_path": "src/test/decode.rs", "rank": 32, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_bs() {\n\n decode_helper(\n\n &vec![0x6B, 0xC3, 0x12],\n\n Mode::Protected,\n\n &Instruction::new3(\n\n Mnemonic::IMUL,\n\n Operand::Direct(Reg::EAX),\n\n Operand::Direct(Reg::EBX),\n\n Operand::Literal8(0x12),\n\n ),\n\n ); // IMUL EAX, EBX, 0x12\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 33, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_qp() {\n\n decode_helper(\n\n &vec![0x48, 0xCF],\n\n Mode::Long,\n\n &Instruction::new0(Mnemonic::IRETQ),\n\n ); // IRETQ\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 34, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_dr() {\n\n decode_helper(\n\n &vec![0xDC, 0x00],\n\n Mode::Protected,\n\n &Instruction::new1(\n\n Mnemonic::FADD,\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Qword), None),\n\n ),\n\n ); // FADD QWORD PTR [EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 35, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_bcd() {\n\n decode_helper(\n\n &vec![0xDF, 0x20],\n\n Mode::Protected,\n\n &Instruction::new1(\n\n Mnemonic::FBLD,\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Tbyte), None),\n\n ),\n\n ); // FBLD [EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 36, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_ymm() {\n\n decode_helper(\n\n &vec![0xC5, 0xED, 0x58, 0xCB],\n\n Mode::Long,\n\n &Instruction::new3(\n\n Mnemonic::VADDPD,\n\n Operand::Direct(Reg::YMM1),\n\n Operand::Direct(Reg::YMM2),\n\n Operand::Direct(Reg::YMM3),\n\n ),\n\n ); // VADDPD YMM1, YMM2, YMM3\n\n decode_helper(\n\n &vec![0xC5, 0xED, 0x58, 0x08],\n\n Mode::Long,\n\n &Instruction::new3(\n\n Mnemonic::VADDPD,\n\n Operand::Direct(Reg::YMM1),\n\n Operand::Direct(Reg::YMM2),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Ymmword), None),\n\n ),\n\n ); // VADDPD YMM1, YMM2, [EAX}\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 37, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_sd() {\n\n decode_helper(\n\n &vec![0xF2, 0x0F, 0x58, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDSD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n ),\n\n ); // ADDSD XMM1, XMM2\n\n decode_helper(\n\n &vec![0xF2, 0x0F, 0x58, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDSD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Qword), None),\n\n ),\n\n ); // ADDSD XMM1, [RAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 38, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_psq() {\n\n decode_helper(\n\n &vec![0x0F, 0x2C, 0xCA],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::CVTTPS2PI,\n\n Operand::Direct(Reg::MM1),\n\n Operand::Direct(Reg::XMM2),\n\n ),\n\n ); // CVTTPS2PI MM1, XMM2\n\n decode_helper(\n\n &vec![0x0F, 0x2C, 0x08],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::CVTTPS2PI,\n\n Operand::Direct(Reg::MM1),\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Qword), None),\n\n ),\n\n ); // CVTTPS2PI MM1, [EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 39, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_pd() {\n\n decode_helper(\n\n &vec![0x66, 0x0F, 0x58, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDPD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n ),\n\n ); // ADDPD XMM1, XMM2\n\n decode_helper(\n\n &vec![0x66, 0x0F, 0x58, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDPD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Xmmword), None),\n\n ),\n\n ); // ADDPD XMM1, [EAX]1\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 40, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_pi() {\n\n decode_helper(\n\n &vec![0x0F, 0x2A, 0xCA],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::CVTPI2PS,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::MM2),\n\n ),\n\n ); // CVTPI2PS XMM1, MM2\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 41, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_dq() {\n\n decode_helper(\n\n &vec![0x66, 0x0F, 0x38, 0x00, 0x08],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::PSHUFB,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Xmmword), None),\n\n ),\n\n ); // PSHUFB XMM1, [EAX]\n\n decode_helper(\n\n &vec![0x66, 0x0F, 0x38, 0x00, 0xCA],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::PSHUFB,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n ),\n\n ); // PSHUFB XMM1, [EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 42, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_vq() {\n\n decode_helper(\n\n &vec![0x66, 0x50],\n\n Mode::Long,\n\n &Instruction::new1(Mnemonic::PUSH, Operand::Direct(Reg::AX)),\n\n ); // PUSH AX\n\n decode_helper(\n\n &vec![0x50],\n\n Mode::Long,\n\n &Instruction::new1(Mnemonic::PUSH, Operand::Direct(Reg::RAX)),\n\n ); // PUSH RAX\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 43, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_dqp() {\n\n decode_helper(\n\n &vec![0xF2, 0x48, 0x0F, 0x38, 0xF0, 0xC0],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::CRC32,\n\n Operand::Direct(Reg::RAX),\n\n Operand::Direct(Reg::AL),\n\n ),\n\n ); // CRC32 RAX, AL\n\n decode_helper(\n\n &vec![0xF2, 0x0F, 0x38, 0xF0, 0xC0],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::CRC32,\n\n Operand::Direct(Reg::EAX),\n\n Operand::Direct(Reg::AL),\n\n ),\n\n ); // CRC32 EAX, AL\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 44, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_di() {\n\n decode_helper(\n\n &vec![0xDA, 0x00],\n\n Mode::Protected,\n\n &Instruction::new1(\n\n Mnemonic::FIADD,\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Dword), None),\n\n ),\n\n ); // FIADD DWORD PTR [EAX}\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 45, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_bound() {\n\n decode_helper(\n\n &vec![0xF3, 0x0F, 0x1A, 0xC8],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::BNDCL,\n\n Operand::Direct(Reg::BND1),\n\n Operand::Direct(Reg::EAX),\n\n ),\n\n ); // BNDCL BND1, EAX\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 46, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_zmm() {\n\n decode_helper(\n\n &vec![0x62, 0xF1, 0xED, 0x48, 0x58, 0xCB],\n\n Mode::Long,\n\n &Instruction::new3(\n\n Mnemonic::VADDPD,\n\n Operand::Direct(Reg::ZMM1),\n\n Operand::Direct(Reg::ZMM2),\n\n Operand::Direct(Reg::ZMM3),\n\n ),\n\n ); // VADDPD ZMM1, ZMM2, ZMM3\n\n decode_helper(\n\n &vec![0x62, 0xF1, 0xED, 0x48, 0x58, 0x08],\n\n Mode::Long,\n\n &Instruction::new3(\n\n Mnemonic::VADDPD,\n\n Operand::Direct(Reg::ZMM1),\n\n Operand::Direct(Reg::ZMM2),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Zmmword), None),\n\n ),\n\n ); // VADDPD ZMM1, ZMM2, [EAX}\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 47, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_ss() {\n\n decode_helper(\n\n &vec![0xF3, 0x0F, 0x58, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDSS,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n ),\n\n ); // ADDSS XMM1, XMM2\n\n decode_helper(\n\n &vec![0xF3, 0x0F, 0x58, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDSS,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Dword), None),\n\n ),\n\n ); // ADDSS XMM1, [RAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 48, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_er() {\n\n decode_helper(\n\n &vec![0xDB, 0x28],\n\n Mode::Protected,\n\n &Instruction::new1(\n\n Mnemonic::FLD,\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Tbyte), None),\n\n ),\n\n ); // FLD TBYTE PTR [EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 49, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_vds() {\n\n decode_helper(\n\n &vec![0x05, 0x34, 0x12],\n\n Mode::Real,\n\n &Instruction::new2(\n\n Mnemonic::ADD,\n\n Operand::Direct(Reg::AX),\n\n Operand::Literal16(0x1234),\n\n ),\n\n ); // ADD AX, 0x1234\n\n decode_helper(\n\n &vec![0x66, 0x05, 0x78, 0x56, 0x34, 0x12],\n\n Mode::Real,\n\n &Instruction::new2(\n\n Mnemonic::ADD,\n\n Operand::Direct(Reg::EAX),\n\n Operand::Literal32(0x12345678),\n\n ),\n\n ); // ADD EAX, 0x12345678\n\n decode_helper(\n", "file_path": "src/test/decode.rs", "rank": 50, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_bss() {\n\n decode_helper(\n\n &vec![0x6A, 0x12],\n\n Mode::Protected,\n\n &Instruction::new1(Mnemonic::PUSH, Operand::Literal8(0x12)),\n\n ); // PUSH 0x12\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 51, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_avx() {\n\n decode_helper(\n\n &vec![0x62, 0xF1, 0xED, 0x0A, 0x58, 0xCB],\n\n Mode::Long,\n\n &Instruction {\n\n mnemonic: Mnemonic::VADDPD,\n\n operand1: Some(Operand::Direct(Reg::XMM1)),\n\n operand2: Some(Operand::Direct(Reg::XMM2)),\n\n operand3: Some(Operand::Direct(Reg::XMM3)),\n\n mask: Some(MaskReg::K2),\n\n merge_mode: Some(MergeMode::Merge),\n\n ..Default::default()\n\n },\n\n ); // VADDPD XMM1 {K2}, XMM2, XMM3\n\n decode_helper(\n\n &vec![0x62, 0xF1, 0xED, 0x2A, 0x58, 0xCB],\n\n Mode::Long,\n\n &Instruction {\n\n mnemonic: Mnemonic::VADDPD,\n\n operand1: Some(Operand::Direct(Reg::YMM1)),\n", "file_path": "src/test/decode.rs", "rank": 52, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_xmm() {\n\n decode_helper(\n\n &vec![0xC5, 0xE9, 0x58, 0xCB],\n\n Mode::Long,\n\n &Instruction::new3(\n\n Mnemonic::VADDPD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n Operand::Direct(Reg::XMM3),\n\n ),\n\n ); // VADDPD XMM1, XMM2, XMM3\n\n decode_helper(\n\n &vec![0xC5, 0xE9, 0x58, 0x08],\n\n Mode::Long,\n\n &Instruction::new3(\n\n Mnemonic::VADDPD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Xmmword), None),\n\n ),\n\n ); // VADDPD XMM1, XMM2, [EAX}\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 53, "score": 317044.3639582245 }, { "content": "fn build_test_instruction(def: &InstructionDefinition, op_defs: Vec<OperandDefinition>,\n\n addr_size: OperandSize) -> Instruction {\n\n\n\n let mut instr = Instruction {\n\n mnemonic: def.mnemonic.clone(),\n\n .. Default::default()\n\n };\n\n\n\n let first_op_not_mem = op_defs.iter().next().map(|o| !o.op_type.is_mem()).unwrap_or(true);\n\n if def.allow_mask && first_op_not_mem { instr.mask = Some(random_mask()); }\n\n if def.allow_merge_mode && first_op_not_mem { instr.merge_mode = Some(MergeMode::Zero) }\n\n\n\n if op_defs.iter().all(|d| !d.op_type.is_mem()) {\n\n if def.allow_rounding & op_defs.iter().all(\n\n |op_def| if let OperandType::Reg(_) = op_def.op_type { true } else { false })\n\n { instr.rounding_mode = Some(random_rounding_mode()); }\n\n else if def.allow_sae { instr.sae = true; }\n\n }\n\n\n\n let broadcast_size = op_defs.iter().filter_map(\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 54, "score": 316645.5576981066 }, { "content": "fn build_test_instructions(def: &InstructionDefinition, addr_size: OperandSize) -> Vec<Instruction> {\n\n let op_combinations = make_operand_combinations(def);\n\n op_combinations.into_iter().filter(filter_op_combination)\n\n .map(|op_c| build_test_instruction(def, op_c, addr_size)).collect()\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 55, "score": 315604.4112455332 }, { "content": "#[test]\n\nfn operand_type_xmm_or_mem64() {\n\n decode_helper(\n\n &vec![0x0F, 0x5A, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::CVTPS2PD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n ),\n\n ); // CVTPS2PD XMM1, XMM2\n\n decode_helper(\n\n &vec![0x0F, 0x5A, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::CVTPS2PD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Qword), None),\n\n ),\n\n ); // CVTPS2PD XMM1, QWORD PTR [RAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 56, "score": 311894.7044301774 }, { "content": "#[test]\n\nfn operand_type_xmm_or_ymm() {\n\n decode_helper(\n\n &vec![0xC5, 0xE9, 0x58, 0xCB],\n\n Mode::Long,\n\n &Instruction::new3(\n\n Mnemonic::VADDPD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n Operand::Direct(Reg::XMM3),\n\n ),\n\n ); // VADDPD XMM1, XMM2, XMM3\n\n decode_helper(\n\n &vec![0xC5, 0xED, 0x58, 0xCB],\n\n Mode::Long,\n\n &Instruction::new3(\n\n Mnemonic::VADDPD,\n\n Operand::Direct(Reg::YMM1),\n\n Operand::Direct(Reg::YMM2),\n\n Operand::Direct(Reg::YMM3),\n\n ),\n\n ); // VADDPD YMM1, YMM2, YMM3\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 57, "score": 311894.7044301774 }, { "content": "#[test]\n\nfn operand_type_mask_or_mem_64() {\n\n decode_helper(\n\n &vec![0xC4, 0xE1, 0xF8, 0x90, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::KMOVQ,\n\n Operand::Direct(Reg::K1),\n\n Operand::Direct(Reg::K2),\n\n ),\n\n ); // KMOVQ K1, K2\n\n decode_helper(\n\n &vec![0xC4, 0xE1, 0xF8, 0x90, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::KMOVQ,\n\n Operand::Direct(Reg::K1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Qword), None),\n\n ),\n\n ); // KMOVQ K1, QWORD PTR [RAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 58, "score": 311894.7044301774 }, { "content": "#[test]\n\nfn operand_type_mask_or_mem_32() {\n\n decode_helper(\n\n &vec![0xC4, 0xE1, 0xF9, 0x90, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::KMOVD,\n\n Operand::Direct(Reg::K1),\n\n Operand::Direct(Reg::K2),\n\n ),\n\n ); // KMOVD K1, K2\n\n decode_helper(\n\n &vec![0xC4, 0xE1, 0xF9, 0x90, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::KMOVD,\n\n Operand::Direct(Reg::K1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Dword), None),\n\n ),\n\n ); // KMOVD K1, DWORD PTR [RAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 59, "score": 311894.7044301774 }, { "content": "#[test]\n\nfn operand_type_bound_or_mem() {\n\n decode_helper(\n\n &vec![0x66, 0x0F, 0x1A, 0xCA],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::BNDMOV,\n\n Operand::Direct(Reg::BND1),\n\n Operand::Direct(Reg::BND2),\n\n ),\n\n ); // BNDMOV BND1, BND2\n\n decode_helper(\n\n &vec![0x66, 0x0F, 0x1A, 0x08],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::BNDMOV,\n\n Operand::Direct(Reg::BND1),\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Qword), None),\n\n ),\n\n ); // BNDMOV BND1, QWORD PTR [EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 60, "score": 311894.7044301774 }, { "content": "#[test]\n\nfn operand_type_unsized_memory() {\n\n decode_helper(\n\n &vec![0x8D, 0x03],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::LEA,\n\n Operand::Direct(Reg::EAX),\n\n Operand::Indirect(Reg::EBX, Some(OperandSize::Unsized), None),\n\n ),\n\n ); // LEA EAX, [EBX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 61, "score": 311894.7044301774 }, { "content": "#[test]\n\nfn operand_type_fpu_register() {\n\n decode_helper(\n\n &vec![0xD8, 0xC2],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::FADD,\n\n Operand::Direct(Reg::ST),\n\n Operand::Direct(Reg::ST2),\n\n ),\n\n ); // FADD ST(2)\n\n}\n", "file_path": "src/test/decode.rs", "rank": 62, "score": 311894.7044301774 }, { "content": "#[test]\n\nfn operand_type_mask_or_mem_16() {\n\n decode_helper(\n\n &vec![0xC5, 0xF8, 0x90, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::KMOVW,\n\n Operand::Direct(Reg::K1),\n\n Operand::Direct(Reg::K2),\n\n ),\n\n ); // KMOVW K1, K2\n\n decode_helper(\n\n &vec![0xC5, 0xF8, 0x90, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::KMOVW,\n\n Operand::Direct(Reg::K1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Word), None),\n\n ),\n\n ); // KMOVW K1, BYTE PTR [RAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 63, "score": 311894.7044301774 }, { "content": "#[test]\n\nfn operand_type_mask_or_mem_8() {\n\n decode_helper(\n\n &vec![0xC5, 0xF9, 0x90, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::KMOVB,\n\n Operand::Direct(Reg::K1),\n\n Operand::Direct(Reg::K2),\n\n ),\n\n ); // KMOVB K1, K2\n\n decode_helper(\n\n &vec![0xC5, 0xF9, 0x90, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::KMOVB,\n\n Operand::Direct(Reg::K1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Byte), None),\n\n ),\n\n ); // KMOVB K1, BYTE PTR [RAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 64, "score": 311894.7044301774 }, { "content": "#[test]\n\nfn operand_type_xmm_or_mem32() {\n\n decode_helper(\n\n &vec![0xC5, 0xEA, 0x5A, 0xCB],\n\n Mode::Long,\n\n &Instruction::new3(\n\n Mnemonic::VCVTSS2SD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n Operand::Direct(Reg::XMM3),\n\n ),\n\n ); // VCVTSS2SD XMM1, XMM2\n\n decode_helper(\n\n &vec![0xC5, 0xEA, 0x5A, 0x08],\n\n Mode::Long,\n\n &Instruction::new3(\n\n Mnemonic::VCVTSS2SD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Dword), None),\n\n ),\n", "file_path": "src/test/decode.rs", "rank": 65, "score": 311894.7044301774 }, { "content": "fn parse_operand_encoding_opt(operand: &str) -> Option<(OperandEncoding, OperandAccess)> {\n\n if operand.len() != 0 {\n\n Some(parse_operand_encoding(operand.as_bytes()).unwrap().1)\n\n } else {\n\n None\n\n }\n\n}\n\n\n\nnamed!(instruction_sep, eat_separator!(&b\", \"[..]));\n\nnamed!(parse_token_list<Vec<Vec<InstructionToken>>>, separated_list!(instruction_sep, parse_instruction_part));\n\nnamed!(parse_instruction<&[u8], (String, Vec<InstructionToken>), u32>, do_parse!(\n\n mnemonic: alphanumeric >> opt!(instruction_sep) >>\n\n tokens: opt!(complete!(parse_token_list)) >>\n\n (build_result(mnemonic, tokens))\n\n )\n\n);\n\n\n", "file_path": "gen_defs/src/main.rs", "rank": 66, "score": 311486.28128727485 }, { "content": "fn random_reg_64() -> Reg\n\n { random_of(&[Reg::RBX, Reg::RCX, Reg::RDX, Reg::RSI, Reg::RDI, Reg::RSP, Reg::RBP]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 67, "score": 311326.47033738723 }, { "content": "fn random_reg_16() -> Reg\n\n { random_of(&[Reg::BX, Reg::CX, Reg::DX, Reg::SI, Reg::DI, Reg::SP, Reg::BP]) }\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 68, "score": 311326.47033738723 }, { "content": "fn random_reg_32() -> Reg\n\n { random_of(&[Reg::EBX, Reg::ECX, Reg::EDX, Reg::ESI, Reg::EDI, Reg::ESP, Reg::EBP]) }\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 69, "score": 311326.47033738723 }, { "content": "fn random_reg_64_no_stack() -> Reg\n\n { random_of(&[Reg::RAX, Reg::RBX, Reg::RCX, Reg::RDX, Reg::RSI, Reg::RDI]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 70, "score": 308105.8281911791 }, { "content": "fn random_reg_16_no_stack() -> Reg\n\n { random_of(&[Reg::AX, Reg::BX, Reg::CX, Reg::DX, Reg::SI, Reg::DI]) }\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 71, "score": 308105.8281911791 }, { "content": "fn random_mmx_reg() -> Reg\n\n { random_of(&[Reg::MM0, Reg::MM1, Reg::MM2, Reg::MM3, Reg::MM4, Reg::MM5, Reg::MM6, Reg::MM7]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 72, "score": 308105.8281911791 }, { "content": "fn random_fpu_reg() -> Reg\n\n { random_of(&[Reg::ST1, Reg::ST2, Reg::ST3, Reg::ST4, Reg::ST5, Reg::ST6, Reg::ST7]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 73, "score": 308105.8281911791 }, { "content": "fn random_mask_reg() -> Reg\n\n { random_of(&[Reg::K1, Reg::K2, Reg::K3, Reg::K4, Reg::K5, Reg::K6, Reg::K7]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 74, "score": 308105.8281911791 }, { "content": "fn random_reg_32_no_stack() -> Reg\n\n { random_of(&[Reg::EAX, Reg::EBX, Reg::ECX, Reg::EDX, Reg::ESI, Reg::EDI]) }\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 75, "score": 308105.8281911791 }, { "content": "#[test]\n\nfn infer_operand_size_16_32_instr() {\n\n encode32_helper2(\n\n Mnemonic::ADD,\n\n Operand::Direct(Reg::AX),\n\n Operand::Indirect(Reg::EBX, None, None),\n\n &vec![0x66, 0x03, 0x03],\n\n );\n\n encode32_helper2(\n\n Mnemonic::ADD,\n\n Operand::Direct(Reg::EAX),\n\n Operand::Indirect(Reg::EBX, None, None),\n\n &vec![0x03, 0x03],\n\n );\n\n}\n\n\n", "file_path": "src/test/size_inference.rs", "rank": 76, "score": 306944.54334351444 }, { "content": "#[test]\n\nfn operand_type_ymm_or_mem_or_mem32() {\n\n decode_helper(\n\n &vec![0x62, 0xF1, 0x7C, 0x48, 0x5A, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::VCVTPS2PD,\n\n Operand::Direct(Reg::ZMM1),\n\n Operand::Direct(Reg::YMM2),\n\n ),\n\n ); // VCVTPS2PD ZMM1, YMM2\n\n decode_helper(\n\n &vec![0x62, 0xF1, 0x7C, 0x48, 0x5A, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::VCVTPS2PD,\n\n Operand::Direct(Reg::ZMM1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Ymmword), None),\n\n ),\n\n ); // VCVTPS2PD ZMM1, YMMWORD PTR [RAX]\n\n decode_helper(\n", "file_path": "src/test/decode.rs", "rank": 77, "score": 306944.54334351444 }, { "content": "#[test]\n\nfn operand_type_ymm_or_mem_or_mem64() {\n\n decode_helper(\n\n &vec![0xC5, 0xFF, 0xE6, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::VCVTPD2DQ,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::YMM2),\n\n ),\n\n ); // VCVTPD2DQ XMM1, YMM2\n\n decode_helper(\n\n &vec![0xC5, 0xFF, 0xE6, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::VCVTPD2DQ,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Ymmword), None),\n\n ),\n\n ); // VCVTPD2DQ XMM1, YMMWORD PTR [RAX]\n\n decode_helper(\n", "file_path": "src/test/decode.rs", "rank": 78, "score": 306944.54334351444 }, { "content": "#[test]\n\nfn operand_type_zmm_or_mem_or_mem64() {\n\n decode_helper(\n\n &vec![0x62, 0xF1, 0xFD, 0x48, 0x5A, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::VCVTPD2PS,\n\n Operand::Direct(Reg::YMM1),\n\n Operand::Direct(Reg::ZMM2),\n\n ),\n\n ); // VCVTPD2PS YMM1, ZMM2\n\n decode_helper(\n\n &vec![0x62, 0xF1, 0xFD, 0x48, 0x5A, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::VCVTPD2PS,\n\n Operand::Direct(Reg::YMM1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Zmmword), None),\n\n ),\n\n ); // VCVTPD2PS YMM1, ZMMWORD PTR [RAX]\n\n decode_helper(\n", "file_path": "src/test/decode.rs", "rank": 79, "score": 306944.54334351444 }, { "content": "fn encode(instr: &Instruction, def: &InstructionDefinition, addr_size: OperandSize)\n\n -> io::Result<Vec<u8>> {\n\n // Write instruction to file\n\n let mut test_file = File::create(\"test.s\")?;\n\n write!(test_file, \".intel_syntax noprefix\\n\")?;\n\n write!(test_file, \".code{}\\n\", match addr_size {\n\n OperandSize::Word => \"16\",\n\n OperandSize::Dword => \"32\",\n\n OperandSize::Qword => \"64\",\n\n _ => panic!(\"Invalid addressing size.\")\n\n })?;\n\n write_instruction(instr, def, &mut test_file)?;\n\n write!(test_file, \"\\n\")?;\n\n\n\n // Run assembler\n\n let as_result = Command::new(\"as\")\n\n .args(&[\"test.s\", \"-o\", \"test.out\"])\n\n .spawn()?\n\n .wait()?;\n\n if !as_result.success() {\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 80, "score": 303187.427868437 }, { "content": "#[test]\n\nfn operand_type_xmm_or_ymm_or_mem_or_mem64() {\n\n decode_helper(\n\n &vec![0xC5, 0xF9, 0x5A, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::VCVTPD2PS,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n ),\n\n ); // VCVTPD2PS XMM1, XMM2\n\n decode_helper(\n\n &vec![0xC5, 0xF9, 0x5A, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::VCVTPD2PS,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Xmmword), None),\n\n ),\n\n ); // VCVTPD2PS XMM1, XMMWORD PTR [RAX]\n\n decode_helper(\n", "file_path": "src/test/decode.rs", "rank": 81, "score": 302182.50809514255 }, { "content": "fn vdbpsadbw_1() {\n\n run_test(&Instruction { mnemonic: Mnemonic::VDBPSADBW, operand1: Some(Direct(XMM3)), operand2: Some(Direct(XMM5)), operand3: Some(Direct(XMM7)), operand4: Some(Literal8(74)), lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 243, 85, 139, 66, 223, 74], OperandSize::Dword)\n\n}\n\n\n", "file_path": "src/test/instruction_tests/vdbpsadbw.rs", "rank": 82, "score": 300752.634987277 }, { "content": "fn random_reg_8() -> Reg { random_of(&[Reg::BL, Reg::CL, Reg::DL]) }\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 83, "score": 299957.40658541943 }, { "content": "fn write_test<W: Write>(instr: &Instruction, encoded: &[u8], addr_size: OperandSize,\n\n writer: &mut W, test_count: &mut HashMap<String, u32>) -> io::Result<()> {\n\n let test_num = test_count.entry(instr.mnemonic.clone()).or_insert(0);\n\n *test_num += 1;\n\n\n\n write!(writer, \"#[test]\\nfn {}_{}() {{\\n run_test(&{:?}, &{:?}, {:?})\\n}}\\n\\n\",\n\n instr.mnemonic.to_lowercase(), test_num, instr, encoded, addr_size)\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 85, "score": 295845.2649296818 }, { "content": "fn random_bound_reg() -> Reg { random_of(&[Reg::BND0, Reg::BND1, Reg::BND2, Reg::BND3]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 86, "score": 294212.576199507 }, { "content": "fn random_mask() -> MaskReg {\n\n random_of(&[MaskReg::K1, MaskReg::K2, MaskReg::K3, MaskReg::K4, MaskReg::K5, MaskReg::K6,\n\n MaskReg::K7])\n\n}\n\n\n\nnamed!(parse_as_output<Vec<u8>>, do_parse!(\n\n take_until_and_consume!(\"0:\\t\") >>\n\n bytes: flat_map!(\n\n take_until!(\"\\t\"),\n\n ws!(many1!(parse_u8_hex_str))\n\n ) >>\n\n (bytes)\n\n));\n\n\n\nnamed!(parse_u8_hex_str<u8>, map!(\n\n alphanumeric,\n\n |val| u8::from_str_radix(str::from_utf8(val).unwrap(), 16).unwrap()\n\n));\n\n\n\nimpl Display for Reg {\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 87, "score": 292388.0058182387 }, { "content": "fn random_control_reg() -> Reg { random_of(&[Reg::CR0, Reg::CR1, Reg::CR2, Reg::CR3, Reg::CR4 ]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 88, "score": 290615.58209834783 }, { "content": "fn instr_token_to_operand_type(token: &InstructionToken) -> (OperandType, Option<OperandSize>) {\n\n match *token {\n\n InstructionToken::Reg(reg_type, op_size)\n\n => (OperandType::Reg(reg_type), Some(op_size)),\n\n InstructionToken::Mem(op_size)\n\n => (OperandType::Mem(Some(op_size)), Some(op_size)),\n\n InstructionToken::Imm(op_size)\n\n => (OperandType::Imm, Some(op_size)),\n\n InstructionToken::Bcst(bcst_size)\n\n => (OperandType::Bcst(bcst_size), None),\n\n InstructionToken::Rel(op_size)\n\n => (OperandType::Rel(op_size), Some(op_size)),\n\n InstructionToken::Offset(op_size)\n\n => (OperandType::Offset, Some(op_size)),\n\n InstructionToken::FixedReg(reg)\n\n => (OperandType::Fixed(FixedOperand::Reg(reg)), Some(reg.size())),\n\n InstructionToken::Constant(val)\n\n => (OperandType::Fixed(FixedOperand::Constant(val)), Some(OperandSize::Unsized)),\n\n InstructionToken::Mib\n\n => (OperandType::Mib, Some(OperandSize::Unsized)),\n\n _ => panic!(\"Unsupported type: {:?}\", *token)\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/main.rs", "rank": 89, "score": 290525.33096318727 }, { "content": "pub fn emit_tests_helper(instr: &InstructionDefinition, addr_size: OperandSize, output_dir: &str,\n\n test_count: &mut HashMap<String, u32>) -> io::Result<()> {\n\n if should_skip_instr(instr) { return Ok(()); }\n\n\n\n let test_instrs = build_test_instructions(instr, addr_size);\n\n\n\n let enc_result: io::Result<Vec<Vec<u8>>> = \n\n test_instrs.iter().map(|i| encode(i, instr, addr_size)).collect();\n\n let bytes = enc_result?;\n\n let mut writer = OpenOptions::new()\n\n .append(true)\n\n .create(true)\n\n .open(format!(\"{}/{}\", output_dir, instr.mnemonic.to_lowercase()))\n\n .unwrap();\n\n\n\n test_instrs.iter().zip(bytes.iter()).fold(Ok(()),\n\n |res, (i, b)| res.and(write_test(i, b, addr_size, &mut writer, test_count)))\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 90, "score": 289957.7210800017 }, { "content": "fn random_segment_reg() -> Reg { random_of(&[Reg::CS, Reg::DS, Reg::ES, Reg::FS, Reg::GS, Reg::SS]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 91, "score": 287095.97362550016 }, { "content": "fn random_debug_reg() -> Reg { random_of(&[Reg::DR0, Reg::DR1, Reg::DR2, Reg::DR3, Reg::DR4, Reg::DR5]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 92, "score": 287095.97362550016 }, { "content": "fn size_seg_helper<F, W: Write>(f: &mut W, size: Option<OperandSize>, seg: Option<SegmentReg>,\n\n action: F) -> io::Result<()> where F: Fn(&mut W) -> io::Result<()> {\n\n write_size(f, size)?;\n\n write_seg(f, seg)?;\n\n action(f)\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 93, "score": 279399.81786843465 }, { "content": "fn compare_sizes(a: &[Option<OperandSize>; 4], b: &[Option<OperandSize>; 4]) -> bool {\n\n a.iter()\n\n .zip(b.iter())\n\n .all(|(m_s1, m_s2)| match (*m_s1, *m_s2) {\n\n (Some(s1), Some(s2)) => {\n\n s1 == s2 || s1 == OperandSize::Unsized || s2 == OperandSize::Unsized\n\n }\n\n (None, None) => true,\n\n _ => false,\n\n })\n\n}\n\n\n", "file_path": "src/instruction_def.rs", "rank": 94, "score": 279224.7992648828 }, { "content": "fn rand_value_of_size(size: OperandSize) -> u64 {\n\n match size {\n\n OperandSize::Byte => (rand::random::<u8>() as u64) & 0xF,\n\n OperandSize::Word => (rand::random::<u16>() as u64) & 0xFFF,\n\n OperandSize::Dword => (rand::random::<u32>() as u64) & 0xFFFFFF,\n\n OperandSize::Qword => (rand::random::<u64>() & 0xFFFFFFFFFFFFFF),\n\n _ => panic!(\"Invalid size.\")\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 95, "score": 276624.8324960207 }, { "content": "fn generate_implied_encoding(op_type: &OperandType, has_embedded_reg: bool) \n\n -> (OperandEncoding, OperandAccess) {\n\n match *op_type { // TODO Better implied operand access\n\n OperandType::Reg(reg_type) => {\n\n (OperandEncoding::ModRmRm, OperandAccess::Read)\n\n },\n\n OperandType::Mem(_) => (OperandEncoding::ModRmRm, OperandAccess::Read),\n\n OperandType::Imm => (OperandEncoding::Imm, OperandAccess::Read),\n\n OperandType::Constant => (OperandEncoding::Fixed, OperandAccess::Read),\n\n OperandType::Offset => (OperandEncoding::Imm, OperandAccess::Read),\n\n OperandType::Rel(_) => (OperandEncoding::Imm, OperandAccess::Read),\n\n OperandType::Mib => (OperandEncoding::ModRmRm, OperandAccess::Read),\n\n OperandType::Bcst(_) => (OperandEncoding::ModRmRm, OperandAccess::Read),\n\n OperandType::Fixed(_) => (OperandEncoding::Fixed, OperandAccess::Read),\n\n OperandType::Set(_) => (OperandEncoding::ModRmRm, OperandAccess::Read)\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/main.rs", "rank": 96, "score": 271748.73627749074 }, { "content": "fn filter_op_combination(ops: &Vec<OperandDefinition>) -> bool {\n\n ops.iter().by_ref().any(|o| match o.op_type {\n\n OperandType::Rel(_) => false,\n\n _ => true\n\n })\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 97, "score": 269531.44662573794 }, { "content": "fn parse_operand_parts_final(parts: Vec<InstructionToken>) -> InstructionToken {\n\n if parts.len() == 1 { parts.into_iter().next().unwrap() }\n\n else { InstructionToken::Set(parts) }\n\n}\n\n\n\nnamed!(type_suffix, complete!(alt_complete!(\n\n tag!(\"fp\") | \n\n tag!(\"int\") |\n\n tag!(\"dec\") |\n\n tag!(\"bcd\")\n\n)));\n\n\n\nnamed!(parse_operand_part<InstructionToken>, alt_complete!(\n\n tag!(\"imm8/r\") => { |_| InstructionToken::Imm(OperandSize::Byte) } |\n\n do_parse!(tag!(\"rel\") >> size: parse_size >> (make_sized(size, |s| InstructionToken::Rel(s))) ) |\n\n do_parse!(tag!(\"r/m\") >> size: parse_size >> (make_rm(size, RegType::General)) ) |\n\n do_parse!(tag!(\"imm\") >> size: parse_size >> (make_sized(size, |s| InstructionToken::Imm(s))) ) |\n\n do_parse!(tag!(\"moffs\") >> size: parse_size >> (make_sized(size,\n\n |s| InstructionToken::Offset(s))) ) |\n\n tag!(\"Sreg\") => { |_| InstructionToken::Reg(RegType::Segment, OperandSize::Word) } |\n", "file_path": "gen_defs/src/main.rs", "rank": 98, "score": 259641.78459854837 }, { "content": "fn get_op_sizes(def: &InstructionDefinition, instr: &Instruction) -> [Option<OperandSize>; 4] {\n\n let ops = instr.operands();\n\n let mut iter = def.operands.iter().zip(ops.iter()).map(|(def, op)| {\n\n def.as_ref()\n\n .map(|d| op.map_or(d.size, |o| d.get_real_size(&o)))\n\n });\n\n [\n\n iter.next().unwrap_or(None),\n\n iter.next().unwrap_or(None),\n\n iter.next().unwrap_or(None),\n\n iter.next().unwrap_or(None),\n\n ]\n\n}\n\n\n", "file_path": "src/instruction_def.rs", "rank": 99, "score": 257634.85019219562 } ]
Rust
src/parser.rs
opp11/chemtool2
d8cace048c5adf2ed61ab503534c287304a57644
use std::str::CharRange; use elem::{PerElem, Molecule}; use error::{CTResult, CTError}; use error::CTErrorKind::InputError; pub struct Parser { pos: usize, input: String, paren_level: u32, } impl Parser { pub fn new(input: &str) -> Parser { Parser { pos: 0, input: String::from_str(input), paren_level: 0 } } pub fn is_done(&self) -> bool { self.input.chars().skip(self.pos).all(|ch| ch.is_whitespace()) } pub fn parse_reaction(&mut self) -> CTResult<(Vec<Molecule>, Vec<Molecule>)> { let lhs = try!(self.parse_side()); self.consume_whitespace(); if self.pos + 2 >= self.input.len() || self.consume_char() != '-' || self.consume_char() != '>' { return Err(CTError { kind: InputError, desc: "Missing arrow (->) in chemical reaction".to_string(), pos: Some((self.pos - 2, 1)) }); } self.consume_whitespace(); let rhs = try!(self.parse_side()); Ok((lhs, rhs)) } pub fn parse_side(&mut self) -> CTResult<Vec<Molecule>> { let mut out = Vec::new(); let molecule = try!(self.parse_molecule()); out.push(molecule); self.consume_whitespace(); if !self.eof() && self.peek_char() == '+' { self.consume_char(); self.consume_whitespace(); let mut rest = try!(self.parse_side()); out.append(&mut rest); } Ok(out) } pub fn parse_molecule(&mut self) -> CTResult<Molecule> { let mut out = Vec::new(); let mut per = try!(self.parse_periodic()); out.append(&mut per); if !self.eof() && (self.peek_char().is_alphabetic() || self.peek_char() == '(') { let mut molecule = try!(self.parse_molecule()); out.append(&mut molecule); } if !self.eof() && self.peek_char() == ')' && self.paren_level == 0 { Err(CTError { kind: InputError, desc: "Missing opening parentheses".to_string(), pos: Some((self.pos, 1)) }) } else if !self.eof() && !self.on_legal_char() { Err(CTError { kind: InputError, desc: "Unexpected character".to_string(), pos: Some((self.pos, 1)) }) } else { Ok(out) } } fn parse_periodic(&mut self) -> CTResult<Vec<PerElem>> { let mut elem = try!(self.parse_element()); if !self.eof() && self.peek_char().is_numeric() { let coef = try!(self.parse_coefficient()); for e in elem.iter_mut() { e.coef *= coef; } } Ok(elem) } fn parse_element(&mut self) -> CTResult<Vec<PerElem>> { if self.eof() { return Err(CTError { kind: InputError, desc: "Found no periodic element".to_string(), pos: Some((self.pos, 1)) }); } let start_pos = self.pos; let first = self.consume_char(); if first == '(' { self.paren_level += 1; let molecule = try!(self.parse_molecule()); if self.eof() || self.consume_char() != ')' { Err(CTError { kind: InputError, desc: "Missing closing parentheses".to_string(), pos: Some((self.pos - 1, 1)) }) } else { self.paren_level -= 1; Ok(molecule) } } else if first.is_uppercase() { let mut name = String::new(); name.push(first); name.push_str(self.consume_while(|ch| ch.is_lowercase()).as_slice()); let len = name.len(); Ok(vec!(PerElem { name: name, coef: 1, pos: start_pos, len: len })) } else { Err(CTError { kind: InputError, desc: "Missing uppercase letter at the beginning of the element".to_string(), pos: Some((self.pos - 1, 1)) }) } } fn parse_coefficient(&mut self) -> CTResult<u32> { let start_pos = self.pos; let num_str = self.consume_while(|ch| ch.is_numeric()); if let Ok(num) = num_str.parse::<u32>() { Ok(num) } else { Err(CTError { kind: InputError, desc: "Could not parse coefficient".to_string(), pos: Some((start_pos, num_str.len())) }) } } fn peek_char(&self) -> char { self.input.char_at(self.pos) } fn consume_char(&mut self) -> char { let CharRange { ch, next } = self.input.char_range_at(self.pos); self.pos = next; ch } fn consume_while<F>(&mut self, pred: F) -> String where F: Fn(char) -> bool { let mut out = String::new(); while !self.eof() && pred(self.peek_char()) { out.push(self.consume_char()); } out } fn consume_whitespace(&mut self) { self.consume_while(|ch| ch.is_whitespace()); } fn eof(&mut self) -> bool { self.pos >= self.input.len() } fn on_legal_char(&self) -> bool { match self.peek_char() { ch if ch.is_alphanumeric() => true, '+' | '-' | '>' | '(' | ')' | ' ' => true, _ => false, } } } #[cfg(test)] mod test { use super::*; use elem::PerElem; macro_rules! check_raw_result( ($raw:expr, $expected:expr) => ( if let Ok(result) = $raw { assert_eq!(result, $expected); } else { panic!($raw); } ) ); #[test] fn elems() { let mut parser = Parser::new("CHeH"); let raw_result = parser.parse_molecule(); let expected = vec!(PerElem { name: "C".to_string(), coef: 1, pos: 0, len: 1 }, PerElem { name: "He".to_string(), coef: 1, pos: 1, len: 2 }, PerElem { name: "H".to_string(), coef: 1, pos: 3, len: 1 }); check_raw_result!(raw_result, expected); } #[test] fn coefs() { let mut parser = Parser::new("C23"); let raw_result = parser.parse_molecule(); let expected = vec!(PerElem { name: "C".to_string(), coef: 23, pos: 0, len: 1 }); check_raw_result!(raw_result, expected); } #[test] fn parens() { let mut parser = Parser::new("(CH3)2"); let raw_result = parser.parse_molecule(); let expected = vec!(PerElem { name: "C".to_string(), coef: 2, pos: 1, len: 1 }, PerElem { name: "H".to_string(), coef: 6, pos: 2, len: 1 }); check_raw_result!(raw_result, expected); } #[test] fn multiple_elems() { let mut parser = Parser::new("C + H"); let raw_result = parser.parse_side(); let expected = vec!(vec!(PerElem { name: "C".to_string(), coef: 1, pos: 0, len: 1 }), vec!(PerElem { name: "H".to_string(), coef: 1, pos: 4, len: 1 })); check_raw_result!(raw_result, expected); } #[test] fn reaction() { let mut parser = Parser::new("C -> H"); let raw_result = parser.parse_reaction(); let expected = (vec!(vec!(PerElem { name: "C".to_string(), coef: 1, pos: 0, len: 1 })), vec!(vec!(PerElem { name: "H".to_string(), coef: 1, pos: 5, len: 1 }))); check_raw_result!(raw_result, expected); } #[test] fn empty() { let mut parser = Parser::new(""); assert!(parser.parse_molecule().is_err()); assert!(parser.parse_reaction().is_err()); } #[test] fn no_uppercase() { let mut parser = Parser::new("c"); assert!(parser.parse_molecule().is_err()); assert!(parser.parse_reaction().is_err()); } #[test] fn missing_close_paren() { let mut parser = Parser::new("(C"); assert!(parser.parse_molecule().is_err()); assert!(parser.parse_reaction().is_err()); } #[test] fn missing_open_paren() { let mut parser = Parser::new("C)"); assert!(parser.parse_molecule().is_err()); assert!(parser.parse_reaction().is_err()); } #[test] fn invald_char() { let mut parser = Parser::new("%"); assert!(parser.parse_molecule().is_err()); assert!(parser.parse_reaction().is_err()); } #[test] fn is_done() { let parser = Parser::new(" "); assert!(parser.is_done()); } #[test] fn not_done() { let parser = Parser::new(" C"); assert!(!parser.is_done()); } #[test] fn invald_num() { let mut parser = Parser::new("C999999999999999999999"); assert!(parser.parse_molecule().is_err()); assert!(parser.parse_reaction().is_err()); } #[test] fn dangling_plus() { let mut parser = Parser::new("C + -> H"); assert!(parser.parse_reaction().is_err()); } }
use std::str::CharRange; use elem::{PerElem, Molecule}; use error::{CTResult, CTError}; use error::CTErrorKind::InputError; pub struct Parser { pos: usize, input: String, paren_level: u32, } impl Parser { pub fn new(input: &str) -> Parser { Parser { pos: 0, input: String::from_str(input), paren_level: 0 } } pub fn is_done(&self) -> bool { self.input.chars().skip(self.pos).all(|ch| ch.is_whitespace()) } pub fn parse_reaction(&mut self) -> CTResult<(Vec<Molecule>, Vec<Molecule>)> { let lhs = try!(self.parse_side()); self.consume_whitespace(); if self.pos + 2 >= self.input.len() || self.consume_char() != '-' || self.consume_char() != '>' { return Err(CTError { kind: InputError, desc: "Missing arrow (->) in chemical reaction".to_string(), pos: Some((self.pos - 2, 1)) }); } self.consume_whitespace(); let rhs = try!(self.parse_side()); Ok((lhs, rhs)) } pub fn parse_side(&mut self) -> CTResult<Vec<Molecule>> { let mut out = Vec::new(); let molecule = try!(self.parse_molecule()); out.push(molecule); self.consume_whitespace(); if !self.eof() && self.peek_char() == '+' { self.consume_char(); self.consume_whitespace(); let mut rest = try!(self.parse_side()); out.append(&mut rest); } Ok(out) } pub fn parse_molecule(&mut self) -> CTResult<Molecule> { let mut out = Vec::new(); let mut per = try!(self.parse_periodic()); out.append(&mut per); if !self.eof() && (self.peek_char().is_alphabetic() || self.peek_char() == '(') { let mut molecule = try!(self.parse_molecule()); out.append(&mut molecule); } if !self.eof() && self.peek_char() == ')' && self.paren_level == 0 { Err(CTError { kind: InputError, desc: "Missing opening parentheses".to_string(), pos: Some((self.pos, 1)) }) } else if !self.eof() && !self.on_legal_char() { Err(CTError { kind: InputError, desc: "Unexpected character".to_string(), pos: Some((self.pos, 1)) }) } else { Ok(out) } }
fn parse_element(&mut self) -> CTResult<Vec<PerElem>> { if self.eof() { return Err(CTError { kind: InputError, desc: "Found no periodic element".to_string(), pos: Some((self.pos, 1)) }); } let start_pos = self.pos; let first = self.consume_char(); if first == '(' { self.paren_level += 1; let molecule = try!(self.parse_molecule()); if self.eof() || self.consume_char() != ')' { Err(CTError { kind: InputError, desc: "Missing closing parentheses".to_string(), pos: Some((self.pos - 1, 1)) }) } else { self.paren_level -= 1; Ok(molecule) } } else if first.is_uppercase() { let mut name = String::new(); name.push(first); name.push_str(self.consume_while(|ch| ch.is_lowercase()).as_slice()); let len = name.len(); Ok(vec!(PerElem { name: name, coef: 1, pos: start_pos, len: len })) } else { Err(CTError { kind: InputError, desc: "Missing uppercase letter at the beginning of the element".to_string(), pos: Some((self.pos - 1, 1)) }) } } fn parse_coefficient(&mut self) -> CTResult<u32> { let start_pos = self.pos; let num_str = self.consume_while(|ch| ch.is_numeric()); if let Ok(num) = num_str.parse::<u32>() { Ok(num) } else { Err(CTError { kind: InputError, desc: "Could not parse coefficient".to_string(), pos: Some((start_pos, num_str.len())) }) } } fn peek_char(&self) -> char { self.input.char_at(self.pos) } fn consume_char(&mut self) -> char { let CharRange { ch, next } = self.input.char_range_at(self.pos); self.pos = next; ch } fn consume_while<F>(&mut self, pred: F) -> String where F: Fn(char) -> bool { let mut out = String::new(); while !self.eof() && pred(self.peek_char()) { out.push(self.consume_char()); } out } fn consume_whitespace(&mut self) { self.consume_while(|ch| ch.is_whitespace()); } fn eof(&mut self) -> bool { self.pos >= self.input.len() } fn on_legal_char(&self) -> bool { match self.peek_char() { ch if ch.is_alphanumeric() => true, '+' | '-' | '>' | '(' | ')' | ' ' => true, _ => false, } } } #[cfg(test)] mod test { use super::*; use elem::PerElem; macro_rules! check_raw_result( ($raw:expr, $expected:expr) => ( if let Ok(result) = $raw { assert_eq!(result, $expected); } else { panic!($raw); } ) ); #[test] fn elems() { let mut parser = Parser::new("CHeH"); let raw_result = parser.parse_molecule(); let expected = vec!(PerElem { name: "C".to_string(), coef: 1, pos: 0, len: 1 }, PerElem { name: "He".to_string(), coef: 1, pos: 1, len: 2 }, PerElem { name: "H".to_string(), coef: 1, pos: 3, len: 1 }); check_raw_result!(raw_result, expected); } #[test] fn coefs() { let mut parser = Parser::new("C23"); let raw_result = parser.parse_molecule(); let expected = vec!(PerElem { name: "C".to_string(), coef: 23, pos: 0, len: 1 }); check_raw_result!(raw_result, expected); } #[test] fn parens() { let mut parser = Parser::new("(CH3)2"); let raw_result = parser.parse_molecule(); let expected = vec!(PerElem { name: "C".to_string(), coef: 2, pos: 1, len: 1 }, PerElem { name: "H".to_string(), coef: 6, pos: 2, len: 1 }); check_raw_result!(raw_result, expected); } #[test] fn multiple_elems() { let mut parser = Parser::new("C + H"); let raw_result = parser.parse_side(); let expected = vec!(vec!(PerElem { name: "C".to_string(), coef: 1, pos: 0, len: 1 }), vec!(PerElem { name: "H".to_string(), coef: 1, pos: 4, len: 1 })); check_raw_result!(raw_result, expected); } #[test] fn reaction() { let mut parser = Parser::new("C -> H"); let raw_result = parser.parse_reaction(); let expected = (vec!(vec!(PerElem { name: "C".to_string(), coef: 1, pos: 0, len: 1 })), vec!(vec!(PerElem { name: "H".to_string(), coef: 1, pos: 5, len: 1 }))); check_raw_result!(raw_result, expected); } #[test] fn empty() { let mut parser = Parser::new(""); assert!(parser.parse_molecule().is_err()); assert!(parser.parse_reaction().is_err()); } #[test] fn no_uppercase() { let mut parser = Parser::new("c"); assert!(parser.parse_molecule().is_err()); assert!(parser.parse_reaction().is_err()); } #[test] fn missing_close_paren() { let mut parser = Parser::new("(C"); assert!(parser.parse_molecule().is_err()); assert!(parser.parse_reaction().is_err()); } #[test] fn missing_open_paren() { let mut parser = Parser::new("C)"); assert!(parser.parse_molecule().is_err()); assert!(parser.parse_reaction().is_err()); } #[test] fn invald_char() { let mut parser = Parser::new("%"); assert!(parser.parse_molecule().is_err()); assert!(parser.parse_reaction().is_err()); } #[test] fn is_done() { let parser = Parser::new(" "); assert!(parser.is_done()); } #[test] fn not_done() { let parser = Parser::new(" C"); assert!(!parser.is_done()); } #[test] fn invald_num() { let mut parser = Parser::new("C999999999999999999999"); assert!(parser.parse_molecule().is_err()); assert!(parser.parse_reaction().is_err()); } #[test] fn dangling_plus() { let mut parser = Parser::new("C + -> H"); assert!(parser.parse_reaction().is_err()); } }
fn parse_periodic(&mut self) -> CTResult<Vec<PerElem>> { let mut elem = try!(self.parse_element()); if !self.eof() && self.peek_char().is_numeric() { let coef = try!(self.parse_coefficient()); for e in elem.iter_mut() { e.coef *= coef; } } Ok(elem) }
function_block-full_function
[ { "content": "/// Sorts the PerElems and groups those with the same name field.\n\n///\n\n/// Grouping of two (or more) PerElems means adding the coef field of the\n\n/// duplicate to the one already found, and then throwing away the duplicate.\n\n/// E.g. CH3CH3 would turn into C2H6.\n\npub fn group_elems(mut molecule: Molecule) -> Molecule {\n\n let mut out = Vec::<PerElem>::new();\n\n molecule.as_mut_slice().sort_by(|a, b| a.name.cmp(&b.name));\n\n // since the elements are now sorted, if the current elem does not match the\n\n // last element in out (i.e. what we previously pushed), then it won't match\n\n // anything in out\n\n for elem in molecule.into_iter() {\n\n if out.last().and_then(|e| Some(e.name == elem.name)).unwrap_or(false) {\n\n out.last_mut().unwrap().coef += elem.coef;\n\n } else {\n\n out.push(elem);\n\n }\n\n }\n\n out\n\n}\n\n\n\nimpl Display for Molecule {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {\n\n for elem in self.iter() {\n\n try!(fmt.write_str(elem.name.as_slice()));\n", "file_path": "src/elem.rs", "rank": 0, "score": 104045.51824357468 }, { "content": "/// Balances a chemical reaction using Gaussian elimination and returns the coefficients\n\n///\n\n/// The balancer finds the coefficients needed to balance the reaction by treating the reaction as\n\n/// a system of linear equations and then solving the system with Gaussian elimination.\n\n/// The transformation to an equation system (represented as a matrix `R`) is as follows:\n\n/// * Each column corresponds to a molecule\n\n/// * Each row corresponds to a periodic element.\n\n/// * The number at `R[i,j]` is the amount of periocic element `i` in molecule `j`.\n\n/// * Numbers from molecules on the right hand side of the equation will be negative.\n\n/// Thus we can now solve the system to find a linear combination of the columns which will result\n\n/// in a zero-vector, and then read the coefficients from the solution.\n\npub fn balance_reaction(reaction: &(Vec<Molecule>, Vec<Molecule>)) -> CTResult<Vec<u32>> {\n\n let reac_mat = Matrix::from_reaction(reaction);\n\n let reduced_mat = try!(forward_elim(reac_mat));\n\n let coefs = back_substitute(&reduced_mat);\n\n\n\n // if any of the coefs are 0, then an element in that molecule is missing on the other side\n\n // of the reaction\n\n if let Some(pos) = coefs.iter().position(|&c| c == 0.0) {\n\n let &(ref lhs, ref rhs) = reaction;\n\n let molecule = lhs.iter().chain(rhs.iter()).nth(pos).unwrap();\n\n let begin = molecule.first().unwrap().pos;\n\n let len = molecule.last().unwrap().pos + molecule.last().unwrap().len - begin;\n\n return Err(CTError {\n\n kind: InputError,\n\n desc: format!(\"An element in {} is missing on the other side of the reaction\",\n\n molecule),\n\n pos: Some((begin, len)),\n\n })\n\n }\n\n\n", "file_path": "src/balance.rs", "rank": 1, "score": 83276.06645612928 }, { "content": "/// Takes a parsed reaction and pretty prints it to the console\n\n///\n\n/// The reaction is printed as follows:\n\n/// <coef> <molecule> + <coef> <molecule> + ... -> <coef> <molecule> + <coef> <molecule> + ...\n\npub fn pretty_print_balanced(reaction: &(Vec<Molecule>, Vec<Molecule>), coefs: &Vec<u32>) {\n\n let &(ref lhs, ref rhs) = reaction;\n\n print!(\"{} {}\", coefs[0], lhs[0]);\n\n for (coef, molecule) in coefs.iter().zip(lhs.iter()).skip(1) {\n\n print!(\" + {} {}\", coef, molecule);\n\n }\n\n print!(\" -> \");\n\n print!(\"{} {}\", coefs[lhs.len()], rhs[0]);\n\n for (coef, molecule) in coefs.iter().skip(lhs.len()).zip(rhs.iter()).skip(1) {\n\n print!(\" + {} {}\", coef, molecule);\n\n }\n\n println!(\"\");\n\n}\n\n\n", "file_path": "src/balance.rs", "rank": 2, "score": 83272.94974426995 }, { "content": "/// Takes a parsed checmical formula containing a single molecule, and pretty print the mass\n\n///\n\n/// The function will print the molar mass (and some other data) for each element\n\n/// in the given molecule, as well as the total molar mass.\n\npub fn pretty_print_data(elem_data: &Vec<ElemData>, molecule: &Molecule) {\n\n let total = elem_data.iter()\n\n .zip(molecule.iter())\n\n .fold(0f64, |t, (ref data, ref elem)| t + data.mass * elem.coef as f64);\n\n\n\n println!(\"abbrv. amt. M name Z\");\n\n println!(\"------------------------------------------------------\");\n\n for (data, elem) in elem_data.iter().zip(molecule.iter()) {\n\n println!(\"{: <3} {: >10} {: >12} {: ^12} {: >3}\",\n\n data.short_name,\n\n elem.coef,\n\n // extra format, since println! does not right-align the number\n\n // when we specify the precision\n\n format!(\"{:3.8}\", data.mass),\n\n data.long_name,\n\n data.atomic_num);\n\n }\n\n println!(\"Total: {}\", total);\n\n}", "file_path": "src/mass.rs", "rank": 3, "score": 63571.748416007904 }, { "content": "fn balance_cmd(args: &[String]) -> CTResult<()> {\n\n if args.len() < 1 {\n\n Err(CTError {\n\n kind: UsageError,\n\n desc: \"Missing reaction.\".to_string(),\n\n pos: None,\n\n })\n\n } else if args.len() > 1 {\n\n Err(CTError {\n\n kind: UsageError,\n\n desc: \"Too many arguments.\".to_string(),\n\n pos: None,\n\n })\n\n } else {\n\n let input = args[0].as_slice();\n\n let mut parser = Parser::new(input);\n\n let reaction = try!(parser.parse_reaction());\n\n let coefs = try!(balance::balance_reaction(&reaction));\n\n balance::pretty_print_balanced(&reaction, &coefs);\n\n Ok(())\n\n }\n\n}", "file_path": "src/main.rs", "rank": 4, "score": 52513.585164017175 }, { "content": "fn decode_line(line: &String) -> CTResult<ElemData> {\n\n let data: Vec<&str> = line.trim().split(';').collect();\n\n if data.len() < 4 {\n\n Err(CTError {\n\n kind: DatabaseError,\n\n desc: \"Missing field in database\".to_string(),\n\n pos: None\n\n })\n\n } else {\n\n let mass = data[1].parse::<f64>();\n\n let atomic_num = data[3].parse::<u16>();\n\n if let (Ok(m), Ok(an)) = (mass, atomic_num) {\n\n Ok(ElemData {\n\n short_name: data[0].to_string(),\n\n long_name: data[2].to_string(),\n\n mass: m,\n\n atomic_num: an,\n\n })\n\n } else {\n\n Err(CTError {\n", "file_path": "src/database.rs", "rank": 5, "score": 48616.94654597474 }, { "content": "fn forward_elim(mut mat: Matrix) -> CTResult<Matrix> {\n\n for k in 0..min(mat.width(), mat.height()) {\n\n // locate the pivot\n\n let pivot = column_abs_max_index(&mat[k..], k) + k;\n\n if mat[pivot][k] == 0_f64 {\n\n return Err(CTError {\n\n kind: InputError,\n\n desc: \"Could not balance reaction\".to_string(),\n\n pos: None,\n\n })\n\n }\n\n // move the pivot to its new position\n\n mat.switch_rows(k, pivot);\n\n\n\n // zero out the rest of the column\n\n for i in k + 1..mat.height() {\n\n let mult = -(mat[i][k] / mat[k][k]);\n\n mat.add_row_to_row(i, k, mult);\n\n }\n\n }\n\n Ok(mat)\n\n}\n\n\n", "file_path": "src/balance.rs", "rank": 6, "score": 48152.16421697447 }, { "content": "fn mass_cmd(args: &[String], db_path: &Path) -> CTResult<()> {\n\n if args.len() < 1 {\n\n Err(CTError {\n\n kind: UsageError,\n\n desc: \"Missing formula.\".to_string(),\n\n pos: None,\n\n })\n\n } else if args.len() > 1 {\n\n Err(CTError {\n\n kind: UsageError,\n\n desc: \"Too many arguments.\".to_string(),\n\n pos: None,\n\n })\n\n } else {\n\n let input = args[0].as_slice();\n\n let mut parser = Parser::new(input);\n\n let molecule = try!(parser.parse_molecule());\n\n if !parser.is_done() {\n\n // since there should be no whitespace in a molecule, the only way for parser to have\n\n // returned sucess while not being done, is if there was some whitespace,\n", "file_path": "src/main.rs", "rank": 7, "score": 46414.74614936941 }, { "content": "fn column_abs_max_index(columns: &[Vec<f64>], column: usize) -> usize {\n\n let mut max = 0;\n\n for i in 1..columns.len() {\n\n if columns[i][column].abs() > columns[max][column].abs() {\n\n max = i;\n\n }\n\n }\n\n max\n\n}\n\n\n", "file_path": "src/balance.rs", "rank": 8, "score": 40596.67718275641 }, { "content": "#[derive(Debug, PartialEq)]\n\nstruct Matrix {\n\n buf: Vec<Vec<f64>>,\n\n height: usize,\n\n width: usize,\n\n}\n\n\n\nimpl Matrix {\n\n fn from_reaction(reaction: &(Vec<Molecule>, Vec<Molecule>)) -> Matrix {\n\n let &(ref lhs, ref rhs) = reaction;\n\n let lhs: Vec<Molecule> = lhs.clone().into_iter().map(|m| elem::group_elems(m)).collect();\n\n let rhs: Vec<Molecule> = rhs.clone().into_iter().map(|m| elem::group_elems(m)).collect();\n\n let mut names = Vec::<&str>::new();\n\n // gather up all the element names in the reaction so we now how many rows will be needed\n\n for molecule in lhs.iter().chain(rhs.iter()) {\n\n for elem in molecule.iter() {\n\n if names.iter().find(|e| **e == elem.name).is_none() {\n\n names.push(elem.name.as_slice());\n\n }\n\n }\n\n }\n", "file_path": "src/balance.rs", "rank": 9, "score": 39347.42583280171 }, { "content": "#[cfg(not(test))]\n\nfn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let mut opts = Options::new();\n\n opts.optflag(\"h\", \"help\", \"Display this message and then exit.\");\n\n opts.optflag(\"v\", \"version\", \"Display the version number and then exit.\");\n\n opts.optopt(\"\", \"db-path\", \"Explicitly specify the path to the database file.\", \"PATH\");\n\n let given_opts = match opts.parse(args.tail()) {\n\n Ok(go) => go,\n\n Err(msg) => {\n\n println!(\"{}\", msg.to_string());\n\n println!(\"{}\", opts.usage(USAGE));\n\n return;\n\n },\n\n };\n\n if given_opts.opt_present(\"help\") {\n\n println!(\"{}\", opts.usage(USAGE));\n\n } else if given_opts.opt_present(\"version\") {\n\n println!(\"{}\", VERSION);\n\n } else {\n\n let path = if let Some(path) = given_opts.opt_str(\"db-path\") {\n", "file_path": "src/main.rs", "rank": 10, "score": 28696.94537207866 }, { "content": "fn back_substitute(mat: &Matrix) -> Vec<f64> {\n\n let mut vars = Vec::with_capacity(mat.width());\n\n for k in (0..mat.width()).rev() {\n\n if k >= mat.height() {\n\n // if this coef does not have a corresponding row in the matrix, then treat is as a\n\n // free variable and set it to 1\n\n vars.push(1_f64);\n\n } else {\n\n let mut var = 0f64;\n\n for i in k + 1..mat.width() {\n\n var -= mat[k][i] * vars[mat.width() - 1 - i];\n\n }\n\n vars.push(var / mat[k][k]);\n\n }\n\n }\n\n vars\n\n}\n\n\n", "file_path": "src/balance.rs", "rank": 11, "score": 22139.044317184558 }, { "content": "#[derive(Debug, PartialEq)]\n\npub struct CTError {\n\n pub kind: CTErrorKind,\n\n pub desc: String,\n\n pub pos: Option<(usize, usize)>,\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum CTErrorKind {\n\n InputError,\n\n DatabaseError,\n\n UsageError,\n\n}\n\n\n\npub type CTResult<T> = Result<T, CTError>;\n\n\n\nimpl CTError {\n\n /// Pretty-prints the CTError struct to stdout\n\n pub fn print(&self, extra_desc: Option<&String>) {\n\n println!(\"{}\", self.desc);\n", "file_path": "src/error.rs", "rank": 29, "score": 14.450088556232998 }, { "content": "use std::old_io::File;\n\nuse std::old_io::SeekStyle::SeekSet;\n\nuse std::old_io::IoErrorKind::EndOfFile;\n\nuse elem::{PerElem, Molecule};\n\nuse error::{CTError, CTResult};\n\nuse error::CTErrorKind::{InputError, DatabaseError};\n\n\n\nmacro_rules! read_err (\n\n () => (Err(CTError {\n\n kind: DatabaseError,\n\n desc: \"Error reading the database\".to_string(),\n\n pos: None\n\n }));\n\n);\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct ElemData {\n\n pub short_name: String,\n\n pub long_name: String,\n\n pub mass: f64,\n", "file_path": "src/database.rs", "rank": 30, "score": 13.761954833085055 }, { "content": "use std::fmt::{Display, Formatter, Error};\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct PerElem {\n\n pub name: String,\n\n pub coef: u32,\n\n pub pos: usize,\n\n pub len: usize,\n\n}\n\n\n\npub type Molecule = Vec<PerElem>;\n\n\n\n/// Sorts the PerElems and groups those with the same name field.\n\n///\n\n/// Grouping of two (or more) PerElems means adding the coef field of the\n\n/// duplicate to the one already found, and then throwing away the duplicate.\n\n/// E.g. CH3CH3 would turn into C2H6.\n", "file_path": "src/elem.rs", "rank": 31, "score": 12.957087093267798 }, { "content": " // followed by more (illegal) input\n\n return Err(CTError {\n\n kind: InputError,\n\n desc: \"A molecule must not contain whitespace\".to_string(),\n\n pos: None,\n\n })\n\n }\n\n\n\n let molecule = elem::group_elems(molecule);\n\n let mut database = try!(ElemDatabase::open(db_path));\n\n let data = try!(database.get_data(&molecule));\n\n mass::pretty_print_data(&data, &molecule);\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 32, "score": 12.711802267688453 }, { "content": " kind: DatabaseError,\n\n desc: \"Field in database corrupted\".to_string(),\n\n pos: None,\n\n })\n\n }\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use std::old_io::File;\n\n use std::old_io::fs;\n\n use elem::PerElem;\n\n\n\n fn make_dummy_db(name: &str, contents: &str) -> ElemDatabase {\n\n if let Err(e) = File::create(&Path::new(name)).and_then(|mut f| f.write_str(contents)) {\n\n // if we can't make the database we can't test, so just abort here\n\n panic!(\"Could not create dummy database: {:?}\", e.desc);\n", "file_path": "src/database.rs", "rank": 33, "score": 12.078013280550818 }, { "content": " pub atomic_num: u16,\n\n}\n\n\n\npub struct ElemDatabase {\n\n db: File,\n\n}\n\n\n\nimpl ElemDatabase {\n\n /// Try to make the database with the file at the given oath\n\n pub fn open(path: &Path) -> CTResult<ElemDatabase> {\n\n match File::open(path) {\n\n Ok(db_file) => Ok(ElemDatabase { db: db_file }),\n\n Err(_) => Err(CTError {\n\n kind: DatabaseError,\n\n desc: format!(\"Could not open database file. Expected at: {:?}\",\n\n path.as_str().unwrap_or(\"same directory as the program\")),\n\n pos: None,\n\n }),\n\n }\n\n }\n", "file_path": "src/database.rs", "rank": 34, "score": 10.45418003554244 }, { "content": "use std::f64;\n\nuse std::cmp::min;\n\nuse std::num::Float;\n\nuse std::ops::{Index, IndexMut, Range, RangeTo, RangeFrom, RangeFull};\n\nuse elem;\n\nuse elem::Molecule;\n\nuse error::{CTResult, CTError};\n\nuse error::CTErrorKind::InputError;\n\n\n\nmacro_rules! impl_matrix_index {\n\n ($idx:ty, $out:ty) => {\n\n impl Index<$idx> for Matrix {\n\n type Output = $out;\n\n\n\n fn index(&self, index: &$idx) -> &$out {\n\n &self.buf[*index]\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/balance.rs", "rank": 35, "score": 10.315971870152207 }, { "content": " }\n\n }\n\n\n\n fn read_line(&mut self, elem: &PerElem) -> CTResult<String> {\n\n // we know that no line in the database is more than 30 characters long\n\n let mut buf = Vec::with_capacity(30);\n\n loop {\n\n match self.db.read_byte() {\n\n Ok(b) if b == b'\\n' => break,\n\n Ok(b) => buf.push(b),\n\n Err(ref e) if e.kind == EndOfFile => return Err(CTError {\n\n kind: InputError,\n\n desc: format!(\"Could not find element: {:?}\", elem.name),\n\n pos: Some((elem.pos, elem.len)),\n\n }),\n\n Err(_) => return read_err!()\n\n }\n\n }\n\n String::from_utf8(buf).or_else(|_| read_err!())\n\n }\n\n}\n\n\n", "file_path": "src/database.rs", "rank": 36, "score": 9.460561122799398 }, { "content": " }\n\n } else {\n\n Err(CTError {\n\n kind: UsageError,\n\n desc: \"Missing command.\".to_string(),\n\n pos: None,\n\n })\n\n };\n\n\n\n match cmd_result {\n\n Err(ref e) if e.kind == InputError => e.print(Some(&args[2])),\n\n Err(ref e) if e.kind == UsageError => e.print(Some(&opts.usage(USAGE))),\n\n Err(ref e) => e.print(None),\n\n _ => ()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 37, "score": 9.387167225002568 }, { "content": "#![allow(unused_features)] // so we can still feature(os) when testing\n\n#![feature(collections, path, io, core, os, plugin, env)]\n\nextern crate getopts;\n\n\n\nuse getopts::Options;\n\nuse std::env;\n\nuse parser::Parser;\n\nuse database::ElemDatabase;\n\nuse error::{CTResult, CTError};\n\nuse error::CTErrorKind::{InputError, UsageError};\n\n\n\nmod elem;\n\nmod parser;\n\nmod error;\n\nmod database;\n\nmod mass;\n\nmod balance;\n\n\n\nconst USAGE: &'static str = \"\\\n\nUsage:\n\n chemtool mass <formula> [options]\n\n chemtool balance <reaction> [options]\n\n chemtool [-h | --help]\n\n chemtool [-v | --version]\";\n\n\n\nconst VERSION: &'static str = \"chemtool 0.4.1\";\n\n\n\n#[cfg(not(test))]\n", "file_path": "src/main.rs", "rank": 38, "score": 8.915123087230038 }, { "content": " }\n\n\n\n fn height(&self) -> usize {\n\n self.height\n\n }\n\n}\n\n\n\nimpl_matrix_index!(usize, Vec<f64>);\n\nimpl_matrix_index!(Range<usize>, [Vec<f64>]);\n\nimpl_matrix_index!(RangeTo<usize>, [Vec<f64>]);\n\nimpl_matrix_index!(RangeFrom<usize>, [Vec<f64>]);\n\nimpl_matrix_index!(RangeFull, [Vec<f64>]);\n\n\n\nimpl_matrix_index_mut!(usize, Vec<f64>);\n\nimpl_matrix_index_mut!(Range<usize>, [Vec<f64>]);\n\nimpl_matrix_index_mut!(RangeTo<usize>, [Vec<f64>]);\n\nimpl_matrix_index_mut!(RangeFrom<usize>, [Vec<f64>]);\n\nimpl_matrix_index_mut!(RangeFull, [Vec<f64>]);\n\n\n\n#[cfg(test)]\n", "file_path": "src/balance.rs", "rank": 39, "score": 8.674258985117358 }, { "content": " }\n\n ElemDatabase::open(&Path::new(name)).unwrap()\n\n }\n\n\n\n fn remove_dummy_db(name: &str) {\n\n if let Err(e) = fs::unlink(&Path::new(name)) {\n\n // if we can't remove the database something is wrong, and we abort the test\n\n panic!(\"Could not remove dummy database: {:?}\", e.desc);\n\n }\n\n }\n\n\n\n #[test]\n\n fn multiple_elems() {\n\n let db_name = \"multiple_elems_db\";\n\n let mut db = make_dummy_db(db_name,\n\n \"A;1;Abba;2\\n\\\n\n B;3;Beta;4\\n\");\n\n let raw_result = db.get_data(&vec!(\n\n PerElem { name: \"B\".to_string(), coef: 1, pos: 0, len: 1 },\n\n PerElem { name: \"A\".to_string(), coef: 1, pos: 1, len: 1 }\n", "file_path": "src/database.rs", "rank": 40, "score": 8.475486315377267 }, { "content": "mod test {\n\n use super::*;\n\n use elem::PerElem;\n\n use error::CTErrorKind::InputError;\n\n\n\n macro_rules! dummy_elem(\n\n ($name:expr) => (\n\n PerElem { name: $name.to_string(), coef: 1, pos: 0, len: 1 }\n\n );\n\n ($name:expr, $coef:expr) => (\n\n PerElem { name: $name.to_string(), coef: $coef, pos: 0, len: 1 }\n\n );\n\n );\n\n\n\n #[test]\n\n fn balance() {\n\n // attempt to balance C3H8 + O2 -> CO2 + H2O\n\n let reaction = (vec!(vec!(dummy_elem!(\"C\", 3), dummy_elem!(\"H\", 8)),\n\n vec!(dummy_elem!(\"O\", 2))),\n\n vec!(vec!(dummy_elem!(\"C\", 1), dummy_elem!(\"O\", 2)),\n", "file_path": "src/balance.rs", "rank": 41, "score": 8.294518606279308 }, { "content": " fn missing_elem() {\n\n let db_name = \"missing_elem_db\";\n\n let mut db = make_dummy_db(db_name, \"A;123.456789;Abba;12\\n\");\n\n let result = db.get_single_data(\n\n &PerElem { name: \"B\".to_string(), coef: 1, pos: 0, len: 2 }\n\n );\n\n remove_dummy_db(db_name);\n\n assert!(result.is_err());\n\n }\n\n\n\n #[test]\n\n fn missing_field() {\n\n let db_name = \"missing_field_db\";\n\n let mut db = make_dummy_db(db_name, \"A;\");\n\n let result = db.get_single_data(\n\n &PerElem { name: \"A\".to_string(), coef: 1, pos: 0, len: 2 }\n\n );\n\n remove_dummy_db(db_name);\n\n assert!(result.is_err());\n\n }\n", "file_path": "src/database.rs", "rank": 42, "score": 7.719950513808952 }, { "content": "\n\nmacro_rules! impl_matrix_index_mut {\n\n ($idx:ty, $out:ty) => {\n\n impl IndexMut<$idx> for Matrix {\n\n fn index_mut(&mut self, index: &$idx) -> &mut $out {\n\n &mut self.buf[*index]\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// Takes a parsed reaction and pretty prints it to the console\n\n///\n\n/// The reaction is printed as follows:\n\n/// <coef> <molecule> + <coef> <molecule> + ... -> <coef> <molecule> + <coef> <molecule> + ...\n", "file_path": "src/balance.rs", "rank": 43, "score": 7.717639792674559 }, { "content": " // TODO: Use a proper conversion function -- if it exists\n\n if elem.coef > 1 {\n\n try!(fmt.write_str(format!(\"{}\", elem.coef).as_slice()));\n\n }\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n macro_rules! dummy_elem(\n\n ($name:expr) => (\n\n PerElem { name: $name.to_string(), coef: 1, pos: 0, len: 1 }\n\n );\n\n ($name:expr, $coef:expr) => (\n\n PerElem { name: $name.to_string(), coef: $coef, pos: 0, len: 1 }\n\n );\n", "file_path": "src/elem.rs", "rank": 44, "score": 7.6614465389768265 }, { "content": " // some errors will have extra stuff to report to make the message clearer for the user\n\n match self.kind {\n\n CTErrorKind::InputError => {\n\n if let (Some((pos, len)), Some(input)) = (self.pos, extra_desc) {\n\n println!(\" {}\", input);\n\n print!(\" \");\n\n for _ in 0..pos {\n\n print!(\" \");\n\n }\n\n print!(\"^\");\n\n for _ in 1..len {\n\n print!(\"~\");\n\n }\n\n println!(\"\");\n\n }\n\n },\n\n CTErrorKind::UsageError => {\n\n if let Some(usage) = extra_desc {\n\n println!(\"{}\", usage);\n\n }\n\n }\n\n _ => (),\n\n }\n\n }\n\n}", "file_path": "src/error.rs", "rank": 45, "score": 7.344656327372801 }, { "content": " /// This function errors if one of the PerElem could not be found, or the\n\n /// database could not be read.\n\n pub fn get_data(&mut self, elems: &Molecule) -> CTResult<Vec<ElemData>> {\n\n let mut out = Vec::new();\n\n for elem in elems.iter() {\n\n match self.get_single_data(elem) {\n\n Ok(data) => out.push(data),\n\n Err(e) => return Err(e),\n\n }\n\n }\n\n Ok(out)\n\n }\n\n\n\n fn do_data_search(&mut self, elem: &PerElem) -> CTResult<ElemData> {\n\n loop {\n\n // TODO: make it so this function returns the 'not found' error\n\n let line = try!(self.read_line(elem));\n\n if line.starts_with(elem.name.as_slice()) {\n\n return decode_line(&line);\n\n }\n", "file_path": "src/database.rs", "rank": 46, "score": 6.365691206665867 }, { "content": " Path::new(path)\n\n } else {\n\n let mut path = Path::new(&args[0]);\n\n path.set_filename(\"elemdb.csv\");\n\n path\n\n };\n\n\n\n let cmd_result = if given_opts.free.len() > 0 {\n\n let cmd = &given_opts.free[0];\n\n let args = given_opts.free.tail();\n\n match cmd.as_slice() {\n\n \"mass\" => mass_cmd(&args, &path),\n\n \"balance\" => balance_cmd(&args),\n\n _ => {\n\n Err(CTError {\n\n kind: UsageError,\n\n desc: \"Invalid command\".to_string(),\n\n pos: None,\n\n })\n\n }\n", "file_path": "src/main.rs", "rank": 47, "score": 5.394206656335623 }, { "content": " Matrix {\n\n buf: buf,\n\n height: height,\n\n width: width,\n\n }\n\n }\n\n\n\n fn add_row_to_row(&mut self, dest: usize, row: usize, mult: f64) {\n\n for i in 0..self.width {\n\n let incr = self.buf[row][i] * mult;\n\n self.buf[dest][i] += incr;\n\n }\n\n }\n\n\n\n fn switch_rows(&mut self, row1: usize, row2: usize) {\n\n self.buf.swap(row1, row2);\n\n }\n\n\n\n fn width(&self) -> usize {\n\n self.width\n", "file_path": "src/balance.rs", "rank": 48, "score": 5.186724902788182 }, { "content": "use elem::Molecule;\n\nuse database::ElemData;\n\n\n\n/// Takes a parsed checmical formula containing a single molecule, and pretty print the mass\n\n///\n\n/// The function will print the molar mass (and some other data) for each element\n\n/// in the given molecule, as well as the total molar mass.\n", "file_path": "src/mass.rs", "rank": 49, "score": 5.027739948448879 }, { "content": "\n\n #[test]\n\n fn field_corrupted() {\n\n let db_name = \"field_corrupted_db\";\n\n let mut db = make_dummy_db(db_name, \"A;not a number;Abba;12\\n\");\n\n let result = db.get_single_data(\n\n &PerElem { name: \"A\".to_string(), coef: 1, pos: 0, len: 2 }\n\n );\n\n remove_dummy_db(db_name);\n\n assert!(result.is_err());\n\n }\n\n}", "file_path": "src/database.rs", "rank": 50, "score": 4.696556777466402 }, { "content": " fn find_elem() {\n\n let db_name = \"find_elem_db\";\n\n let mut db = make_dummy_db(db_name,\n\n \"A;0;Abba;0\\n\\\n\n B;123.456789;Beta;12\\n\\\n\n C;0;Coop;0\\n\");\n\n let raw_result = db.get_single_data(\n\n &PerElem { name: \"B\".to_string(), coef: 1, pos: 0, len: 2 }\n\n );\n\n let expected = ElemData {\n\n short_name: \"B\".to_string(),\n\n long_name: \"Beta\".to_string(),\n\n mass: 123.456789,\n\n atomic_num: 12,\n\n };\n\n remove_dummy_db(db_name);\n\n assert_eq!(Ok(expected), raw_result);\n\n }\n\n\n\n #[test]\n", "file_path": "src/database.rs", "rank": 51, "score": 4.686693238506054 }, { "content": "\n\n /// Try to get the data matching the given PerElem.\n\n ///\n\n /// This function errors if the PerElem could not be found, or the database\n\n /// could not be read.\n\n pub fn get_single_data(&mut self, elem: &PerElem) -> CTResult<ElemData> {\n\n // since the elements should be sorted before we get their data from the database\n\n // we should never have to seek back to the beginning of the file\n\n if let Ok(data) = self.do_data_search(elem) {\n\n Ok(data)\n\n } else {\n\n // but in case they weren't, we return to the beginning of the underlying file, since\n\n // the data might lie on a line we have previously read past\n\n self.db.seek(0, SeekSet).ok().expect(\"Internal error reading database\");\n\n self.do_data_search(elem)\n\n }\n\n }\n\n\n\n /// Try to get the data for all the provided PerElems.\n\n ///\n", "file_path": "src/database.rs", "rank": 52, "score": 4.523484499344601 }, { "content": " let mut buf = Vec::with_capacity(names.len());\n\n for name in names.iter() {\n\n let mut row = Vec::with_capacity(lhs.len() + rhs.len());\n\n for molecule in lhs.iter() {\n\n row.push(molecule.iter()\n\n .find(|e| e.name == *name)\n\n .and_then(|e| Some(e.coef as f64))\n\n .unwrap_or(0_f64));\n\n }\n\n // we loop over rhs seperately, since we need to multiply the coefs with -1\n\n for molecule in rhs.iter() {\n\n row.push(molecule.iter()\n\n .find(|e| e.name == *name)\n\n .and_then(|e| Some(-1.0*(e.coef as f64)))\n\n .unwrap_or(0_f64));\n\n }\n\n buf.push(row);\n\n }\n\n let height = names.len();\n\n let width = lhs.len() + rhs.len();\n", "file_path": "src/balance.rs", "rank": 53, "score": 4.060579309389319 }, { "content": " let reaction = (vec!(vec!(dummy_elem!(\"C\", 1)), vec!(dummy_elem!(\"H\", 1))),\n\n vec!(vec!(dummy_elem!(\"C\", 1)), vec!(dummy_elem!(\"H\", 1))));\n\n let result = balance_reaction(&reaction);\n\n let expected = Ok(vec!(1, 1, 1, 1));\n\n assert_eq!(result, expected);\n\n }\n\n\n\n #[test]\n\n fn missing_elem() {\n\n let reaction = (vec!(vec!(dummy_elem!(\"C\", 1)), vec!(dummy_elem!(\"H\", 1))),\n\n vec!(vec!(dummy_elem!(\"C\", 1))));\n\n let result = balance_reaction(&reaction);\n\n println!(\"{:?}\", result);\n\n assert!(result.is_err());\n\n assert_eq!(result.err().unwrap().kind, InputError);\n\n }\n\n}", "file_path": "src/balance.rs", "rank": 54, "score": 3.8215653885972123 }, { "content": "The MIT License (MIT)\n\n\n\nCopyright (c) 2015 Patrick M. Jensen, [email protected]\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in\n\nall copies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n", "file_path": "LICENSE.md", "rank": 55, "score": 2.635241153406005 }, { "content": " ));\n\n let expected = vec!(\n\n ElemData {\n\n short_name: \"B\".to_string(),\n\n long_name: \"Beta\".to_string(),\n\n mass: 3.0,\n\n atomic_num: 4,\n\n },\n\n ElemData {\n\n short_name: \"A\".to_string(),\n\n long_name: \"Abba\".to_string(),\n\n mass: 1.0,\n\n atomic_num: 2,\n\n }\n\n );\n\n remove_dummy_db(db_name);\n\n assert_eq!(Ok(expected), raw_result);\n\n }\n\n\n\n #[test]\n", "file_path": "src/database.rs", "rank": 56, "score": 2.6292327787277356 }, { "content": "CHEMTOOL\n\n========\n\nA commandline utility for making chemistry a little less tedious.\n\n\n\nSo far the program is able to calculate the molar mass for a given molecule,\n\nor balance a given chemical reaction.\n\n\n\nExamples:\n\n---------\n\nTo get the total molar mass of the molecule `CH3CH2CH3` simply call chemtool as:\n\n```\n\nchemtool mass CH3CH2CH3\n\n```\n\nWhich should generate the following output:\n\n```\n\nabbrv. amt. M name Z\n\n------------------------------------------------------\n\nC 3 12.01070000 Carbon 6\n\nH 8 1.00794000 Hydrogen 1\n\nTotal: 44.09562\n\n```\n\nTo balance a reaction - e.g. `C3H8 + O2 -> CO2 + H2O` call:\n\n```\n\nchemtool balance 'C3H8 + O2 -> CO2 + H2O'\n\n```\n\nand the following should be displayed:\n\n```\n\n1 C3H8 + 5 O2 -> 3 CO2 + 4 H2O\n\n```\n\n\n\nUSAGE\n\n=====\n\n```\n\nUsage:\n\n chemtool mass <formula> [options]\n\n chemtool balance <reaction> [options]\n\n chemtool [-h | --help]\n\n chemtool [-v | --version]\n\n\n\nOptions:\n\n -h --help Display this message and then exit.\n\n -v --version Display the version number and then exit.\n\n --db-path PATH Explicitly specify the path to the database file.\n\n```\n\n\n\nInstalling and building\n\n=======================\n\nThe easiest way to compile the project is with the `cargo` program available\n\nfrom the rust website at:\n\nhttp://www.rust-lang.org/\n\n\n\nNavigate to the project directory and call:\n\n```\n\ncargo build --release\n\n```\n\nwhich will place the binary in the target directory. Then copy the element\n\ndatabase `elemdb.csv` to the same directory as the program binary (or use\n\n`--db-path` when invoking chemtool to specify the path yourself).\n\n\n\nTesting\n\n-------\n\nTo do a quick test of the program simply call:\n\n```\n\ncargo test\n", "file_path": "README.md", "rank": 57, "score": 2.622180927500654 }, { "content": " );\n\n\n\n #[test]\n\n fn group() {\n\n let result = group_elems(vec!(dummy_elem!(\"C\"), dummy_elem!(\"H\"), dummy_elem!(\"C\")));\n\n let expected = vec!(dummy_elem!(\"C\", 2), dummy_elem!(\"H\", 1));\n\n assert_eq!(result, expected);\n\n }\n\n\n\n #[test]\n\n fn molecule_display() {\n\n let molecule = vec!(dummy_elem!(\"C\", 2), dummy_elem!(\"H\", 3));\n\n let result = format!(\"{}\", molecule);\n\n let expected = \"C2H3\";\n\n assert_eq!(result, expected);\n\n }\n\n\n\n #[test]\n\n fn molecule_display_coefs() {\n\n let molecule = vec!(dummy_elem!(\"C\", 1), dummy_elem!(\"H\", 3));\n\n let result = format!(\"{}\", molecule);\n\n let expected = \"CH3\";\n\n assert_eq!(result, expected);\n\n }\n\n}", "file_path": "src/elem.rs", "rank": 58, "score": 2.568742435482663 }, { "content": " // we find the minimum element with fold, since f64 does not implement Ord...\n\n let min = coefs.iter().fold(f64::INFINITY, |crnt, &num| {\n\n if num < crnt {\n\n num\n\n } else {\n\n crnt\n\n }\n\n });\n\n // now we divide all elements by the minimum to (hopefully) convert them all to intergers\n\n // after which we do the actual conversion\n\n // we also reverse the Vec so the coefs are in the right order\n\n Ok(coefs.iter().rev().map(|n| (n / min) as u32).collect())\n\n}\n\n\n", "file_path": "src/balance.rs", "rank": 59, "score": 1.6482018268730045 } ]
Rust
rsnes/src/smp.rs
nat-rix/rsnes
05b68de39041e68fa65184c1842c1cd7108543d6
use crate::{ backend::AudioBackend as Backend, spc700::Spc700, timing::{Cycles, APU_CPU_TIMING_PROPORTION_NTSC, APU_CPU_TIMING_PROPORTION_PAL}, }; use save_state::{InSaveState, SaveStateDeserializer, SaveStateSerializer}; use save_state_macro::InSaveState; use std::sync::mpsc::{channel, Receiver, RecvError, Sender}; #[derive(Debug, Clone)] enum Action { WriteInputPort { addr: u8, data: u8 }, ReadOutputPort { addr: u8 }, } #[derive(Debug, Clone)] enum ThreadCommand { RunCycles { cycles: Cycles, action: Option<Action>, }, SaveState(Box<Spc700>), GetSaveState, KillMe, } #[derive(Debug, Clone)] enum MainCommand { Data(u8), SaveState(Box<Spc700>), } type ReturnType = Result<(), RecvError>; #[derive(Debug)] struct Thread { join_handle: Option<std::thread::JoinHandle<ReturnType>>, send: Sender<ThreadCommand>, recv: Receiver<MainCommand>, } #[derive(Debug, InSaveState)] pub struct Smp<B: Backend> { pub spc: Option<Spc700>, #[except((|_v, _s| ()), (|_v, _s| ()))] pub backend: Option<B>, #[except(Self::serialize_save_state, Self::deserialize_save_state)] thread: Option<Thread>, timing_proportion: (Cycles, Cycles), master_cycles: Cycles, } fn threaded_spc<B: Backend>( mut spc: Spc700, mut backend: B, send: Sender<MainCommand>, recv: Receiver<ThreadCommand>, ) -> ReturnType { loop { match recv.recv()? { ThreadCommand::RunCycles { cycles, action } => { for _ in 0..cycles { if let Some(sample) = spc.run_cycle() { backend.push_sample(sample) } } match action { Some(Action::WriteInputPort { addr, data }) => { spc.input[usize::from(addr & 3)] = data } Some(Action::ReadOutputPort { addr }) => { let _ = send.send(MainCommand::Data(spc.output[usize::from(addr & 3)])); } None => (), } } ThreadCommand::SaveState(new_spc) => spc = *new_spc, ThreadCommand::GetSaveState => { let _ = send.send(MainCommand::SaveState(Box::new(spc.clone()))); } ThreadCommand::KillMe => break Ok(()), } } } impl<B: Backend> Smp<B> { pub fn new(backend: B, is_pal: bool, is_threaded: bool) -> Self { let spc = Spc700::default(); let timing_proportion = if is_pal { APU_CPU_TIMING_PROPORTION_PAL } else { APU_CPU_TIMING_PROPORTION_NTSC }; if is_threaded { let ((m_send, m_recv), (t_send, t_recv)) = (channel(), channel()); let handle = std::thread::spawn(move || threaded_spc(spc, backend, m_send, t_recv)); let thread = Some(Thread { join_handle: Some(handle), send: t_send, recv: m_recv, }); Self { spc: None, backend: None, thread, timing_proportion, master_cycles: 0, } } else { Self { spc: Some(spc), backend: Some(backend), thread: None, timing_proportion, master_cycles: 0, } } } pub fn tick(&mut self, n: u16) { self.master_cycles += Cycles::from(n) * self.timing_proportion.1; } fn refresh_counters(&mut self) -> Cycles { let cycles = self.master_cycles / self.timing_proportion.0; self.master_cycles %= self.timing_proportion.0; cycles } fn refresh_no_thread(spc: &mut Spc700, backend: &mut B, cycles: Cycles) { for _ in 0..cycles { if let Some(sample) = spc.run_cycle() { backend.push_sample(sample) } } } pub fn refresh(&mut self) { let cycles = self.refresh_counters(); if let (Some(spc), Some(backend)) = (&mut self.spc, &mut self.backend) { Self::refresh_no_thread(spc, backend, cycles) } else if let Some(thread) = &mut self.thread { let _ = thread.send.send(ThreadCommand::RunCycles { cycles, action: None, }); } else { unreachable!() } } pub fn read_output_port(&mut self, addr: u8) -> u8 { let cycles = self.refresh_counters(); if let (Some(spc), Some(backend)) = (&mut self.spc, &mut self.backend) { Self::refresh_no_thread(spc, backend, cycles); spc.output[usize::from(addr & 3)] } else if let Some(thread) = &mut self.thread { let _ = thread.send.send(ThreadCommand::RunCycles { cycles, action: Some(Action::ReadOutputPort { addr }), }); match thread.recv.recv().unwrap() { MainCommand::Data(d) => d, _ => panic!(), } } else { unreachable!() } } pub fn write_input_port(&mut self, addr: u8, data: u8) { let cycles = self.refresh_counters(); if let (Some(spc), Some(backend)) = (&mut self.spc, &mut self.backend) { Self::refresh_no_thread(spc, backend, cycles); spc.input[usize::from(addr & 3)] = data } else if let Some(thread) = &mut self.thread { let _ = thread.send.send(ThreadCommand::RunCycles { cycles, action: Some(Action::WriteInputPort { addr, data }), }); } else { unreachable!() } } pub fn is_threaded(&self) -> bool { self.thread.is_some() } fn serialize_save_state(thread: &Option<Thread>, ser: &mut SaveStateSerializer) { if let Some(thread) = thread { thread.send.send(ThreadCommand::GetSaveState).unwrap(); match thread.recv.recv().unwrap() { MainCommand::SaveState(new_spc) => { new_spc.serialize(ser); } _ => panic!(), } } } fn deserialize_save_state(thread: &mut Option<Thread>, deser: &mut SaveStateDeserializer) { if let Some(thread) = thread { let mut spc = Spc700::default(); spc.deserialize(deser); let _ = thread.send.send(ThreadCommand::SaveState(Box::new(spc))); } } } impl<B: Backend> Drop for Smp<B> { fn drop(&mut self) { if let Some(thread) = &mut self.thread { drop(thread.send.send(ThreadCommand::KillMe)); if let Some(Ok(Err(err))) = thread.join_handle.take().map(|t| t.join()) { todo!("throw useful error ({})", err) } } } }
use crate::{ backend::AudioBackend as Backend, spc700::Spc700, timing::{Cycles, APU_CPU_TIMING_PROPORTION_NTSC, APU_CPU_TIMING_PROPORTION_PAL}, }; use save_
mple) } } match action { Some(Action::WriteInputPort { addr, data }) => { spc.input[usize::from(addr & 3)] = data } Some(Action::ReadOutputPort { addr }) => { let _ = send.send(MainCommand::Data(spc.output[usize::from(addr & 3)])); } None => (), } } ThreadCommand::SaveState(new_spc) => spc = *new_spc, ThreadCommand::GetSaveState => { let _ = send.send(MainCommand::SaveState(Box::new(spc.clone()))); } ThreadCommand::KillMe => break Ok(()), } } } impl<B: Backend> Smp<B> { pub fn new(backend: B, is_pal: bool, is_threaded: bool) -> Self { let spc = Spc700::default(); let timing_proportion = if is_pal { APU_CPU_TIMING_PROPORTION_PAL } else { APU_CPU_TIMING_PROPORTION_NTSC }; if is_threaded { let ((m_send, m_recv), (t_send, t_recv)) = (channel(), channel()); let handle = std::thread::spawn(move || threaded_spc(spc, backend, m_send, t_recv)); let thread = Some(Thread { join_handle: Some(handle), send: t_send, recv: m_recv, }); Self { spc: None, backend: None, thread, timing_proportion, master_cycles: 0, } } else { Self { spc: Some(spc), backend: Some(backend), thread: None, timing_proportion, master_cycles: 0, } } } pub fn tick(&mut self, n: u16) { self.master_cycles += Cycles::from(n) * self.timing_proportion.1; } fn refresh_counters(&mut self) -> Cycles { let cycles = self.master_cycles / self.timing_proportion.0; self.master_cycles %= self.timing_proportion.0; cycles } fn refresh_no_thread(spc: &mut Spc700, backend: &mut B, cycles: Cycles) { for _ in 0..cycles { if let Some(sample) = spc.run_cycle() { backend.push_sample(sample) } } } pub fn refresh(&mut self) { let cycles = self.refresh_counters(); if let (Some(spc), Some(backend)) = (&mut self.spc, &mut self.backend) { Self::refresh_no_thread(spc, backend, cycles) } else if let Some(thread) = &mut self.thread { let _ = thread.send.send(ThreadCommand::RunCycles { cycles, action: None, }); } else { unreachable!() } } pub fn read_output_port(&mut self, addr: u8) -> u8 { let cycles = self.refresh_counters(); if let (Some(spc), Some(backend)) = (&mut self.spc, &mut self.backend) { Self::refresh_no_thread(spc, backend, cycles); spc.output[usize::from(addr & 3)] } else if let Some(thread) = &mut self.thread { let _ = thread.send.send(ThreadCommand::RunCycles { cycles, action: Some(Action::ReadOutputPort { addr }), }); match thread.recv.recv().unwrap() { MainCommand::Data(d) => d, _ => panic!(), } } else { unreachable!() } } pub fn write_input_port(&mut self, addr: u8, data: u8) { let cycles = self.refresh_counters(); if let (Some(spc), Some(backend)) = (&mut self.spc, &mut self.backend) { Self::refresh_no_thread(spc, backend, cycles); spc.input[usize::from(addr & 3)] = data } else if let Some(thread) = &mut self.thread { let _ = thread.send.send(ThreadCommand::RunCycles { cycles, action: Some(Action::WriteInputPort { addr, data }), }); } else { unreachable!() } } pub fn is_threaded(&self) -> bool { self.thread.is_some() } fn serialize_save_state(thread: &Option<Thread>, ser: &mut SaveStateSerializer) { if let Some(thread) = thread { thread.send.send(ThreadCommand::GetSaveState).unwrap(); match thread.recv.recv().unwrap() { MainCommand::SaveState(new_spc) => { new_spc.serialize(ser); } _ => panic!(), } } } fn deserialize_save_state(thread: &mut Option<Thread>, deser: &mut SaveStateDeserializer) { if let Some(thread) = thread { let mut spc = Spc700::default(); spc.deserialize(deser); let _ = thread.send.send(ThreadCommand::SaveState(Box::new(spc))); } } } impl<B: Backend> Drop for Smp<B> { fn drop(&mut self) { if let Some(thread) = &mut self.thread { drop(thread.send.send(ThreadCommand::KillMe)); if let Some(Ok(Err(err))) = thread.join_handle.take().map(|t| t.join()) { todo!("throw useful error ({})", err) } } } }
state::{InSaveState, SaveStateDeserializer, SaveStateSerializer}; use save_state_macro::InSaveState; use std::sync::mpsc::{channel, Receiver, RecvError, Sender}; #[derive(Debug, Clone)] enum Action { WriteInputPort { addr: u8, data: u8 }, ReadOutputPort { addr: u8 }, } #[derive(Debug, Clone)] enum ThreadCommand { RunCycles { cycles: Cycles, action: Option<Action>, }, SaveState(Box<Spc700>), GetSaveState, KillMe, } #[derive(Debug, Clone)] enum MainCommand { Data(u8), SaveState(Box<Spc700>), } type ReturnType = Result<(), RecvError>; #[derive(Debug)] struct Thread { join_handle: Option<std::thread::JoinHandle<ReturnType>>, send: Sender<ThreadCommand>, recv: Receiver<MainCommand>, } #[derive(Debug, InSaveState)] pub struct Smp<B: Backend> { pub spc: Option<Spc700>, #[except((|_v, _s| ()), (|_v, _s| ()))] pub backend: Option<B>, #[except(Self::serialize_save_state, Self::deserialize_save_state)] thread: Option<Thread>, timing_proportion: (Cycles, Cycles), master_cycles: Cycles, } fn threaded_spc<B: Backend>( mut spc: Spc700, mut backend: B, send: Sender<MainCommand>, recv: Receiver<ThreadCommand>, ) -> ReturnType { loop { match recv.recv()? { ThreadCommand::RunCycles { cycles, action } => { for _ in 0..cycles { if let Some(sample) = spc.run_cycle() { backend.push_sample(sa
random
[ { "content": "pub trait AccessType<B: crate::backend::AudioBackend, FB: crate::backend::FrameBuffer> {\n\n fn read<D: Data>(device: &mut Device<B, FB>, addr: Addr24) -> D;\n\n fn write<D: Data>(device: &mut Device<B, FB>, addr: Addr24, val: D);\n\n fn cpu(device: &Device<B, FB>) -> &Cpu;\n\n fn cpu_mut(device: &mut Device<B, FB>) -> &mut Cpu;\n\n fn is_main() -> bool {\n\n false\n\n }\n\n}\n\n\n\npub struct AccessTypeMain;\n\n\n\nimpl<B: crate::backend::AudioBackend, FB: crate::backend::FrameBuffer> AccessType<B, FB>\n\n for AccessTypeMain\n\n{\n\n fn read<D: Data>(device: &mut Device<B, FB>, addr: Addr24) -> D {\n\n device.read::<D>(addr)\n\n }\n\n\n\n fn write<D: Data>(device: &mut Device<B, FB>, addr: Addr24, val: D) {\n", "file_path": "rsnes/src/instr.rs", "rank": 0, "score": 97009.81171677442 }, { "content": " pub trait AudioBackend: Send + 'static {\n\n fn push_sample(&mut self, sample: StereoSample);\n\n }\n\n pub struct Dummy;\n\n\n\n impl AudioBackend for Dummy {\n\n fn push_sample(&mut self, _sample: StereoSample) {}\n\n }\n\n}\n\n\n\npub use audio::{AudioBackend, Dummy as AudioDummy};\n\n\n", "file_path": "rsnes/src/backend.rs", "rank": 2, "score": 41380.04673110174 }, { "content": "mod audio {\n\n use crate::spc700::StereoSample;\n\n\n", "file_path": "rsnes/src/backend.rs", "rank": 3, "score": 37793.23385139394 }, { "content": "struct AudioBackend {\n\n producer: ringbuf::Producer<i16>,\n\n}\n\n\n\nconst SAMPLE_RATE: cpal::SampleRate = cpal::SampleRate(32000);\n\nconst TIME_PER_GPU_FRAME: Duration = Duration::from_micros(8_333);\n\nconst TIME_UNTIL_TIMER_RESET: Duration = Duration::from_millis(500);\n\n\n\nimpl AudioBackend {\n\n fn write_data<T: Sample>(data: &mut [T], consumer: &mut ringbuf::Consumer<i16>, channels: u16) {\n\n for frame in data.chunks_exact_mut(channels.into()) {\n\n let [l, r] = [(), ()].map(|_| T::from(&consumer.pop().unwrap_or(0)));\n\n if channels == 2 {\n\n frame[0] = l;\n\n frame[1] = r;\n\n } else {\n\n // TODO: join channels together\n\n for i in 0..channels {\n\n frame[usize::from(i)] = l;\n\n }\n", "file_path": "emulator/src/main.rs", "rank": 4, "score": 34388.205166461754 }, { "content": "pub trait FrameBuffer {\n\n fn pixels(&self) -> &[[u8; 4]];\n\n fn mut_pixels(&mut self) -> &mut [[u8; 4]];\n\n fn request_redraw(&mut self);\n\n}\n\n\n\npub const FRAME_BUFFER_SIZE: usize = (ppu::MAX_SCREEN_HEIGHT_OVERSCAN * ppu::SCREEN_WIDTH) as usize;\n\nuse crate::ppu;\n\n#[derive(Debug, Clone)]\n\npub struct ArrayFrameBuffer(pub [[u8; 4]; FRAME_BUFFER_SIZE], pub bool);\n\n\n\nimpl FrameBuffer for ArrayFrameBuffer {\n\n fn pixels(&self) -> &[[u8; 4]] {\n\n &self.0\n\n }\n\n fn mut_pixels(&mut self) -> &mut [[u8; 4]] {\n\n &mut self.0\n\n }\n\n fn request_redraw(&mut self) {\n\n self.1 = true\n\n }\n\n}\n\n\n\nimpl ArrayFrameBuffer {\n\n pub fn get_bytes(&self) -> &[u8] {\n\n unsafe { core::slice::from_raw_parts(self.0.as_ptr() as _, self.0.len() << 2) }\n\n }\n\n}\n", "file_path": "rsnes/src/backend.rs", "rank": 5, "score": 32909.274132054554 }, { "content": "//! The SNES/Famicom device\n\n\n\nuse crate::{\n\n backend::{AudioBackend, FrameBuffer},\n\n cartridge::Cartridge,\n\n controller::ControllerPorts,\n\n cpu::Cpu,\n\n dma::Dma,\n\n ppu::Ppu,\n\n registers::MathRegisters,\n\n smp::Smp,\n\n timing::Cycles,\n\n};\n\nuse core::cell::Cell;\n\nuse save_state_macro::*;\n\n\n\nconst RAM_SIZE: usize = 0x20000;\n\n\n\n/// The 24-bit address type used\n\n#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]\n", "file_path": "rsnes/src/device.rs", "rank": 7, "score": 11.522759925398685 }, { "content": " FB: crate::backend::FrameBuffer,\n\n>(\n\n device: &'a mut Device<B, FB>,\n\n) -> DeviceAccess<'a, T, B, FB> {\n\n DeviceAccess(device, core::marker::PhantomData)\n\n}\n\n\n\npub struct DeviceAccess<\n\n 'a,\n\n T: AccessType<B, FB>,\n\n B: crate::backend::AudioBackend,\n\n FB: crate::backend::FrameBuffer,\n\n>(pub &'a mut Device<B, FB>, core::marker::PhantomData<T>);\n\n\n\nimpl<\n\n 'a,\n\n T: AccessType<B, FB>,\n\n B: crate::backend::AudioBackend,\n\n FB: crate::backend::FrameBuffer,\n\n > DeviceAccess<'a, T, B, FB>\n", "file_path": "rsnes/src/instr.rs", "rank": 8, "score": 10.114769666383541 }, { "content": "use crate::cpu::{Cpu, Status};\n\nuse crate::device::{Addr24, Data, Device};\n\nuse crate::timing::Cycles;\n\n\n\n// 0x80 BRA: the 2 instead of 3 cycles are on purpose.\n\n// `branch_near` will increment the cycle count\n\n#[rustfmt::skip]\n\nstatic CYCLES: [Cycles; 256] = [\n\n /* ^0 ^1 ^2 ^3 ^4 ^5 ^6 ^7 | ^8 ^9 ^a ^b ^c ^d ^e ^f */\n\n 7, 6, 7, 4, 5, 3, 5, 6, 3, 2, 2, 4, 6, 4, 6, 5, // 0^\n\n 2, 5, 5, 7, 5, 4, 6, 6, 2, 4, 2, 2, 6, 4, 7, 5, // 1^\n\n 6, 6, 8, 4, 3, 3, 5, 6, 4, 2, 2, 5, 4, 4, 6, 5, // 2^\n\n 2, 5, 5, 7, 4, 4, 6, 6, 2, 4, 2, 2, 4, 4, 7, 5, // 3^\n\n 6, 6, 2, 4, 1, 3, 5, 6, 3, 2, 2, 3, 3, 4, 6, 5, // 4^\n\n 2, 5, 5, 7, 1, 4, 6, 6, 2, 4, 3, 2, 4, 4, 7, 5, // 5^\n\n 6, 6, 6, 4, 3, 3, 5, 6, 4, 2, 2, 6, 5, 4, 6, 5, // 6^\n\n 2, 5, 5, 7, 4, 4, 6, 6, 2, 4, 4, 2, 6, 4, 7, 5, // 7^\n\n 2, 6, 4, 4, 3, 3, 3, 6, 2, 2, 2, 3, 4, 4, 4, 5, // 8^\n\n 2, 6, 5, 7, 4, 4, 4, 6, 2, 5, 2, 2, 4, 5, 5, 5, // 9^\n\n 2, 6, 2, 4, 3, 3, 3, 6, 2, 2, 2, 4, 4, 4, 4, 5, // a^\n", "file_path": "rsnes/src/instr.rs", "rank": 9, "score": 9.977426417540729 }, { "content": "mod config;\n\n\n\nuse clap::{ErrorKind, Parser};\n\nuse cpal::{\n\n traits::{DeviceTrait, HostTrait, StreamTrait},\n\n Sample,\n\n};\n\nuse pollster::FutureExt;\n\nuse rsnes::{backend::ArrayFrameBuffer, device::Device, spc700::StereoSample};\n\nuse save_state::InSaveState;\n\nuse std::{\n\n path::PathBuf,\n\n time::{Duration, Instant},\n\n};\n\nuse winit::{\n\n event::{DeviceEvent, ElementState, Event, KeyboardInput, WindowEvent},\n\n event_loop::{ControlFlow, EventLoop},\n\n window::WindowBuilder,\n\n};\n\n\n\nconst MASTER_CYCLES_PER_TICK: u16 = 2;\n\n\n\n#[derive(Parser, Clone)]\n\n#[clap(\n\n version = clap::crate_version!(),\n\n)]\n", "file_path": "emulator/src/main.rs", "rank": 10, "score": 9.824422935346433 }, { "content": " }\n\n }\n\n\n\n pub fn enable_hdma(&mut self, value: u8) {\n\n self.hdma_enabled = value;\n\n }\n\n\n\n pub fn get_first_dma_channel_id(&mut self) -> Option<usize> {\n\n if let id @ 0..=7 = self.dma_enabled.trailing_zeros() {\n\n Some(id as usize)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl<B: crate::backend::AudioBackend, FB: crate::backend::FrameBuffer> Device<B, FB> {\n\n fn transfer_direct_byte(\n\n &mut self,\n\n channel_id: usize,\n", "file_path": "rsnes/src/dma.rs", "rank": 11, "score": 8.710751052017837 }, { "content": " pub(crate) memory_cycles: Cycles,\n\n pub(crate) cpu_ahead_cycles: i32,\n\n pub(crate) new_scanline: bool,\n\n pub(crate) scanline_drawn: bool,\n\n pub new_frame: bool,\n\n pub(crate) do_hdma: bool,\n\n // multiplied by 4\n\n pub(crate) irq_time_h: u16,\n\n pub(crate) irq_time_v: u16,\n\n pub(crate) shall_irq: bool,\n\n pub(crate) shall_nmi: bool,\n\n pub(crate) nmi_vblank_bit: Cell<bool>,\n\n pub(crate) math_registers: MathRegisters,\n\n pub(crate) is_pal: bool,\n\n}\n\n\n\nimpl<B: AudioBackend, FB: FrameBuffer> Device<B, FB> {\n\n pub fn new(audio_backend: B, frame_buffer: FB, is_pal: bool, is_threaded: bool) -> Self {\n\n Self {\n\n cpu: Cpu::new(),\n", "file_path": "rsnes/src/device.rs", "rank": 12, "score": 8.703975498543358 }, { "content": " device.write::<D>(addr, val)\n\n }\n\n\n\n fn cpu(device: &Device<B, FB>) -> &Cpu {\n\n &device.cpu\n\n }\n\n\n\n fn cpu_mut(device: &mut Device<B, FB>) -> &mut Cpu {\n\n &mut device.cpu\n\n }\n\n\n\n fn is_main() -> bool {\n\n true\n\n }\n\n}\n\n\n\npub(crate) fn create_device_access<\n\n 'a,\n\n T: AccessType<B, FB>,\n\n B: crate::backend::AudioBackend,\n", "file_path": "rsnes/src/instr.rs", "rank": 13, "score": 8.334674630429923 }, { "content": " }\n\n}\n\n\n\nimpl<'a, B: crate::backend::AudioBackend, FB: crate::backend::FrameBuffer>\n\n DeviceAccess<'a, AccessTypeSa1, B, FB>\n\n{\n\n pub fn sa1(&self) -> &Sa1 {\n\n self.0.cartridge.as_ref().unwrap().sa1_ref()\n\n }\n\n\n\n pub fn sa1_mut(&mut self) -> &mut Sa1 {\n\n self.0.cartridge.as_mut().unwrap().sa1_mut()\n\n }\n\n\n\n pub fn run_dma_normal(&mut self) {\n\n todo!(\"SA-1 normal dma\")\n\n }\n\n\n\n pub fn run_dma_character_conversion_type1(&mut self) {\n\n todo!(\"SA-1 character conversion type 1 dma\")\n", "file_path": "rsnes/src/enhancement/sa1.rs", "rank": 14, "score": 8.250777710584028 }, { "content": " if self.math_timer == 0 {\n\n &self.result_after\n\n } else {\n\n &self.result_before\n\n }\n\n }\n\n}\n\n\n\nimpl<B: crate::backend::AudioBackend, FB: crate::backend::FrameBuffer> Device<B, FB> {\n\n pub fn read_internal_register(&mut self, id: u16) -> Option<u8> {\n\n assert!(id >= 0x4000 && id <= 0x43ff);\n\n match id {\n\n 0x4016 => {\n\n // JOYSER0 - NES-style Joypad access\n\n Some(self.controllers.port1.read_port_data() | (self.open_bus & 0xfc))\n\n }\n\n 0x4017 => {\n\n // JOYSER1 - NES-style Joypad access\n\n Some(self.controllers.port2.read_port_data() | 0b11100 | (self.open_bus & 0xfc))\n\n }\n", "file_path": "rsnes/src/registers.rs", "rank": 15, "score": 8.057634356911395 }, { "content": " for d in arr.as_mut() {\n\n self.cpu_mut().regs.sp = self.cpu().regs.sp.wrapping_add(1);\n\n if self.cpu().regs.is_emulation {\n\n self.cpu_mut().regs.sp = (self.cpu().regs.sp & 0xff) | 256\n\n }\n\n *d = self.read(Addr24::new(0, self.cpu().regs.sp));\n\n }\n\n D::from_bytes(&arr)\n\n }\n\n}\n\n\n\nimpl<\n\n 'a,\n\n T: AccessType<B, FB>,\n\n B: crate::backend::AudioBackend,\n\n FB: crate::backend::FrameBuffer,\n\n > DeviceAccess<'a, T, B, FB>\n\n{\n\n fn load_indexed_v<const BC: bool>(&mut self, cycles: &mut Cycles, val: u16) -> Addr24 {\n\n let loaded_addr = self.load::<u16>();\n", "file_path": "rsnes/src/instr.rs", "rank": 16, "score": 7.9096101168024955 }, { "content": "pub(crate) const NECDSP_CPU_TIMING_PROPORTION_NTSC: (Cycles, Cycles) = (118125, 45056);\n\npub(crate) const NECDSP_CPU_TIMING_PROPORTION_PAL: (Cycles, Cycles) = (40591, 15625);\n\n\n\nimpl<B: crate::backend::AudioBackend, FB: crate::backend::FrameBuffer> Device<B, FB> {\n\n pub fn run_cycle<const N: u16>(&mut self) {\n\n self.smp.tick(N);\n\n self.cartridge.as_mut().unwrap().tick(N.into());\n\n let vend = self.ppu.vend();\n\n if self.is_auto_joypad() && self.new_scanline && self.ppu.get_pos().y == vend + 2 {\n\n self.controllers.auto_joypad_timer = 4224;\n\n self.controllers.auto_joypad()\n\n }\n\n self.controllers.auto_joypad_timer -= self.controllers.auto_joypad_timer.min(N);\n\n // > The CPU is paused for 40 cycles beginning about 536 cycles\n\n // > after the start of each scanline\n\n // source: <https://wiki.superfamicom.org/timing>\n\n if self.ppu.is_cpu_active() && self.cpu.active {\n\n if self.dma.hdma_ahead_cycles > 0 {\n\n self.dma.hdma_ahead_cycles -= i32::from(N);\n\n } else if self.dma.is_dma_running() {\n", "file_path": "rsnes/src/timing.rs", "rank": 17, "score": 7.826401892685209 }, { "content": "//! Utilities to read a cartridge into memory\n\n//!\n\n//! # Literature\n\n//!\n\n//! - the [super famicom wiki page](https://wiki.superfamicom.org/memory-mapping)\n\n//! - <http://patrickjohnston.org/ASM/ROM data/snestek.htm>\n\n\n\nuse std::convert::TryInto;\n\n\n\nuse crate::{\n\n device::{Addr24, Data},\n\n enhancement::{sa1::Sa1, Dsp, DspVersion},\n\n timing::Cycles,\n\n};\n\nuse save_state::{SaveStateDeserializer, SaveStateSerializer};\n\nuse save_state_macro::*;\n\n\n\nconst MINIMUM_SIZE: usize = 0x8000;\n\n\n", "file_path": "rsnes/src/cartridge.rs", "rank": 18, "score": 7.552583165391597 }, { "content": "//! Timing control implementation\n\n//!\n\n//! # Literature\n\n//!\n\n//! - <https://wiki.superfamicom.org/timing>\n\n\n\nuse crate::{\n\n cpu::Status,\n\n device::{Addr24, Device},\n\n};\n\n\n\npub type Cycles = u32;\n\n\n\n// The SNES master clock runs at ca. (945/44) MHz which is ca. 21_477kHz;\n\n// The APU runs at 1024kHz\n\n\n\n/// This is a fractional proportion between the cpu and apu clock speed\n\npub(crate) const APU_CPU_TIMING_PROPORTION_NTSC: (Cycles, Cycles) = (118125, 5632);\n\npub(crate) const APU_CPU_TIMING_PROPORTION_PAL: (Cycles, Cycles) = (665, 32);\n\n\n", "file_path": "rsnes/src/timing.rs", "rank": 19, "score": 7.260285793113628 }, { "content": "use crate::device::Device;\n\nuse save_state_macro::*;\n\n\n\nconst CHIP_5A22_VERSION: u8 = 2;\n\n\n\n#[derive(Debug, Clone, InSaveState)]\n\npub struct MathRegisters {\n\n multiplicands: [u8; 2],\n\n dividend: u16,\n\n divisor: u8,\n\n math_timer: u16,\n\n result_after: [u8; 4],\n\n result_before: [u8; 4],\n\n}\n\n\n\nimpl MathRegisters {\n\n pub const fn new() -> Self {\n\n Self {\n\n multiplicands: [0xff, 0xff],\n\n dividend: 0xffff,\n", "file_path": "rsnes/src/registers.rs", "rank": 20, "score": 7.138668942032641 }, { "content": " fn write_bwram_small<const INTERNAL: bool>(&mut self, addr: Addr24, val: u8) {\n\n let addr = self.get_bwram_small::<INTERNAL>(addr);\n\n if INTERNAL && self.bwram_map_bits {\n\n return self.write_bwram_bits(addr, val);\n\n }\n\n self.bwram[(addr & 0x3_ffff) as usize] = val\n\n }\n\n}\n\n\n\npub struct AccessTypeSa1;\n\n\n\nimpl<B: crate::backend::AudioBackend, FB: crate::backend::FrameBuffer> AccessType<B, FB>\n\n for AccessTypeSa1\n\n{\n\n fn read<D: Data>(device: &mut Device<B, FB>, mut addr: Addr24) -> D {\n\n let mut arr: D::Arr = Default::default();\n\n let mut open_bus = device.open_bus;\n\n let cartridge = device.cartridge.as_mut().unwrap();\n\n for v in arr.as_mut() {\n\n *v = cartridge.sa1_read::<true>(addr).unwrap_or(open_bus);\n", "file_path": "rsnes/src/enhancement/sa1.rs", "rank": 21, "score": 7.096368666745546 }, { "content": "//! DSP-n cartridge coprocessor handling types\n\n//!\n\n//! # Literature\n\n//!\n\n//! - https://www.caitsith2.com/snes/dsp/\n\n//! - https://datasheet.datasheetarchive.com/originals/scans/Scans-003/Scans-0079458.pdf\n\n//! - SNES book 2 - Section 3\n\n\n\nuse crate::timing::Cycles;\n\nuse save_state::{InSaveState, SaveStateDeserializer, SaveStateSerializer};\n\nuse save_state_macro::InSaveState;\n\n\n\npub const ROM_SIZE: usize = 0x2000;\n\n\n\n#[derive(Debug, Clone, Copy, InSaveState)]\n\npub struct Stack {\n\n stack: [u16; 4],\n\n size: u8,\n\n}\n\n\n", "file_path": "rsnes/src/enhancement/dsp.rs", "rank": 22, "score": 7.021960370732151 }, { "content": "//! SA-1 Coprocessor handling types\n\n//!\n\n//! # Literature\n\n//!\n\n//! - <https://wiki.superfamicom.org/sa-1>\n\n//! - <https://wiki.superfamicom.org/sa-1-registers>\n\n//! - <https://wiki.superfamicom.org/uploads/assembly-programming-manual-for-w65c816.pdf>\n\n//! - <https://problemkaputt.de/fullsnes.htm>\n\n\n\nuse crate::{\n\n cartridge::Cartridge,\n\n cpu::{Cpu, Status},\n\n device::{Addr24, Data, Device},\n\n instr::{AccessType, DeviceAccess},\n\n};\n\nuse core::mem::replace;\n\nuse save_state_macro::*;\n\n\n\nconst IRAM_SIZE: usize = 0x800;\n\nconst BWRAM_SIZE: usize = 0x40000;\n", "file_path": "rsnes/src/enhancement/sa1.rs", "rank": 23, "score": 6.983826775752403 }, { "content": " fn to_open_bus(self) -> u8 {\n\n self.bank\n\n }\n\n fn from_open_bus(open_bus: u8) -> Self {\n\n Self::new(open_bus, open_bus as u16 | ((open_bus as u16) << 8))\n\n }\n\n}\n\n\n\n#[derive(Debug, InSaveState)]\n\npub struct Device<B: AudioBackend, FB: FrameBuffer> {\n\n pub(crate) cpu: Cpu,\n\n pub smp: Smp<B>,\n\n pub ppu: Ppu<FB>,\n\n pub(crate) dma: Dma,\n\n pub controllers: ControllerPorts,\n\n pub(crate) cartridge: Option<Cartridge>,\n\n /// <https://wiki.superfamicom.org/open-bus>\n\n pub(crate) open_bus: u8,\n\n ram: [u8; RAM_SIZE],\n\n wram_addr: Cell<u32>,\n", "file_path": "rsnes/src/device.rs", "rank": 24, "score": 6.9056384970808775 }, { "content": "use crate::oam::{CgRam, Oam, Object};\n\nuse core::mem::{replace, take};\n\nuse save_state::{SaveStateDeserializer, SaveStateSerializer};\n\nuse save_state_macro::*;\n\n\n\npub const VRAM_SIZE: usize = 0x8000;\n\npub const SCREEN_WIDTH: u32 = 256;\n\npub const MAX_SCREEN_HEIGHT: u32 = 224;\n\npub const MAX_SCREEN_HEIGHT_OVERSCAN: u32 = 239;\n\npub const CHIP_5C77_VERSION: u8 = 1;\n\npub const CHIP_5C78_VERSION: u8 = 3;\n\n\n\n// TODO: Check the exact value of this.\n\n// wiki.superfamicom.org/timing states that\n\n// when we disable Force Blank mid-scanline,\n\n// there is garbage for about 16-24 pixels.\n\npub const RAY_AHEAD_CYCLES: u16 = 20 * 4;\n\n\n\nstatic OBJ_SIZES: [[[u8; 2]; 2]; 8] = [\n\n [[8, 8], [16, 16]],\n", "file_path": "rsnes/src/ppu.rs", "rank": 26, "score": 6.781066332331774 }, { "content": "use crate::device::{Addr24, Device};\n\nuse save_state_macro::*;\n\n\n\npub mod flags {\n\n pub const MODE: u8 = 0b111;\n\n pub const FIXED: u8 = 0x08;\n\n pub const DECREMENT: u8 = 0x10;\n\n pub const INDIRECT: u8 = 0x40;\n\n pub const PPU_TO_CPU: u8 = 0x80;\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, InSaveState)]\n\npub struct Channel {\n\n a_bus: Addr24,\n\n b_bus: u8,\n\n size: u16,\n\n indirect_bank: u8,\n\n control: u8,\n\n unknown_register: u8,\n\n table: u16,\n", "file_path": "rsnes/src/dma.rs", "rank": 27, "score": 6.582801313466065 }, { "content": "//! 65816/65C816 CPU handling types\n\n//!\n\n//! # Literature\n\n//!\n\n//! - the [super famicom wiki page](https://wiki.superfamicom.org/65816-reference)\n\n//! - <https://apprize.best/programming/65816/>\n\n//! - <https://www.westerndesigncenter.com/wdc/documentation/w65c816s.pdf>\n\n//! - <https://wiki.superfamicom.org/uploads/assembly-programming-manual-for-w65c816.pdf>\n\n\n\nuse crate::device::Addr24;\n\nuse core::ops::{BitAnd, BitAndAssign, BitOr, BitOrAssign, Not};\n\nuse save_state_macro::*;\n\n\n\npub const RESET_VECTOR_ADDR: Addr24 = Addr24::new(0, 0xfffc);\n\n\n\n/// Structure containing the processor registers\n\n#[derive(Debug, Clone, InSaveState)]\n\npub struct Regs {\n\n /// The accumulator register\n\n pub a: u16,\n", "file_path": "rsnes/src/cpu.rs", "rank": 28, "score": 6.526092476636515 }, { "content": "//! SPC700 Sound Coprocessor handling types\n\n//!\n\n//! # Literature\n\n//!\n\n//! - <https://wiki.superfamicom.org/spc700-reference>\n\n//! - <https://emudev.de/q00-snes/spc700-the-audio-processor/>\n\n//! - The first of the two official SNES documentation books\n\n\n\nuse crate::timing::Cycles;\n\nuse core::{cell::Cell, mem::take};\n\nuse save_state::{SaveStateDeserializer, SaveStateSerializer};\n\nuse save_state_macro::*;\n\n\n\npub const MEMORY_SIZE: usize = 64 * 1024;\n\n\n\nstatic ROM: [u8; 64] = [\n\n 0xCD, 0xEF, 0xBD, 0xE8, 0x00, 0xC6, 0x1D, 0xD0, 0xFC, 0x8F, 0xAA, 0xF4, 0x8F, 0xBB, 0xF5, 0x78,\n\n 0xCC, 0xF4, 0xD0, 0xFB, 0x2F, 0x19, 0xEB, 0xF4, 0xD0, 0xFC, 0x7E, 0xF4, 0xD0, 0x0B, 0xE4, 0xF5,\n\n 0xCB, 0xF4, 0xD7, 0x00, 0xFC, 0xD0, 0xF3, 0xAB, 0x01, 0x10, 0xEF, 0x7E, 0xF4, 0x10, 0xEB, 0xBA,\n\n 0xF6, 0xDA, 0x00, 0xBA, 0xF4, 0xC4, 0xF4, 0xDD, 0x5D, 0xD0, 0xDB, 0x1F, 0x00, 0x00, 0xC0, 0xFF,\n", "file_path": "rsnes/src/spc700.rs", "rank": 29, "score": 5.627857657768466 }, { "content": " }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, InSaveState)]\n\npub struct Ppu<FB: crate::backend::FrameBuffer> {\n\n #[except((|_v, _s| ()), (|_v, _s| ()))]\n\n pub frame_buffer: FB,\n\n oam: Oam,\n\n cgram: CgRam,\n\n vram: Vram,\n\n bgs: [Bg; 4],\n\n bg_mode: BgMode,\n\n bg3_prio: bool,\n\n pos: RayPos,\n\n latched: LatchedState,\n\n brightness: u8,\n\n draw_layers: Layers,\n\n /// Object sizes in (x, y) = `[u8; 2]` for small sprites (=`obj_size[0]`) and large sprites\n\n /// (=`obj_size[1]`) in pixels\n", "file_path": "rsnes/src/ppu.rs", "rank": 30, "score": 5.5268974273080715 }, { "content": " is_pal,\n\n }\n\n }\n\n\n\n pub fn with_main_cpu<'a>(\n\n &'a mut self,\n\n ) -> crate::instr::DeviceAccess<'a, crate::instr::AccessTypeMain, B, FB> {\n\n crate::instr::create_device_access(self)\n\n }\n\n\n\n pub fn with_sa1_cpu<'a>(\n\n &'a mut self,\n\n ) -> crate::instr::DeviceAccess<'a, crate::enhancement::sa1::AccessTypeSa1, B, FB> {\n\n crate::instr::create_device_access(self)\n\n }\n\n\n\n pub fn get_irq_pin(&self) -> bool {\n\n match &self.cartridge {\n\n Some(cart) if cart.has_sa1() => cart.sa1_ref().irq_pin(),\n\n _ => false,\n", "file_path": "rsnes/src/device.rs", "rank": 31, "score": 5.241057015466465 }, { "content": " pub regs: Regs,\n\n pub(crate) nmitimen: u8,\n\n pub(crate) access_speed: bool,\n\n pub(crate) in_nmi: bool,\n\n pub(crate) irq_bit: u8,\n\n pub wait_mode: bool,\n\n pub active: bool,\n\n}\n\n\n\nimpl Cpu {\n\n pub const fn new() -> Self {\n\n Self {\n\n regs: Regs {\n\n a: 0,\n\n x: 0,\n\n y: 0,\n\n sp: 0x100,\n\n dp: 0,\n\n pc: Addr24::new(0, 0),\n\n db: 0,\n", "file_path": "rsnes/src/cpu.rs", "rank": 33, "score": 5.055984588410108 }, { "content": "}\n\n\n\nimpl<FB: crate::backend::FrameBuffer> Ppu<FB> {\n\n pub fn new(frame_buffer: FB, is_pal: bool) -> Self {\n\n let bg_mode = BgMode::new(0, false, false);\n\n Self {\n\n frame_buffer,\n\n oam: Oam::new(),\n\n cgram: CgRam::new(),\n\n vram: Vram::new(),\n\n bgs: [Bg::new(); 4],\n\n bg_mode,\n\n bg3_prio: false,\n\n pos: Default::default(),\n\n latched: Default::default(),\n\n brightness: 15,\n\n draw_layers: Layers::from_bgmode(bg_mode),\n\n obj_size: OBJ_SIZES[0],\n\n obj_tile_addr: [0; 2],\n\n obj_layer: Layer::new(),\n", "file_path": "rsnes/src/ppu.rs", "rank": 34, "score": 5.048335567831096 }, { "content": "## Structure\n\n\n\nThis repository is a workspace consisting of two crates\n\n\n\n- `rsnes` - the SNES backend library (located in `/rsnes/`)\n\n- `rsnes-emulator` - a sample frontend implementation using `winit` and `wgpu`\n\n (located in `/emulator/`)\n\n\n\n⚠️ Please note that the `rsnes` API is neither tested nor documented (well) ⚠️\n\n\n\n⚠️ Also note, that `rsnes-emulator` is only tested on Linux/X11 ⚠️\n\n\n\n## Features\n\n\n\nThis is a set of features to be implemented in the future (sorted by priority)\n\n\n\n- [x] Mode 7 support\n\n- [x] Sprite support\n\n- [x] Color math\n\n- [x] S-DSP echo effect support\n\n- [x] S-DSP noise effect support\n\n- [x] PPU Mosaic effect\n\n- [ ] Save game to files\n\n- [ ] SA-1 support\n\n- [ ] Real gamepad input support for `rsnes-emulator`\n\n (see [winit#944](https://github.com/rust-windowing/winit/issues/944),\n\n maybe use unstable fork or branch?)\n\n- [ ] Improved documentation\n\n- [ ] Tests\n\n - [ ] 65816 processor instruction tests\n\n - [ ] SPC-700 processor instruction tests\n\n - [ ] Audio tests\n\n - [ ] Video output tests\n\n- [ ] configurable UI\n\n - [x] configurable key bindings\n\n- [ ] emulator running also on [WASM](https://webassembly.org/)\n\n- [x] [DSP](https://en.wikipedia.org/wiki/NEC_%C2%B5PD7720#%C2%B5PD77C25)\n\n coprocessor support\n\n - [x] DSP-1, DSP-1A, DSP-1B\n\n - [x] DSP-2, DSP-3, DSP-4 (low priority)\n\n - [ ] ST010, ST011 (very low priority)\n\n- [ ] [GSU](https://en.wikipedia.org/wiki/Super_FX) coprocessor support\n\n (also known as Super FX)\n\n - [ ] GSU1\n\n - [ ] GSU2\n\n- [x] Complete the 65816 instruction set\n\n- [x] Complete the SPC700 instruction set\n\n- [x] Complete the NEC μPD77C25 instruction set\n\n- [ ] Complete the GSU instruction set\n\n- [ ] Multitap (MP5) controller support\n\n- [x] [SNES Mouse](https://en.wikipedia.org/wiki/Super_NES_Mouse) support\n\n- [ ] [SNES Super Scope](https://en.wikipedia.org/wiki/Super_Scope) support\n\n- [x] Save States\n\n- [ ] Capcom CX4 coprocessor support\n\n (this processor is only used in Mega Man X2 and Mega Man X3)\n\n- [ ] SPC7110 data decompression chip\n\n\n\n## Contributing\n\n\n\nContributions of any kind (bug reports, feature requests, pull requests, …) are\n\nvery welcome.\n", "file_path": "README.md", "rank": 35, "score": 4.904464592991095 }, { "content": " pub(crate) addr: u16,\n\n pub(crate) addr_inc: u16,\n\n stashed_write: u8,\n\n pub(crate) priority: bool,\n\n}\n\n\n\nimpl Oam {\n\n pub const fn new() -> Self {\n\n Self {\n\n objs: [Object::new(); 128],\n\n addr: 0,\n\n addr_inc: 0,\n\n stashed_write: 0,\n\n priority: false,\n\n }\n\n }\n\n\n\n /// Reset the OAM address.\n\n /// This occurs usually at the beginning of a V-Blank\n\n /// if it is not fblanked.\n", "file_path": "rsnes/src/oam.rs", "rank": 36, "score": 4.761550563484011 }, { "content": "use super::*;\n\n\n\n#[test]\n", "file_path": "save-state/src/tests.rs", "rank": 37, "score": 4.750245969755288 }, { "content": "use proc_macro::TokenStream;\n\n\n", "file_path": "save-state-macro/src/lib.rs", "rank": 38, "score": 4.624557574132381 }, { "content": " let firstsprite = self.oam.get_first_sprite();\n\n for obj_id in 0..128 {\n\n let obj = &mut self.oam.objs[usize::from(obj_id + firstsprite) & 0x7f];\n\n obj.used = false;\n\n let size = self.obj_size[usize::from(obj.is_large)];\n\n if (-i16::from(size[0]) >= obj.x && obj.x != -256)\n\n || obj.x >= 256\n\n || y.wrapping_sub(obj.y) >= size[1]\n\n {\n\n continue;\n\n }\n\n if objs_in_line >= 32 {\n\n self.overflow_flags |= 0x40;\n\n break;\n\n }\n\n obj.used = true;\n\n }\n\n 'obj_loop: for obj in 0..128 {\n\n let obj = self.oam.objs[!usize::from(obj + firstsprite) & 0x7f];\n\n if !obj.used {\n", "file_path": "rsnes/src/ppu.rs", "rank": 40, "score": 4.458969120907609 }, { "content": "impl rsnes::backend::AudioBackend for AudioBackend {\n\n fn push_sample(&mut self, sample: StereoSample) {\n\n let _ = self\n\n .producer\n\n .push(sample.l)\n\n .and_then(|()| self.producer.push(sample.r));\n\n }\n\n}\n\n\n\nmod shaders {\n\n macro_rules! include_shader {\n\n ($t:expr) => {\n\n include_bytes!(concat!(env!(\"OUT_DIR\"), \"/\", $t))\n\n };\n\n }\n\n\n\n static VERTEX_SHADER: &[u8] = include_shader!(\"main.vertex.spirv\");\n\n static FRAGMENT_SHADER: &[u8] = include_shader!(\"main.fragment.spirv\");\n\n\n\n fn create_shader(device: &wgpu::Device, source: &[u8]) -> wgpu::ShaderModule {\n", "file_path": "emulator/src/main.rs", "rank": 42, "score": 4.410427340000471 }, { "content": "use std::collections::HashMap;\n\nuse std::path::{Path, PathBuf};\n\nuse toml::value::{Table, Value};\n\n\n\nstatic CONFIG_FILE_PATHS: &'static [(bool, &'static str)] = &[\n\n (true, \".config/rsnes/config.toml\"),\n\n (true, \".config/rsnes.toml\"),\n\n (false, \"/etc/rsnes.toml\"),\n\n];\n\n\n\n#[derive(Debug)]\n\npub enum ConfigLoadError {\n\n Io(std::io::Error),\n\n De(toml::de::Error),\n\n WrongType {\n\n expected: &'static str,\n\n got: &'static str,\n\n },\n\n UnknownField(String),\n\n RequiredAttr {\n", "file_path": "emulator/src/config.rs", "rank": 43, "score": 4.312282893611155 }, { "content": "use save_state_macro::*;\n\n\n\n#[derive(Debug, Clone, Copy, InSaveState)]\n\npub struct Object {\n\n pub x: i16,\n\n pub y: u8,\n\n pub tile_nr: u8,\n\n pub attrs: u8,\n\n pub is_large: bool,\n\n\n\n pub used: bool,\n\n}\n\n\n\nimpl Object {\n\n pub const fn new() -> Object {\n\n Self {\n\n x: 0,\n\n y: 0,\n\n tile_nr: 0,\n\n attrs: 0,\n", "file_path": "rsnes/src/oam.rs", "rank": 45, "score": 4.292078969250371 }, { "content": "mod dsp;\n\npub mod sa1;\n\n\n\n#[doc(inline)]\n\npub use dsp::{Dsp, DspVersion};\n", "file_path": "rsnes/src/enhancement/mod.rs", "rank": 46, "score": 4.232587365549692 }, { "content": " }\n\n\n\n pub fn set_region(&mut self, pal: bool) {\n\n if let Some(dsp) = &mut self.dsp {\n\n dsp.set_timing_proportion(if pal {\n\n crate::timing::NECDSP_CPU_TIMING_PROPORTION_PAL\n\n } else {\n\n crate::timing::NECDSP_CPU_TIMING_PROPORTION_NTSC\n\n })\n\n }\n\n if let Some(sa1) = &mut self.sa1 {\n\n sa1.set_region(pal)\n\n }\n\n }\n\n\n\n pub fn tick(&mut self, n: Cycles) {\n\n if let Some(dsp) = &mut self.dsp {\n\n dsp.tick(n)\n\n }\n\n }\n", "file_path": "rsnes/src/cartridge.rs", "rank": 48, "score": 4.138564499286767 }, { "content": "}\n\n\n\n#[derive(Debug, Clone, InSaveState)]\n\npub struct Dma {\n\n channels: [Channel; 8],\n\n running: bool,\n\n dma_enabled: u8,\n\n hdma_enabled: u8,\n\n cancelled: u8,\n\n do_transfer: u8,\n\n pub(crate) hdma_ahead_cycles: i32,\n\n pub(crate) ahead_cycles: i32,\n\n}\n\n\n\nimpl Dma {\n\n pub fn new() -> Self {\n\n Self {\n\n channels: [Channel::new(); 8],\n\n running: false,\n\n dma_enabled: 0,\n", "file_path": "rsnes/src/dma.rs", "rank": 49, "score": 4.114203646523293 }, { "content": "use shaderc::{CompileOptions, Compiler, OptimizationLevel, ShaderKind, SourceLanguage::GLSL};\n\n\n\nconst SHADERS_PATH: &str = \"shaders/\";\n\n\n\n#[allow(clippy::enum_variant_names)]\n", "file_path": "emulator/build.rs", "rank": 50, "score": 3.8587980629602967 }, { "content": " left,\n\n down,\n\n right,\n\n l,\n\n r,\n\n start,\n\n select,\n\n },\n\n } => {\n\n use rsnes::controller::buttons::*;\n\n let mut key = 0;\n\n for (code, button) in [\n\n (a, A),\n\n (b, B),\n\n (x, X),\n\n (y, Y),\n\n (up, UP),\n\n (left, LEFT),\n\n (down, DOWN),\n\n (right, RIGHT),\n", "file_path": "emulator/src/config.rs", "rank": 51, "score": 3.816665771476231 }, { "content": "use core::{cell::Cell, mem::replace};\n\nuse save_state_macro::*;\n\n\n\npub mod buttons {\n\n pub const B: u16 = 1;\n\n pub const Y: u16 = 2;\n\n pub const SELECT: u16 = 4;\n\n pub const START: u16 = 8;\n\n pub const UP: u16 = 0x10;\n\n pub const DOWN: u16 = 0x20;\n\n pub const LEFT: u16 = 0x40;\n\n pub const RIGHT: u16 = 0x80;\n\n pub const A: u16 = 0x100;\n\n pub const X: u16 = 0x200;\n\n pub const L: u16 = 0x400;\n\n pub const R: u16 = 0x800;\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum Controller {\n", "file_path": "rsnes/src/controller.rs", "rank": 52, "score": 3.8019180792270153 }, { "content": " cartridge.header()\n\n );\n\n }\n\n let is_pal = match profile.region {\n\n rsnes::cartridge::CountryFrameRate::Any => matches!(\n\n cartridge.get_country_frame_rate(),\n\n rsnes::cartridge::CountryFrameRate::Pal\n\n ),\n\n rsnes::cartridge::CountryFrameRate::Pal => true,\n\n rsnes::cartridge::CountryFrameRate::Ntsc => false,\n\n };\n\n if options.verbose {\n\n println!(\n\n \"[info] Selected {} region\",\n\n if is_pal { \"PAL\" } else { \"NTSC\" }\n\n );\n\n }\n\n let (audio_backend, _audio_stream) =\n\n AudioBackend::new().unwrap_or_else(|| error!(\"Failed finding an audio output device\"));\n\n let mut snes = Device::new(\n", "file_path": "emulator/src/main.rs", "rank": 53, "score": 3.688157691851025 }, { "content": " obj_size: [[u8; 2]; 2],\n\n /// Tile base address without and with gap\n\n obj_tile_addr: [u16; 2],\n\n obj_layer: Layer,\n\n obj_cache: [ObjCacheEntry; 256],\n\n overflow_flags: u8,\n\n color_math: ColorMath,\n\n direct_color_mode: bool,\n\n object_interlace: bool,\n\n interlace_active: bool,\n\n window_positions: [[u8; 2]; 2],\n\n overscan: bool,\n\n pseudo512: bool,\n\n mosaic_size: u8,\n\n mode7_settings: Mode7Settings,\n\n field: bool,\n\n force_blank: bool,\n\n is_pal: bool,\n\n pub(crate) open_bus1: u8,\n\n pub(crate) open_bus2: u8,\n", "file_path": "rsnes/src/ppu.rs", "rank": 54, "score": 3.6425040709105185 }, { "content": " audio_backend,\n\n ArrayFrameBuffer([[0; 4]; rsnes::backend::FRAME_BUFFER_SIZE], true),\n\n is_pal,\n\n profile.threaded,\n\n );\n\n snes.controllers.port1 = config::controller_profile_to_port(port1_profile.as_ref());\n\n snes.controllers.port2 = config::controller_profile_to_port(port2_profile.as_ref());\n\n snes.load_cartridge(cartridge);\n\n\n\n let size = winit::dpi::PhysicalSize::new(\n\n rsnes::ppu::SCREEN_WIDTH * 4,\n\n rsnes::ppu::MAX_SCREEN_HEIGHT * 4,\n\n );\n\n let event_loop = EventLoop::new();\n\n let window = WindowBuilder::new()\n\n .with_decorations(true)\n\n .with_visible(true)\n\n .with_fullscreen(None)\n\n .with_resizable(true)\n\n .with_maximized(false)\n", "file_path": "emulator/src/main.rs", "rank": 56, "score": 3.5570304314817576 }, { "content": " self.with_main_cpu().dispatch_instruction() * 6\n\n }) + self.memory_cycles;\n\n self.cpu_ahead_cycles += cycles as i32;\n\n }\n\n }\n\n\n\n pub fn get_memory_cycle(&self, addr: Addr24) -> Cycles {\n\n #[repr(u8)]\n\n enum Speed {\n\n Fast = 6,\n\n Slow = 8,\n\n XSlow = 12,\n\n }\n\n use Speed::*;\n\n macro_rules! romaccess {\n\n () => {\n\n if self.cpu.access_speed {\n\n Fast\n\n } else {\n\n Slow\n", "file_path": "rsnes/src/timing.rs", "rank": 57, "score": 3.310609288229365 }, { "content": " let data = port.read_port_data();\n\n port.data1 |= u16::from(data & 1);\n\n port.data2 |= u16::from(data >> 1);\n\n }\n\n }\n\n }\n\n\n\n pub(crate) fn access(&self, id: u16) -> u8 {\n\n let port = if id & 2 > 0 { &self.port2 } else { &self.port1 };\n\n let data = if id & 4 > 0 { port.data2 } else { port.data1 };\n\n if id & 1 > 0 {\n\n (data >> 8) as u8\n\n } else {\n\n (data & 0xff) as u8\n\n }\n\n }\n\n}\n", "file_path": "rsnes/src/controller.rs", "rank": 58, "score": 3.3097855653128936 }, { "content": "pub mod backend;\n\npub mod cartridge;\n\npub mod controller;\n\npub mod cpu;\n\npub mod device;\n\npub mod dma;\n\npub mod enhancement;\n\nmod instr;\n\npub mod oam;\n\npub mod ppu;\n\nmod registers;\n\npub mod smp;\n\npub mod spc700;\n\nmod timing;\n", "file_path": "rsnes/src/lib.rs", "rank": 59, "score": 3.195291310052478 }, { "content": " } else {\n\n None\n\n }\n\n }\n\n }\n\n\n\n pub fn write_byte(&mut self, addr: Addr24, val: u8) {\n\n if self.has_sa1() {\n\n self.sa1_write::<false>(addr, val)\n\n } else {\n\n if let Some((index, MappingEntry { write, .. })) = self.mapping.find(addr) {\n\n write.get()(self, index, val)\n\n }\n\n }\n\n }\n\n\n\n pub const fn get_country_frame_rate(&self) -> CountryFrameRate {\n\n use CountryFrameRate::*;\n\n match self.header.country {\n\n 0 | 1 | 13 | 15 => Ntsc,\n", "file_path": "rsnes/src/cartridge.rs", "rank": 60, "score": 3.104776209819407 }, { "content": "\n\nimpl<T: InSaveState + Default> InSaveState for Option<T> {\n\n fn serialize(&self, state: &mut SaveStateSerializer) {\n\n self.is_some().serialize(state);\n\n match self {\n\n Some(v) => v.serialize(state),\n\n // TODO: use extend_from_slice when const generics get stabilized enough\n\n None => (),\n\n }\n\n }\n\n\n\n fn deserialize(&mut self, state: &mut SaveStateDeserializer) {\n\n let mut i = false;\n\n i.deserialize(state);\n\n *self = if i {\n\n let mut i = T::default();\n\n i.deserialize(state);\n\n Some(i)\n\n } else {\n\n None\n", "file_path": "save-state/src/lib.rs", "rank": 61, "score": 2.9982546049737335 }, { "content": " is_large: false,\n\n used: false,\n\n }\n\n }\n\n\n\n pub fn write_low_low(&mut self, val1: u8, val2: u8) {\n\n self.x = (((self.x as u16) & 0xff00) | u16::from(val1)) as i16;\n\n self.y = val2;\n\n }\n\n\n\n pub fn write_low_high(&mut self, val1: u8, val2: u8) {\n\n self.tile_nr = val1;\n\n self.attrs = val2;\n\n }\n\n\n\n pub fn write_high(&mut self, val: u8) {\n\n self.x = ((self.x as u16 & 0xff) | (0u16.wrapping_sub((val & 1).into()) & 0xff00)) as i16;\n\n self.is_large = val & 2 > 0;\n\n }\n\n\n", "file_path": "rsnes/src/oam.rs", "rank": 62, "score": 2.923039790333628 }, { "content": " fn deserialize(&mut self, state: &mut SaveStateDeserializer) {\n\n self.get_mut().deserialize(state)\n\n }\n\n}\n\n\n\n// This uses 0 and 255 for false and true. That will make the memory\n\n// representation more robust against memory corruption by random\n\n// bit flips.\n\nimpl InSaveState for bool {\n\n fn serialize(&self, state: &mut SaveStateSerializer) {\n\n let i: u8 = if *self { 0xff } else { 0 };\n\n i.serialize(state)\n\n }\n\n\n\n fn deserialize(&mut self, state: &mut SaveStateDeserializer) {\n\n let mut i: u8 = 0;\n\n i.deserialize(state);\n\n *self = i.count_ones() >= 4\n\n }\n\n}\n", "file_path": "save-state/src/lib.rs", "rank": 63, "score": 2.8987998328877493 }, { "content": " }\n\n\n\n pub const fn is_xflip(&self) -> bool {\n\n self.attrs & 0x40 > 0\n\n }\n\n\n\n pub const fn is_yflip(&self) -> bool {\n\n self.attrs & 0x80 > 0\n\n }\n\n\n\n pub fn get_tile_addr(&self, base: u16, tilex: u8, tiley: u8) -> u16 {\n\n let tile_nr = self.tile_nr.wrapping_add(tilex).wrapping_add(tiley << 4);\n\n base.wrapping_add(u16::from(tile_nr) << 4)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, InSaveState)]\n\npub struct Oam {\n\n pub(crate) objs: [Object; 128],\n\n // 10-bit value\n", "file_path": "rsnes/src/oam.rs", "rank": 64, "score": 2.8980785735530694 }, { "content": "\n\n pub const fn to_byte(self) -> u8 {\n\n self as u8\n\n }\n\n}\n\n\n\nimpl save_state::InSaveState for MaskLogic {\n\n fn serialize(&self, state: &mut SaveStateSerializer) {\n\n self.to_byte().serialize(state)\n\n }\n\n\n\n fn deserialize(&mut self, state: &mut SaveStateDeserializer) {\n\n let mut n: u8 = 0;\n\n n.deserialize(state);\n\n *self = Self::from_byte(n)\n\n }\n\n}\n\n\n\nconst fn sign_extend<const B: u16>(n: u16) -> u16 {\n\n if n & ((1 << B) >> 1) > 0 {\n\n n | !((1 << B) - 1)\n\n } else {\n\n n & (((1 << B) >> 1) - 1)\n\n }\n\n}\n\n\n\n/// Settings used to draw Mode 7's BG1.\n\n#[derive(Debug, Clone, InSaveState)]\n", "file_path": "rsnes/src/ppu.rs", "rank": 65, "score": 2.8515063322121477 }, { "content": " (1, 32) => DspVersion::Dsp2,\n\n (1, 8) => DspVersion::Dsp3,\n\n (2, 8) => DspVersion::Dsp1,\n\n _ => DspVersion::Dsp1B,\n\n },\n\n RomType::HiRom => match (rom_size >> 20, ram_size >> 10) {\n\n (4, 0) => DspVersion::Dsp1,\n\n (4, 2) => DspVersion::Dsp1B, // TODO: Some games may use DSP1 (?)\n\n (2, 2 | 8) => DspVersion::Dsp1B,\n\n _ => DspVersion::Dsp1B, // TODO: is this appropriate?\n\n },\n\n _ => return None,\n\n };\n\n Some(ver)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum CountryFrameRate {\n\n Any,\n", "file_path": "rsnes/src/cartridge.rs", "rank": 66, "score": 2.7397593853614755 }, { "content": " header = Some((new, score));\n\n }\n\n }\n\n }\n\n }\n\n let (header, _score) = header.ok_or(ReadRomError::NoSuitableHeader)?;\n\n\n\n let rom = create_rom(bytes, header.rom_size);\n\n\n\n use core::num::Wrapping;\n\n let Wrapping(checksum): Wrapping<u16> =\n\n rom.iter().copied().map(Into::into).map(Wrapping).sum();\n\n if checksum != header.checksum {\n\n eprintln!(\"warning: checksum did not match! Checksum in ROM is {:04x}; Calculated checksum is {:04x}\", header.checksum, checksum);\n\n }\n\n\n\n let ram_size = header.ram_size;\n\n\n\n let dsp = if let Some(Coprocessor::Dsp) = header.coprocessor {\n\n let ver = header\n", "file_path": "rsnes/src/cartridge.rs", "rank": 67, "score": 2.676818690633497 }, { "content": " pub port1: ControllerPort,\n\n pub port2: ControllerPort,\n\n pio: u8,\n\n pub(crate) auto_joypad_timer: u16,\n\n}\n\n\n\nimpl ControllerPorts {\n\n pub fn new() -> Self {\n\n Self {\n\n port1: ControllerPort::new(Controller::Standard(StandardController::new())),\n\n port2: ControllerPort::new(Controller::None),\n\n pio: 0,\n\n auto_joypad_timer: 0,\n\n }\n\n }\n\n\n\n /// Write to the programmable I/O-port.\n\n /// Returns if EXTLATCH shall be triggered.\n\n pub fn set_pio(&mut self, val: u8) -> bool {\n\n (replace(&mut self.pio, val) & !val) & 0x80 > 0\n", "file_path": "rsnes/src/controller.rs", "rank": 68, "score": 2.616053864453213 }, { "content": " }\n\n }\n\n\n\n pub fn load_cartridge(&mut self, mut cartridge: Cartridge) {\n\n cartridge.set_region(self.is_pal);\n\n self.cartridge = Some(cartridge);\n\n self.cpu = Cpu::new();\n\n self.reset_program_counter();\n\n }\n\n\n\n pub fn reset_program_counter(&mut self) {\n\n let addr = crate::cpu::RESET_VECTOR_ADDR;\n\n self.cpu.regs.pc = Addr24::new(0, self.read::<u16>(addr));\n\n if self.cartridge.as_ref().unwrap().has_sa1() {\n\n let vector = self.with_sa1_cpu().read::<u16>(addr);\n\n self.cartridge.as_mut().unwrap().sa1_mut().cpu_mut().regs.pc = Addr24::new(0, vector);\n\n }\n\n }\n\n\n\n /// Read a value from the mapped memory at the specified address.\n", "file_path": "rsnes/src/device.rs", "rank": 69, "score": 2.5585950757866573 }, { "content": " state.data.as_slice()[..core::mem::size_of::<[T; N]>()].try_into();\n\n state.consume(N);\n\n // TODO: use normal transmute instead as soon as possible!!\n\n // see https://github.com/rust-lang/rust/issues/43408\n\n // see https://github.com/rust-lang/rust/issues/60471\n\n *self = unsafe { core::mem::transmute_copy(res.unwrap()) }\n\n } else {\n\n panic!(\"not enough data to deserialize\")\n\n }\n\n } else {\n\n self.iter_mut().for_each(|i| i.deserialize(state))\n\n }\n\n }\n\n}\n\n\n\nimpl<T: InSaveState + Copy> InSaveState for core::cell::Cell<T> {\n\n fn serialize(&self, state: &mut SaveStateSerializer) {\n\n self.get().serialize(state)\n\n }\n\n\n", "file_path": "save-state/src/lib.rs", "rank": 70, "score": 2.5042291658399822 }, { "content": " smp: Smp::new(audio_backend, is_pal, is_threaded),\n\n ppu: Ppu::new(frame_buffer, is_pal),\n\n dma: Dma::new(),\n\n controllers: ControllerPorts::new(),\n\n cartridge: None,\n\n open_bus: 0,\n\n ram: [0; RAM_SIZE],\n\n wram_addr: Cell::new(0),\n\n memory_cycles: 0,\n\n cpu_ahead_cycles: 186,\n\n new_scanline: true,\n\n new_frame: true,\n\n scanline_drawn: false,\n\n do_hdma: true,\n\n irq_time_h: 0x7fc,\n\n irq_time_v: 0x1ff,\n\n shall_irq: false,\n\n shall_nmi: false,\n\n nmi_vblank_bit: Cell::new(false),\n\n math_registers: MathRegisters::new(),\n", "file_path": "rsnes/src/device.rs", "rank": 71, "score": 2.4907200173152133 }, { "content": " while !snes.new_frame {\n\n snes.run_cycle::<MASTER_CYCLES_PER_TICK>();\n\n cycle_count += u64::from(MASTER_CYCLES_PER_TICK)\n\n }\n\n // a more precise calculation is not possible by using floats\n\n next_device_update += Duration::from_nanos((8800 * cycle_count) / 189);\n\n // reset the next update timer if it fell to far behind\n\n if now > next_device_update + TIME_UNTIL_TIMER_RESET {\n\n next_device_update = now;\n\n }\n\n }\n\n let now = Instant::now();\n\n if now >= next_graphics_update {\n\n window.request_redraw();\n\n next_graphics_update = now + TIME_PER_GPU_FRAME;\n\n }\n\n }\n\n Event::RedrawRequested(_) => {\n\n match surf.get_current_texture() {\n\n Ok(surface_texture) => {\n", "file_path": "emulator/src/main.rs", "rank": 72, "score": 2.384644720965176 }, { "content": " if vblanked {\n\n self.ppu.vblank();\n\n }\n\n if self.ppu.get_pos().x + crate::ppu::RAY_AHEAD_CYCLES >= self.ppu.get_scanline_cycles()\n\n && self.ppu.get_pos().y + 1 < vend\n\n && !self.scanline_drawn\n\n {\n\n self.scanline_drawn = true;\n\n self.ppu.draw_scanline();\n\n }\n\n let h_irq_enabled = self.cpu.nmitimen & 0x10 > 0;\n\n let v_irq_enabled = self.cpu.nmitimen & 0x20 > 0;\n\n self.shall_irq = self.shall_irq\n\n || ((h_irq_enabled || v_irq_enabled)\n\n && (!h_irq_enabled\n\n || ((self.ppu.get_pos().x as i16 - N as i16) >> 2 < self.irq_time_h as i16\n\n && self.ppu.get_pos().x >> 2 >= self.irq_time_h))\n\n && (!v_irq_enabled || self.ppu.get_pos().y == self.irq_time_v)\n\n && (h_irq_enabled || !v_irq_enabled || self.new_scanline));\n\n self.nmi_vblank_bit\n", "file_path": "rsnes/src/timing.rs", "rank": 73, "score": 2.3678979301584704 }, { "content": " .with_inner_size(size)\n\n .with_title(format!(\"{} - {}\", env!(\"CARGO_PKG_NAME\"), title))\n\n .build(&event_loop)\n\n .unwrap_or_else(|err| error!(\"Failure while creating window ({})\", err));\n\n\n\n let inst = wgpu::Instance::new(wgpu::Backends::VULKAN);\n\n let surf = unsafe { inst.create_surface(&window) };\n\n let adapter = inst\n\n .request_adapter(&wgpu::RequestAdapterOptions {\n\n power_preference: wgpu::PowerPreference::LowPower,\n\n compatible_surface: Some(&surf),\n\n force_fallback_adapter: false,\n\n })\n\n .block_on()\n\n .unwrap_or_else(|| error!(\"Failure finding a graphics adapter\"));\n\n let (device, queue) = adapter\n\n .request_device(\n\n &wgpu::DeviceDescriptor {\n\n label: None,\n\n features: wgpu::Features::empty(),\n", "file_path": "emulator/src/main.rs", "rank": 74, "score": 2.3566189762312746 }, { "content": " /// This method also updates open bus.\n\n pub fn read<D: Data>(&mut self, addr: Addr24) -> D {\n\n let value = self.read_data::<D>(addr);\n\n self.open_bus = value.to_open_bus();\n\n self.memory_cycles +=\n\n (self.get_memory_cycle(addr) - 6) * core::mem::size_of::<D::Arr>() as u32;\n\n value\n\n }\n\n\n\n /// Write a value to the mapped memory at the specified address.\n\n /// This method also updates open bus.\n\n pub fn write<D: Data>(&mut self, addr: Addr24, value: D) {\n\n self.open_bus = value.to_open_bus();\n\n self.write_data(addr, value);\n\n self.memory_cycles +=\n\n (self.get_memory_cycle(addr) - 6) * core::mem::size_of::<D::Arr>() as u32;\n\n }\n\n}\n\n\n\nimpl<B: AudioBackend, FB: FrameBuffer> Device<B, FB> {\n", "file_path": "rsnes/src/device.rs", "rank": 75, "score": 2.222029944673146 }, { "content": " limits: wgpu::Limits::downlevel_defaults().using_resolution(adapter.limits()),\n\n },\n\n None,\n\n )\n\n .block_on()\n\n .unwrap_or_else(|err| error!(\"Failure requesting a GPU command queue ({})\", err));\n\n let (vs_entry, vs_shader) = shaders::create_vs(&device);\n\n let (fs_entry, fs_shader) = shaders::create_fs(&device);\n\n\n\n let bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {\n\n label: None,\n\n entries: &[\n\n wgpu::BindGroupLayoutEntry {\n\n binding: 0,\n\n visibility: wgpu::ShaderStages::FRAGMENT,\n\n ty: wgpu::BindingType::Texture {\n\n sample_type: wgpu::TextureSampleType::Float { filterable: true },\n\n view_dimension: wgpu::TextureViewDimension::D2,\n\n multisampled: false,\n\n },\n", "file_path": "emulator/src/main.rs", "rank": 76, "score": 2.2181907509503382 }, { "content": "# rsnes\n\n\n\n<div align='center'>\n\n <img src='https://user-images.githubusercontent.com/26610181/131219139-4b2c12ca-cc3d-4a72-827c-1c83476a4401.png'\n\n alt='rsnes logo' width='384cm' align='center'>\n\n</div>\n\n\n\nA [SNES](https://en.wikipedia.org/wiki/Super_Nintendo_Entertainment_System)\n\nemulator written in [Rust](https://www.rust-lang.org/)\n\n\n\n## Implementation Status\n\n\n\nMany games are already playable, but some graphics and sounds are faulty.\n\nComparatively functioning games are e.g. Donkey Kong Country, Wolfenstein 3D,\n\nF-Zero, Super Mario World, Super Mario Kart, The Legend of Zelda,\n\nSuper Street Fighter II.\n\n\n\n## Usage\n\n\n\nIn the default configuration, controller port 1 is connected to a\n\nstandard controller and port 2 is left unconnected.\n\nThese keyboard keys will drive controller 1:\n\n\n\n| Keyboard key on QWERTY | Controller key |\n\n|------------------------|----------------------|\n\n| **W** | **↑** |\n\n| **A** | **←** |\n\n| **S** | **↓** |\n\n| **D** | **→** |\n\n| **Q** | **L** |\n\n| **E** | **R** |\n\n| **Left Alt** | **Start** |\n\n| **Right Alt** | **Select** |\n\n| **J** | **A** |\n\n| **K** | **B** |\n\n| **L** | **X** |\n\n| **;** *\\** | **Y** |\n\n| 0-9 | Store Save State 0-9 |\n\n| Shift + 0-9 | Load Save State 0-9 |\n\n\n\n*\\** the button right of *L*\n\n\n\n## Configuration\n\n\n\nYou can configure rsnes with a [TOML](https://toml.io/) configuration file.\n\nYou can provide this file by using the option `--config <PATH>` or by placing\n\nit into one of these paths:\n\n\n\n- `$HOME/.config/rsnes/config.toml`\n\n- `$HOME/.config/rsnes.toml`\n\n- `/etc/rsnes.toml`\n\n\n\nSee `emulator/example.toml` for\n\n[documentation](https://github.com/nat-rix/rsnes/blob/main/emulator/example.toml).\n\n\n", "file_path": "README.md", "rank": 77, "score": 1.446926102342868 } ]
Rust
src/serialization/database.rs
MDeiml/attomath
4aac4dad3cd776dd2cb1602aa930c04c315d5186
use crate::{error::ProofError, expression::SingleSubstitution, Identifier, Theorem}; use std::collections::HashMap; #[derive(Debug, PartialEq, Eq)] pub struct Database { names: HashMap<String, (usize, usize)>, theorems: Vec<(Theorem, Proof<usize>, Option<String>)>, last_name: usize, } #[derive(Debug, PartialEq, Eq, Clone)] pub enum Proof<K> { Simplify(K, Identifier, Identifier), Combine(K, K, usize), Axiom(Theorem), } impl<K> Proof<K> { pub fn map_id_result<K1, F, E>(self, f: F) -> Result<Proof<K1>, E> where F: Fn(K) -> Result<K1, E>, { Ok(match self { Proof::Simplify(id, a, b) => Proof::Simplify(f(id)?, a, b), Proof::Combine(id_a, id_b, index) => Proof::Combine(f(id_a)?, f(id_b)?, index), Proof::Axiom(theorem) => Proof::Axiom(theorem), }) } pub fn map_id<K1, F>(self, f: F) -> Proof<K1> where F: Fn(K) -> K1, { self.map_id_result::<_, _, ()>(|id| Ok(f(id))).unwrap() } } #[derive(Debug)] pub enum DatabaseError { TheoremNotFound(Option<String>, Option<usize>), NameCollision(String), TheoremMismatch(Theorem, Theorem), ProofError(ProofError), } impl From<ProofError> for DatabaseError { fn from(e: ProofError) -> Self { Self::ProofError(e) } } impl Database { pub fn new() -> Self { Self { names: HashMap::new(), theorems: Vec::new(), last_name: 0, } } fn get_index(&self, name: Option<&str>, index: Option<usize>) -> Result<usize, DatabaseError> { let (start, end) = match name { Some(name) => *self .names .get(name) .ok_or(DatabaseError::TheoremNotFound(Some(name.to_owned()), index))?, None => (self.last_name, self.theorems.len()), }; match index { Some(i) => { if start + i < end { Ok(start + i) } else { Err(DatabaseError::TheoremNotFound( name.map(|s| s.to_owned()), index, )) } } None => { if start == end { Err(DatabaseError::TheoremNotFound( name.map(|s| s.to_owned()), index, )) } else { Ok(end - 1) } } } } pub fn get(&self, name: Option<&str>, index: Option<usize>) -> Result<&Theorem, DatabaseError> { Ok(&self.theorems[self.get_index(name, index)?].0) } pub fn add_name(&mut self, name: String) -> Result<(), DatabaseError> { if self.theorems.is_empty() { return Err(DatabaseError::TheoremNotFound(None, Some(0))); } let index = self.theorems.len() - 1; if self.theorems[index].2.is_some() { return Err(DatabaseError::TheoremNotFound(None, None)); } match self.names.entry(name.clone()) { std::collections::hash_map::Entry::Occupied(_) => { Err(DatabaseError::NameCollision(name)) } std::collections::hash_map::Entry::Vacant(entry) => { &entry.insert((self.last_name, index + 1)); self.last_name = index + 1; self.theorems[index].2 = Some(name.to_owned()); Ok(()) } } } pub fn add_proof<'a>( &'a mut self, proof: Proof<(Option<String>, Option<usize>)>, ) -> Result<&'a Theorem, DatabaseError> { let proof = proof.map_id_result(|id| self.get_index(id.0.as_deref(), id.1))?; let new_theorem = match proof { Proof::Simplify(id, a, b) => { let theorem = &self.theorems[id].0; let mut new_theorem = theorem.substitute(&SingleSubstitution::new(a, b).unwrap())?; new_theorem.standardize(); new_theorem } Proof::Combine(id_a, id_b, index) => { let theorem_a = &self.theorems[id_a].0; let theorem_b = &self.theorems[id_b].0; let mut new_theorem = theorem_a.combine(&theorem_b, index)?; new_theorem.standardize(); new_theorem } Proof::Axiom(ref theorem) => theorem.clone(), }; self.theorems.push((new_theorem, proof, None)); Ok(&self.theorems.last().unwrap().0) } pub fn substitute(&mut self, theorem: Theorem) -> Result<(), DatabaseError> { let last = &mut self .theorems .last_mut() .ok_or(DatabaseError::TheoremNotFound(None, None))? .0; let mut theorem_standardized = theorem.clone(); theorem_standardized.standardize(); if last == &theorem_standardized { *last = theorem; Ok(()) } else { Err(DatabaseError::TheoremMismatch(theorem, last.clone())) } } fn reverse_id(&self, id: usize, current_id: usize) -> (Option<&str>, Option<usize>) { if let Some(name) = &self.theorems[id].2 { (Some(name), None) } else if id == current_id - 1 { (None, None) } else if id >= self.last_name { (None, Some(id - self.last_name)) } else { let name = self.theorems[id..] .iter() .filter_map(|x| x.2.as_ref()) .next() .unwrap(); let (start, end) = self.names[name]; if end >= current_id { (None, Some(id - start)) } else { (Some(name), Some(id - start)) } } } pub fn proofs<'a>( &'a self, ) -> impl 'a + Iterator<Item = (&Theorem, Proof<(Option<&str>, Option<usize>)>, Option<&str>)> { self.theorems .iter() .enumerate() .map(move |(current_id, (theorem, proof, name))| { let proof = proof.clone().map_id(|id| self.reverse_id(id, current_id)); (theorem, proof, name.as_deref()) }) } }
use crate::{error::ProofError, expression::SingleSubstitution, Identifier, Theorem}; use std::collections::HashMap; #[derive(Debug, PartialEq, Eq)] pub struct Database { names: HashMap<String, (usize, usize)>, theorems: Vec<(Theorem, Proof<usize>, Option<String>)>, last_name: usize, } #[derive(Debug, PartialEq, Eq, Clone)] pub enum Proof<K> { Simplify(K, Identifier, Identifier), Combine(K, K, usize), Axiom(Theorem), } impl<K> Proof<K> { pub fn map_id_result<K1, F, E>(self, f: F) -> Result<Proof<K1>, E> where F: Fn(K) -> Result<K1, E>, { Ok(match self { Proof::Simplify(id, a, b) => Proof::Simplify(f(id)?, a, b), Proof::Combine(id_a, id_b, index) => Proof::Combine(f(id_a)?, f(id_b)?, index), Proof::Axiom(theorem) => Proof::Axiom(theorem), }) } pub fn map_id<K1, F>(self, f: F) -> Proof<K1> where F: Fn(K) -> K1, { self.map_id_result::<_, _, ()>(|id| Ok(f(id))).unwrap() } } #[derive(Debug)] pub enum DatabaseError { TheoremNotFound(Option<String>, Option<usize>), NameCollision(String), TheoremMismatch(Theorem, Theorem), ProofError(ProofError), } impl From<ProofError> for DatabaseError { fn from(e: ProofError) -> Self { Self::ProofError(e) } } impl Database { pub fn new() -> Self { Self { names: HashMap::new(), theorems: Vec::new(), last_name: 0, } } fn get_index(&self, name: Option<&str>, index: Option<usize>) -> Result<usize, DatabaseError> { let (start, end) = match name { Some(name) => *self .names .get(name) .ok_or(DatabaseError::TheoremNotFound(Some(name.to_owned()), index))?, None => (self.last_name, self.theorems.len()), }; match index { Some(i) => { if start + i < end { Ok(start + i) } else { Err(DatabaseError::TheoremNotFound( name.map(|s| s.to_owned()), index, )) } } None => { if start == end { Err(DatabaseError::TheoremNotFound( name.map(|s| s.to_owned()), index, )) } else { Ok(end - 1) } } } } pub fn get(&self, name: Option<&str>, index: Option<usize>) -> Result<&Theorem, DatabaseError> { Ok(&self.theorems[self.get_index(name, index)?].0) } pub fn add_name(&mut self, name: String) -> Result<(), DatabaseError> { if self.theorems.is_empty() { return Err(DatabaseError::TheoremNotFound(None, Some(0))); } let index = self.theorems.len() - 1; if self.theorems[index].2.is_some() { return Err(DatabaseError::TheoremNotFound(None, None)); } match self.names.entry(name.clone()) { std::collections::hash_map::Entry::Occupied(_) => { Err(DatabaseError::NameCollision(name)) } std::collections::hash_map::Entry::Vacant(entry) => { &entry.insert((self.last_name, index + 1)); self.last_name = index + 1; self.theorems[index].2 = Some(name.to_owned()); Ok(()) } } } pub fn add_proof<'a>( &'a mut self, proof: Proof<(Option<String>, Option<usize>)>, ) -> Result<&'a Theorem, DatabaseError> { let proof = proof.map_id_result(|id| self.get_index(id.0.as_deref(), id.1))?; let new_theorem = match proof { Proof::Simplify(id, a, b) => { let theorem = &self.theorems[id].0; let mut new_theorem = theorem.substitute(&SingleSubstitution::new(a, b).unwrap())?; new_theorem.standardize(); new_theorem } Proof::Combine(id_a, id_b, index) => { let theorem_a = &self.theorems[id_a].0; let theorem_b = &self.theorems[id_b].0; let mut new_theorem = theorem_a.combine(&theorem_b, index)?; new_theorem.standardize(); new_theorem } Proof::Axiom(ref theorem) => theorem.clone(), }; self.theorems.push((new_theorem, proof, None)); Ok(&self.theorems.last().unwrap().0) }
fn reverse_id(&self, id: usize, current_id: usize) -> (Option<&str>, Option<usize>) { if let Some(name) = &self.theorems[id].2 { (Some(name), None) } else if id == current_id - 1 { (None, None) } else if id >= self.last_name { (None, Some(id - self.last_name)) } else { let name = self.theorems[id..] .iter() .filter_map(|x| x.2.as_ref()) .next() .unwrap(); let (start, end) = self.names[name]; if end >= current_id { (None, Some(id - start)) } else { (Some(name), Some(id - start)) } } } pub fn proofs<'a>( &'a self, ) -> impl 'a + Iterator<Item = (&Theorem, Proof<(Option<&str>, Option<usize>)>, Option<&str>)> { self.theorems .iter() .enumerate() .map(move |(current_id, (theorem, proof, name))| { let proof = proof.clone().map_id(|id| self.reverse_id(id, current_id)); (theorem, proof, name.as_deref()) }) } }
pub fn substitute(&mut self, theorem: Theorem) -> Result<(), DatabaseError> { let last = &mut self .theorems .last_mut() .ok_or(DatabaseError::TheoremNotFound(None, None))? .0; let mut theorem_standardized = theorem.clone(); theorem_standardized.standardize(); if last == &theorem_standardized { *last = theorem; Ok(()) } else { Err(DatabaseError::TheoremMismatch(theorem, last.clone())) } }
function_block-full_function
[ { "content": "pub fn or_fail<I, O, E: ParseError<I>, F>(mut f: F) -> impl FnMut(I) -> IResult<I, O, E>\n\nwhere\n\n F: Parser<I, O, E>,\n\n{\n\n move |input| {\n\n f.parse(input).map_err(|error| match error {\n\n nom::Err::Error(e) => nom::Err::Failure(e),\n\n e => e,\n\n })\n\n }\n\n}\n", "file_path": "src/serialization/error.rs", "rank": 0, "score": 132622.50170997303 }, { "content": "/// Tests whether the given identifier is an operator.\n\n///\n\n/// Operators occupy the range `(Identifier::MIN ..= -1)`.\n\n/// The special value `Identifier::MIN` is also an operator.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use attomath::{expression::is_operator, Identifier};\n\n///\n\n/// assert!(is_operator(-2));\n\n/// assert!(is_operator(Identifier::MIN));\n\n/// assert!(!is_operator(0));\n\n/// ```\n\npub fn is_operator(x: Identifier) -> bool {\n\n x < 0\n\n}\n\n\n\n/// A substion to combine two other substitutions.\n\n///\n\n/// Tries to use `first` first and uses `then` if the variable did not get substituted by `first`.\n\npub struct ChainSubstitution<S: Substitution, T: Substitution> {\n\n pub first: S,\n\n pub then: T,\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Either<S, T> {\n\n Left(S),\n\n Right(T),\n\n}\n\n\n\nimpl<S: Borrow<[Identifier]>, T: Borrow<[Identifier]>> Borrow<[Identifier]> for Either<S, T> {\n\n fn borrow(&self) -> &[Identifier] {\n", "file_path": "src/expression.rs", "rank": 1, "score": 72440.53308190452 }, { "content": "struct Permutations<'a, T> {\n\n sequence: &'a mut [T],\n\n counters: Vec<usize>,\n\n depth: usize,\n\n}\n\n\n\nimpl<'a, T> Permutations<'a, T> {\n\n fn new(sequence: &'a mut [T]) -> Self {\n\n let length = sequence.len();\n\n Permutations {\n\n sequence,\n\n counters: vec![0; length],\n\n depth: 0,\n\n }\n\n }\n\n\n\n fn next(&mut self) -> Option<&mut [T]> {\n\n if self.depth >= self.sequence.len() {\n\n return None;\n\n }\n", "file_path": "src/theorem.rs", "rank": 2, "score": 53321.55939320048 }, { "content": "fn main() {\n\n let filename = std::env::args().nth(1).unwrap();\n\n let mut fmt = Formatter::new();\n\n let mut database = Database::new();\n\n\n\n let file = File::open(filename).unwrap();\n\n for (line_number, line) in BufReader::new(file).lines().enumerate() {\n\n let line = line.unwrap();\n\n let command = match Command::parse(&fmt, &line) {\n\n Ok(command) => command,\n\n Err(err) => {\n\n eprintln!(\n\n \"Syntax error in line {}: {:?}\\n\\t{}\",\n\n line_number, err, line\n\n );\n\n return;\n\n }\n\n };\n\n match command.clone().apply(&mut fmt, &mut database) {\n\n Ok(_) => (),\n", "file_path": "src/main.rs", "rank": 3, "score": 36194.49992289758 }, { "content": "#[derive(Debug)]\n\nenum GreedyErrorKind {\n\n Context(&'static str),\n\n Nom(ErrorKind),\n\n Char(char),\n\n}\n\n\n", "file_path": "src/serialization/error.rs", "rank": 4, "score": 32354.361956467183 }, { "content": "/// A `Substitution` maps variable ids to expressions (represented by a sequence of identifiers).\n\n///\n\n/// This is intented to be used together with [`Expression`]s.\n\npub trait Substitution {\n\n type T: Borrow<[Identifier]> + std::fmt::Debug;\n\n\n\n /// Get the stored substitution for the variable with identifier `id`. Or `None` if the\n\n /// variable should not be replaced.\n\n ///\n\n /// # Panics\n\n /// This may panic if `id` is not in the range of the particular substitution.\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use attomath::expression::{Substitution, WholeSubstitution, Expression};\n\n ///\n\n /// let expr = Expression::from_raw([-2, 0, 1]).unwrap();\n\n /// let mut sub = WholeSubstitution::with_capacity(2);\n\n /// sub.insert(0, expr.to_slice());\n\n /// assert_eq!(sub.substitution_opt(0), Some(expr.to_slice()));\n\n /// assert_eq!(sub.substitution_opt(1), None);\n\n /// ```\n\n fn substitution_opt(&self, id: Identifier) -> Option<Expression<Self::T>>;\n", "file_path": "src/expression.rs", "rank": 5, "score": 32021.073007318766 }, { "content": "pub trait Length {\n\n fn length(&self) -> usize;\n\n}\n\n\n\nimpl Length for &str {\n\n fn length(&self) -> usize {\n\n self.len()\n\n }\n\n}\n\n\n\nimpl Display for GreedyError<&str> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n for (input, error) in self.0.iter() {\n\n writeln!(f, \"{:?} {}\", error, &input[0..20])?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<I> ParseError<I> for GreedyError<I>\n", "file_path": "src/serialization/error.rs", "rank": 6, "score": 30627.413785086275 }, { "content": "/// When using this struct it is guaranteed that only valid theorems can be constructed (using\n\n/// [`Theorem::substitute`], [`Theorem::combine`] and [`Theorem::standardize`]) provided that only valid theorems (or axioms)\n\n/// are constructed using [`Theorem::new`].\n\n#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Debug)]\n\npub struct Theorem {\n\n conclusion: OwnedStatement,\n\n assumptions: Vec<OwnedStatement>,\n\n dvrs: Vec<DVR>,\n\n}\n\n\n\nimpl Theorem {\n\n /// Returns this theorems' conclusion\n\n pub fn conclusion(&self) -> &OwnedStatement {\n\n &self.conclusion\n\n }\n\n\n\n /// Returns this theorems' assumptions\n\n pub fn assumptions(&self) -> &[OwnedStatement] {\n\n &self.assumptions\n\n }\n", "file_path": "src/theorem.rs", "rank": 7, "score": 25807.964127875308 }, { "content": " pub fn combine(&self, other: &Theorem, index: usize) -> Result<Self, ProofError> {\n\n let max_var = self.max_var();\n\n let mut substitution =\n\n WholeSubstitution::with_capacity((Wrapping(max_var as usize) + Wrapping(1)).0);\n\n other\n\n .conclusion\n\n .unify(&self.assumptions[index], &mut substitution)?;\n\n let shift = other.max_var() + 1;\n\n let shift_sub = ShiftSubstitution::new(shift);\n\n let substitution = ChainSubstitution {\n\n first: substitution,\n\n then: shift_sub,\n\n };\n\n let mut t = self.substitute_skip_assumption(&substitution, Some(index))?;\n\n t.assumptions.extend_from_slice(&other.assumptions);\n\n t.assumptions.shrink_to_fit();\n\n t.dvrs.extend_from_slice(&other.dvrs);\n\n t.dvrs.shrink_to_fit();\n\n Ok(t)\n\n }\n\n}\n\n\n", "file_path": "src/theorem.rs", "rank": 8, "score": 25807.182303713776 }, { "content": " .max()\n\n .unwrap_or(-1)\n\n }\n\n\n\n /// Uses the given substitution on this theorem's assumptions, dvrs and conclusion to create a\n\n /// new theorem. (see [`Statement::substitute`][crate::Statement::substitute], [`DVR::substitute`])\n\n ///\n\n /// # Errors\n\n /// This method can return a `DVRError` if the substitution violates one of this theorem's\n\n /// `DVR`s.\n\n ///\n\n pub fn substitute<S: Substitution>(&self, substitution: &S) -> Result<Self, ProofError> {\n\n self.substitute_skip_assumption(substitution, None)\n\n }\n\n\n\n fn substitute_skip_assumption<S: Substitution>(\n\n &self,\n\n substitution: &S,\n\n skip_assumption: Option<usize>,\n\n ) -> Result<Self, ProofError> {\n", "file_path": "src/theorem.rs", "rank": 9, "score": 25806.736866691776 }, { "content": " // part of the conclusion. But these are now numbered 0 .. next_var and do not have to be\n\n // changed any longer.\n\n for (i, v) in var_map.iter_mut().enumerate() {\n\n *v = if i < next_var as usize {\n\n Some(i as Identifier)\n\n } else {\n\n None\n\n };\n\n }\n\n // var_maps holds the current candidates for the variable substitution that results in the\n\n // lowest lexicographic ordering\n\n let mut var_maps = vec![var_map];\n\n\n\n for assumptions in indexed_assumptions\n\n .group_by_mut(|(_, i), (_, j)| normalized_assumptions[*i] == normalized_assumptions[*j])\n\n {\n\n let mut next_var1 = 0;\n\n let mut assumptions_min: Option<Vec<(OwnedStatement, usize)>> = None;\n\n\n\n // for each new group and each candidate in var_maps try all permutation of \"new\"\n", "file_path": "src/theorem.rs", "rank": 10, "score": 25804.19555461024 }, { "content": "\n\n /// Returns this theorems' `DVR`s\n\n pub fn dvrs(&self) -> &[DVR] {\n\n &self.dvrs\n\n }\n\n\n\n /// Create a new `Theorem` containig the given assumptions, `DVR`s and conclusion\n\n ///\n\n /// This should only be used for axioms and theorems that are already proven.\n\n pub fn new(\n\n conclusion: OwnedStatement,\n\n assumptions: Vec<OwnedStatement>,\n\n dvrs: Vec<DVR>,\n\n ) -> Self {\n\n Theorem {\n\n conclusion,\n\n assumptions,\n\n dvrs,\n\n }\n\n }\n", "file_path": "src/theorem.rs", "rank": 11, "score": 25803.959981350872 }, { "content": " .unwrap();\n\n }\n\n\n\n self.dvrs.sort_unstable();\n\n self.dvrs.dedup();\n\n }\n\n\n\n /// Returns the variable with the biggest identifier occuring in this theorem. This can be used\n\n /// together with [`WholeSubstitution::with_capacity`].\n\n pub fn max_var(&self) -> Identifier {\n\n self.conclusion\n\n .expression\n\n .variables()\n\n .chain(\n\n self.assumptions\n\n .iter()\n\n .map(|st| st.expression.variables())\n\n .flatten(),\n\n )\n\n .filter(|symb| !is_operator(*symb))\n", "file_path": "src/theorem.rs", "rank": 12, "score": 25799.602605229215 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn permutations() {\n\n let mut arr = [0, 1, 2, 3];\n\n let mut perm = Permutations::new(&mut arr);\n\n let mut counter = 0;\n\n while let Some(_) = perm.next() {\n\n counter += 1;\n\n }\n\n assert_eq!(counter, 24);\n\n }\n\n}\n", "file_path": "src/theorem.rs", "rank": 13, "score": 25799.5389285385 }, { "content": " Ok(Theorem {\n\n conclusion,\n\n assumptions,\n\n dvrs,\n\n })\n\n }\n\n\n\n /// Creates a new `Theorem` by applying `other` to this theorem's assumption with index `index`\n\n ///\n\n /// # Panics\n\n /// This may panic if `index > self.assumptions().len`\n\n ///\n\n /// # Errors\n\n /// This can product the following errors:\n\n /// * `OperatorMismatch`, `VariableMismatch` or `JudgementMismatch` - if the conclusion of\n\n /// `other` cannot be unified with the specified assumption (see\n\n /// [`Statement::unify`][crate::Statement::unify])\n\n /// * `DVRError`- if the substitution needed to transform the conclusion of `other` into the\n\n /// specified assumption violates one of this theorems' `DVR`s\n\n ///\n", "file_path": "src/theorem.rs", "rank": 14, "score": 25798.56998350813 }, { "content": "\n\n /// Turns this theorem into its standard representation, numbering variables in the order of\n\n /// their apperance and sorting the assumptions and dvrs (see\n\n /// [`Expression::standardize`][crate::Expression::standardize]).\n\n ///\n\n /// It holds, that applying standardize twice is the same as applying it once. Two theorems are\n\n /// logically \"equal\", that is they can be constructed from each other by reordering\n\n /// assumptions and substituting variables by mutally distinct variables, if and only if they\n\n /// are the same after calling standardize on them.\n\n pub fn standardize(&mut self) {\n\n let max_var = self.max_var();\n\n let mut var_map = vec![None; (Wrapping(max_var as usize) + Wrapping(1)).0];\n\n let mut next_var = 0;\n\n\n\n // first number the variables of the conclusion in order\n\n self.conclusion\n\n .expression\n\n .standardize_range(&mut var_map, &mut next_var, ..);\n\n\n\n // remove duplicate assumptions\n", "file_path": "src/theorem.rs", "rank": 15, "score": 25798.30366091356 }, { "content": " // variables, i.e. variables that have are not yet determined in the candidates.\n\n // Save the best permutations in var_maps1 as the new candidates for the next iteration\n\n let mut var_maps1 = Vec::new();\n\n for var_map in var_maps.iter_mut() {\n\n let mut perm = assumptions.iter().cloned().collect::<Vec<_>>();\n\n for (assumption, _) in perm.iter_mut() {\n\n assumption.expression.substitute_variables(&var_map);\n\n }\n\n let mut perm = Permutations::new(&mut perm);\n\n while let Some(permutation) = perm.next() {\n\n let mut var_map1 = var_map.clone();\n\n next_var1 = next_var;\n\n\n\n for (assumption, _) in permutation.iter_mut() {\n\n assumption.expression.standardize_range(\n\n &mut var_map1,\n\n &mut next_var1,\n\n next_var..,\n\n );\n\n }\n", "file_path": "src/theorem.rs", "rank": 16, "score": 25797.466201369527 }, { "content": " })\n\n .collect::<Vec<_>>();\n\n // sort the assumptions by their normalized version. Assumptions with the same\n\n // normalization are now grouped together\n\n indexed_assumptions.sort_unstable_by_key(|(_, index)| &normalized_assumptions[*index]);\n\n\n\n // The rest of this function is to find for each group of the same normalization the\n\n // variable substitution (some variables could be fixed by previous groups) that results\n\n // in the smallest lexicographic ordering of the group after its elements are sorted\n\n\n\n // first standardize all assumptions in their new order, so for each group the \"free\"\n\n // variables are going to be substituted by some permutation of themselves\n\n let mut temp_next_var = next_var;\n\n for (assumption, _) in indexed_assumptions.iter_mut() {\n\n assumption\n\n .expression\n\n .standardize_range(&mut var_map, &mut temp_next_var, ..);\n\n }\n\n\n\n // we have to forget the variable map we just computed, but not the variables that are also\n", "file_path": "src/theorem.rs", "rank": 17, "score": 25797.416062888577 }, { "content": " // TODO: maybe move this into combine / simplify\n\n self.assumptions.sort_unstable();\n\n self.assumptions.dedup();\n\n\n\n // calculate a \"normalized\" version for each assumption. That is, number each single\n\n // theorems variables in order\n\n let mut indexed_assumptions = self\n\n .assumptions\n\n .iter()\n\n .cloned()\n\n .enumerate()\n\n .map(|(a, b)| (b, a))\n\n .collect::<Vec<_>>();\n\n let normalized_assumptions = self\n\n .assumptions\n\n .drain(..)\n\n .map(|assumption| {\n\n let mut normalized = assumption;\n\n normalized.expression.standardize();\n\n normalized\n", "file_path": "src/theorem.rs", "rank": 18, "score": 25797.294509949694 }, { "content": " }\n\n }\n\n var_maps = var_maps1;\n\n next_var = next_var1;\n\n // save the minimal substitution in indexed_assumptions. This way we do not have to\n\n // substitute again with some candidate (*)\n\n assumptions.swap_with_slice(assumptions_min.unwrap().as_mut_slice());\n\n }\n\n\n\n // just use the saved assumptions (*) this is equivalent to every substitution candidate\n\n self.assumptions\n\n .extend(indexed_assumptions.into_iter().map(|(a, _)| a));\n\n\n\n // every variable in a DVR has to appear in some assumption or the conclusion. Just order\n\n // the DVRs accordingly.\n\n for dvr in self.dvrs.iter_mut() {\n\n *dvr = dvr\n\n .substitute(&VariableSubstitution::new(var_maps[0].as_slice()).unwrap())\n\n .next()\n\n .unwrap()\n", "file_path": "src/theorem.rs", "rank": 19, "score": 25796.673782635182 }, { "content": "use std::{cmp::Ordering, num::Wrapping};\n\n\n\nuse crate::{\n\n dvr::DVR,\n\n error::ProofError,\n\n expression::{\n\n is_operator, ChainSubstitution, ShiftSubstitution, Substitution, VariableSubstitution,\n\n WholeSubstitution,\n\n },\n\n statement::OwnedStatement,\n\n types::*,\n\n};\n\n\n\n/// A theorem consisting of zero or more [`DVR`] or __assumptions__\n\n/// and a __conclusion__\n\n///\n\n/// A theorem could represent something like _if x0 is provable and (x0 -> x1) is provable then b\n\n/// is provable_. In this example the assumptions would be _x0_ and _x0 -> x1_ and the conclusion\n\n/// would be _x1_.\n\n///\n", "file_path": "src/theorem.rs", "rank": 20, "score": 25795.672388991847 }, { "content": " match assumptions_min\n\n .as_deref_mut()\n\n .map(|a_min| {\n\n permutation\n\n .iter()\n\n .map(|(a, _)| a)\n\n .cmp(a_min.iter().map(|(a, _)| a))\n\n })\n\n .unwrap_or(Ordering::Less)\n\n {\n\n Ordering::Equal => {\n\n var_maps1.push(var_map1);\n\n }\n\n Ordering::Less => {\n\n var_maps1.clear();\n\n var_maps1.push(var_map1);\n\n assumptions_min = Some(permutation.iter().cloned().collect());\n\n }\n\n Ordering::Greater => {}\n\n }\n", "file_path": "src/theorem.rs", "rank": 21, "score": 25795.061189617245 }, { "content": " if self.depth != 0 {\n\n while self.counters[self.depth] >= self.depth {\n\n self.counters[self.depth] = 0;\n\n self.depth += 1;\n\n if self.depth >= self.sequence.len() {\n\n return None;\n\n }\n\n }\n\n if self.depth % 2 == 0 {\n\n self.sequence.swap(0, self.depth);\n\n } else {\n\n self.sequence.swap(self.counters[self.depth], self.depth);\n\n }\n\n self.counters[self.depth] += 1;\n\n self.depth = 1;\n\n Some(&mut self.sequence)\n\n } else {\n\n self.depth = 1;\n\n Some(&mut self.sequence)\n\n }\n", "file_path": "src/theorem.rs", "rank": 22, "score": 25794.029647855117 }, { "content": " let conclusion = self.conclusion.substitute(substitution);\n\n let assumptions: Vec<OwnedStatement> = self\n\n .assumptions\n\n .iter()\n\n .enumerate()\n\n .filter_map(|(i, a)| {\n\n if Some(i) == skip_assumption {\n\n None\n\n } else {\n\n Some(a)\n\n }\n\n })\n\n .map(|a| a.substitute(substitution))\n\n .collect();\n\n let dvrs = self\n\n .dvrs\n\n .iter()\n\n .map(|dvr| dvr.substitute(substitution))\n\n .flatten()\n\n .collect::<Result<Vec<_>, _>>()?;\n", "file_path": "src/theorem.rs", "rank": 23, "score": 25793.219868834978 }, { "content": "pub enum Command {\n\n Proof(\n\n Proof<(Option<String>, Option<usize>)>,\n\n Option<Theorem>,\n\n Option<String>,\n\n ),\n\n Judgement(String),\n\n Operator(String, u8),\n\n}\n\n\n\nimpl Command {\n\n pub fn apply(self, fmt: &mut Formatter, database: &mut Database) -> Result<(), DatabaseError> {\n\n match self {\n\n Command::Proof(proof, theorem, name) => {\n\n let _theorem = database.add_proof(proof)?;\n\n if let Some(name) = name {\n\n database.add_name(name)?;\n\n }\n\n if let Some(theorem) = theorem {\n\n database.substitute(theorem)?;\n", "file_path": "src/serialization/command.rs", "rank": 28, "score": 25.61028773529167 }, { "content": " }\n\n DatabaseError::ProofError(err) => {\n\n let (id_a, id_b, index) = match command {\n\n Command::Proof(Proof::Combine(id_a, id_b, index), _, _) => {\n\n (id_a, id_b, index)\n\n }\n\n _ => unreachable!(),\n\n };\n\n let theorem_a = database.get(id_a.0.as_deref(), id_a.1).unwrap();\n\n let theorem_b = database.get(id_b.0.as_deref(), id_b.1).unwrap();\n\n let mut sa = String::new();\n\n fmt.format_theorem(&mut sa, &theorem_a);\n\n let mut sb = String::new();\n\n fmt.format_theorem(&mut sb, &theorem_b);\n\n let mut ss = String::new();\n\n fmt.format_statement(&mut ss, &theorem_a.assumptions()[index]);\n\n eprintln!(\n\n \"proof error combining\\n{}\\ninto the argument {} of\\n{}\",\n\n sb, ss, sa\n\n );\n", "file_path": "src/main.rs", "rank": 33, "score": 22.771394460964608 }, { "content": " /// assert_eq!(sub.substitution_opt(1), None);\n\n /// ```\n\n pub fn insert(&mut self, id: Identifier, expr: Expression<&'a [Identifier]>) {\n\n self.substitution[id as usize] = Some(expr)\n\n }\n\n}\n\n\n\nimpl<'a> Substitution for WholeSubstitution<'a> {\n\n type T = &'a [Identifier];\n\n\n\n fn substitution_opt(&self, id: Identifier) -> Option<Expression<&'a [Identifier]>> {\n\n self.substitution[id as usize]\n\n }\n\n}\n\n\n\n/// A single variable substitution\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct SingleSubstitution(Identifier, Identifier);\n\n\n\nimpl SingleSubstitution {\n", "file_path": "src/expression.rs", "rank": 35, "score": 22.609949546359754 }, { "content": " Err(err) => {\n\n eprint!(\"Error in line {}: \", line_number);\n\n match err {\n\n DatabaseError::TheoremNotFound(name, id) => match (name, id) {\n\n (Some(name), Some(id)) => eprint!(\"theorem {}.{} not found\", name, id),\n\n (Some(name), None) => eprintln!(\"theorem {} not found\", name),\n\n (None, Some(id)) => eprint!(\"theorem {} not found\", id),\n\n (None, None) => eprintln!(\"theorem $ not found\"),\n\n },\n\n DatabaseError::NameCollision(name) => {\n\n eprintln!(\"{} already defined\", name)\n\n }\n\n DatabaseError::TheoremMismatch(theorem_a, theorem_b) => {\n\n eprintln!(\"theorem mismatch\");\n\n let mut sa = String::new();\n\n fmt.format_theorem(&mut sa, &theorem_a);\n\n let mut sb = String::new();\n\n fmt.format_theorem(&mut sb, &theorem_b);\n\n eprintln!(\"expected: {}\", sb);\n\n eprintln!(\" found: {}\", sa);\n", "file_path": "src/main.rs", "rank": 36, "score": 22.20097798340933 }, { "content": " Ok((input, Command::Judgement(judgement.to_owned())))\n\n }\n\n\n\n fn parse_operator<'a>(input: &'a str) -> IResult<&'a str, Command, GreedyError<&'a str>> {\n\n let (input, _) = tag(\"opr \")(input)?;\n\n let (input, operator) = take_until(\" \")(input)?;\n\n let (input, _) = char(' ')(input)?;\n\n let (input, arity) = map_opt(digit1, |s: &str| s.parse::<u8>().ok())(input)?;\n\n Ok((input, Command::Operator(operator.to_owned(), arity)))\n\n }\n\n\n\n fn serialize_id(&self, s: &mut String, id: &(Option<String>, Option<usize>)) {\n\n match id {\n\n (Some(name), Some(index)) => write!(s, \"{}.{}\", name, index + 1).unwrap(),\n\n (Some(name), None) => s.push_str(&name),\n\n (None, Some(index)) => write!(s, \"{}\", index + 1).unwrap(),\n\n (None, None) => s.push_str(\"$\"),\n\n }\n\n }\n\n\n", "file_path": "src/serialization/command.rs", "rank": 37, "score": 22.054169248486808 }, { "content": " pub fn serialize(&self, s: &mut String, fmt: &Formatter) {\n\n match self {\n\n Command::Proof(Proof::Simplify(id, a, b), theorem, name) => {\n\n s.push_str(\"smp \");\n\n self.serialize_id(s, id);\n\n s.push_str(\" (\");\n\n fmt.format_variable(s, *a);\n\n s.push_str(\" ~ \");\n\n fmt.format_variable(s, *b);\n\n s.push_str(\" )\");\n\n if let Some(theorem) = theorem {\n\n s.push_str(\" { \");\n\n fmt.format_theorem(s, theorem);\n\n s.push_str(\" }\");\n\n }\n\n if let Some(name) = name {\n\n write!(s, \": {}\", name).unwrap();\n\n }\n\n }\n\n Command::Proof(Proof::Combine(id_a, id_b, index), theorem, name) => {\n", "file_path": "src/serialization/command.rs", "rank": 38, "score": 21.749770377036135 }, { "content": "\n\n /// Creates a new `DVR` restricting `a` and `b` from being the same variable.\n\n ///\n\n /// # Errors\n\n /// This function fails with a `DVRError` if `a == b`\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use attomath::DVR;\n\n /// use attomath::error::ProofError;\n\n ///\n\n /// let dvr = DVR::new(0, 1);\n\n /// assert_eq!(dvr.map(|d| d.variables()), Ok((0, 1)));\n\n ///\n\n /// let dvr = DVR::new(1, 1);\n\n /// assert_eq!(dvr, Err(ProofError::DVRError(1)));\n\n /// ```\n\n pub fn new(a: Identifier, b: Identifier) -> Result<Self, ProofError> {\n\n if is_operator(a) {\n\n Err(ProofError::DVRError(a))\n", "file_path": "src/dvr.rs", "rank": 39, "score": 20.580164595757843 }, { "content": " database.proofs().map(|(theorem, proof, name)| {\n\n let mut should_serialize_theorem = name.is_some();\n\n if !should_serialize_theorem {\n\n let mut theorem1 = theorem.clone();\n\n theorem1.standardize();\n\n should_serialize_theorem |= &theorem1 != theorem;\n\n }\n\n Command::Proof(\n\n proof.map_id(|(s, n)| (s.map(|s| s.to_owned()), n)),\n\n if should_serialize_theorem {\n\n Some(theorem.clone())\n\n } else {\n\n None\n\n },\n\n name.map(|n| n.to_owned()),\n\n )\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/serialization/command.rs", "rank": 40, "score": 20.136460027333644 }, { "content": " /// Create a new substitution to rename one variable.\n\n ///\n\n /// Returns `None` if one of the identifiers is not a variable (see [is_operator]).\n\n pub fn new(a: Identifier, b: Identifier) -> Option<Self> {\n\n if is_operator(a) || is_operator(b) {\n\n return None;\n\n }\n\n Some(SingleSubstitution(a, b))\n\n }\n\n}\n\n\n\nimpl Substitution for SingleSubstitution {\n\n type T = [Identifier; 1];\n\n\n\n fn substitution_opt(&self, id: Identifier) -> Option<Expression<[Identifier; 1]>> {\n\n if id == self.0 {\n\n Some(Expression { data: [self.1] })\n\n } else {\n\n None\n\n }\n", "file_path": "src/expression.rs", "rank": 41, "score": 20.014899819746457 }, { "content": " }\n\n}\n\n\n\n/// A complete variable substitution.\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct VariableSubstitution<T: Borrow<[Option<Identifier>]>>(T);\n\n\n\nimpl<T: Borrow<[Option<Identifier>]>> VariableSubstitution<T> {\n\n /// Creates a substitution with the capacity to store replacements for variables `0` to\n\n /// `n - 1`.\n\n ///\n\n /// Returns none if a identifier in `variables` is not a variable (see [is_operator]).\n\n pub fn new(variables: T) -> Option<Self> {\n\n for var in variables.borrow() {\n\n if let Some(var) = var {\n\n if is_operator(*var) {\n\n return None;\n\n }\n\n }\n\n }\n", "file_path": "src/expression.rs", "rank": 43, "score": 19.807117219825752 }, { "content": " )),\n\n )),\n\n |(_, theorem, _, name)| (Some(theorem), name),\n\n ),\n\n map(tuple((tag(\": \"), rest)), |(_, name): (&str, &str)| {\n\n (None, Some(name.to_owned()))\n\n }),\n\n ))(input)?;\n\n Ok((\n\n input,\n\n Command::Proof(Proof::Combine(id_a, id_b, index - 1), theorem, name),\n\n ))\n\n })(input)\n\n }\n\n\n\n fn parse_axiom<'a>(\n\n fmt: &Formatter,\n\n input: &'a str,\n\n ) -> IResult<&'a str, Command, GreedyError<&'a str>> {\n\n let (input, _) = tag(\"axm { \")(input)?;\n", "file_path": "src/serialization/command.rs", "rank": 44, "score": 19.64054980572643 }, { "content": "\n\n impl Iterator for Iter {\n\n type Item = Result<DVR, ProofError>;\n\n\n\n fn next(&mut self) -> Option<Result<DVR, ProofError>> {\n\n if self.index >= self.a.len() * self.b.len() {\n\n return None;\n\n }\n\n let res = DVR::new(\n\n self.a[self.index % self.a.len()],\n\n self.b[self.index / self.a.len()],\n\n );\n\n self.index += 1;\n\n Some(res)\n\n }\n\n\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n\n let rem = self.a.len() * self.b.len() - self.index;\n\n (rem, Some(rem))\n\n }\n\n }\n\n Iter {\n\n a: vars_a,\n\n b: vars_b,\n\n index: 0,\n\n }\n\n }\n\n}\n", "file_path": "src/dvr.rs", "rank": 45, "score": 19.576997020248037 }, { "content": " .copied()\n\n .filter(|s| !is_operator(*s))\n\n }\n\n\n\n /// Returns the subexpression beginning at the given index.\n\n ///\n\n /// # Panics\n\n /// This method panics if start_index is not in the range `0..self.data().borrow().len()`\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use attomath::expression::Expression;\n\n ///\n\n /// let st = Expression::from_raw([-2, 0, -2, 1, 0]).unwrap();\n\n /// assert_eq!(*st.subexpression(2).data(), &[-2, 1, 0]);\n\n /// assert_eq!(*st.subexpression(3).data(), &[1]);\n\n /// ```\n\n pub fn subexpression<'a>(&'a self, start_index: usize) -> Expression<&'a [Identifier]> {\n\n Self::subexpression_check(&self.data, start_index).expect(\n\n format!(\n", "file_path": "src/expression.rs", "rank": 46, "score": 19.528877225453886 }, { "content": " .add_proof(Proof::Combine(\n\n (None, None),\n\n (Some(\"b\".to_owned()), None),\n\n 0,\n\n ))\n\n .unwrap();\n\n database.add_name(\"c\".to_owned()).unwrap();\n\n\n\n let theorem = database.get(Some(\"c\"), None).unwrap();\n\n assert_eq!(theorem, &Theorem::new(c, vec![], vec![]));\n\n\n\n let mut fmt = Formatter::new();\n\n fmt.add_operator(\"A\".to_owned(), 0);\n\n fmt.add_operator(\"B\".to_owned(), 0);\n\n fmt.add_operator(\"C\".to_owned(), 0);\n\n fmt.add_judgement(\"|-\".to_owned());\n\n let s = serialize_database(&fmt, &database);\n\n assert_eq!(\n\n s,\n\n r#\"jdg |-\n", "file_path": "src/serialization/command.rs", "rank": 47, "score": 18.449943604755084 }, { "content": " /// let mut sub = WholeSubstitution::with_capacity(2);\n\n /// let result = a.unify(&b, &mut sub);\n\n /// assert_eq!(result, Err(ProofError::OperatorMismatch(-3, -2)));\n\n /// ```\n\n pub fn unify<'a, S: Borrow<[Identifier]>>(\n\n &'a self,\n\n other: &Expression<S>,\n\n substitution: &mut WholeSubstitution<'a>,\n\n ) -> Result<(), ProofError> {\n\n let expr = self.data.borrow();\n\n let mut expr_index = 0;\n\n for &symb in other.data.borrow().iter() {\n\n if is_operator(symb) {\n\n let symb_self = expr[expr_index];\n\n expr_index += 1;\n\n if symb_self != symb {\n\n return Err(ProofError::OperatorMismatch(symb_self, symb));\n\n }\n\n } else {\n\n if let Some(old) = substitution.substitution_opt(symb) {\n", "file_path": "src/expression.rs", "rank": 48, "score": 18.358273995100113 }, { "content": " or_fail(|input| {\n\n let (input, id_a) = map_parser(is_not(\"(\"), Self::parse_id)(input)?;\n\n let (input, _) = tag(\"(\")(input)?;\n\n let (input, index) = map_opt(digit1, |s: &str| {\n\n s.parse::<usize>().ok().filter(|i| *i != 0)\n\n })(input)?;\n\n let (input, _) = tag(\") <- \")(input)?;\n\n let (input, id_b) = map_parser(is_not(\" \"), Self::parse_id)(input)?;\n\n let (input, (theorem, name)) = alt((\n\n map(eof, |_| (None, None)),\n\n map(\n\n tuple((\n\n tag(\" { \"),\n\n map_parser(take_until(\" }\"), |input| fmt.parse_theorem(input)),\n\n tag(\" }\"),\n\n alt((\n\n map(eof, |_| None),\n\n map(tuple((tag(\": \"), rest)), |(_, name): (&str, &str)| {\n\n Some(name.to_owned())\n\n }),\n", "file_path": "src/serialization/command.rs", "rank": 49, "score": 17.57817572492599 }, { "content": " |input| self.parse_arity(0, input),\n\n ))(input)\n\n }\n\n\n\n pub fn format_variable(&self, s: &mut String, mut id: Identifier) {\n\n assert!(id >= 0);\n\n id += 1;\n\n s.extend(\n\n std::iter::from_fn(move || {\n\n if id == 0 {\n\n None\n\n } else if id <= 26 {\n\n let c = ('a' as u8 + (id % 26) as u8 - 1) as char;\n\n id = 0;\n\n Some(c)\n\n } else {\n\n let c = ('a' as u8 + (id % 26) as u8) as char;\n\n id /= 26;\n\n Some(c)\n\n }\n", "file_path": "src/serialization/formatter.rs", "rank": 50, "score": 17.263122661446328 }, { "content": " )),\n\n )),\n\n |(_, theorem, _, name)| (Some(theorem), name),\n\n ),\n\n map(tuple((tag(\": \"), rest)), |(_, name): (&str, &str)| {\n\n (None, Some(name.to_owned()))\n\n }),\n\n ))(input)?;\n\n Ok((\n\n input,\n\n Command::Proof(Proof::Simplify(id, a, b), theorem, name),\n\n ))\n\n })(input)\n\n }\n\n\n\n fn parse_combine<'a>(\n\n fmt: &Formatter,\n\n input: &'a str,\n\n ) -> IResult<&'a str, Command, GreedyError<&'a str>> {\n\n let (input, _) = tag(\"cmb \")(input)?;\n", "file_path": "src/serialization/command.rs", "rank": 51, "score": 17.175179049825008 }, { "content": " fn subexpression_check<'a>(\n\n expr: &'a T,\n\n start_index: usize,\n\n ) -> Option<Expression<&'a [Identifier]>> {\n\n let mut depth = 1;\n\n for (i, &s) in expr.borrow()[start_index..].iter().enumerate() {\n\n if is_operator(s) && s != Identifier::MIN {\n\n depth += 1;\n\n } else {\n\n depth -= 1;\n\n }\n\n if depth == 0 {\n\n return Some(Expression {\n\n data: &expr.borrow()[start_index..=start_index + i],\n\n });\n\n }\n\n }\n\n None\n\n }\n\n}\n", "file_path": "src/expression.rs", "rank": 52, "score": 16.434657741046287 }, { "content": "#[derive(Debug, PartialEq, Eq)]\n\npub struct Formatter {\n\n operators: Vec<(String, u8)>,\n\n judgements: Vec<String>,\n\n}\n\n\n\nimpl Formatter {\n\n pub fn new() -> Self {\n\n Formatter {\n\n operators: Vec::new(),\n\n judgements: Vec::new(),\n\n }\n\n }\n\n\n\n pub fn operators(&self) -> impl Iterator<Item = (&str, u8)> {\n\n self.operators\n\n .iter()\n\n .map(|(operator, arity)| (operator.as_str(), *arity))\n\n }\n\n\n", "file_path": "src/serialization/formatter.rs", "rank": 53, "score": 16.38167685816282 }, { "content": " .unwrap();\n\n\n\n database\n\n .add_proof(Proof::Axiom(Theorem::new(b.clone(), vec![], vec![])))\n\n .unwrap();\n\n database.add_name(\"b\".to_owned()).unwrap();\n\n\n\n database\n\n .add_proof(Proof::Axiom(Theorem::new(c.clone(), vec![a, b], vec![])))\n\n .unwrap();\n\n database.add_name(\"abc\".to_owned()).unwrap();\n\n\n\n database\n\n .add_proof(Proof::Combine(\n\n (Some(\"abc\".to_owned()), None),\n\n (Some(\"b\".to_owned()), Some(0)),\n\n 0,\n\n ))\n\n .unwrap();\n\n database\n", "file_path": "src/serialization/command.rs", "rank": 54, "score": 16.070733218876587 }, { "content": "use crate::{\n\n error::ProofError,\n\n expression::{is_operator, Substitution},\n\n types::*,\n\n};\n\n\n\n/// A _distince variable relation_ for expressing that two variables must be different.\n\n///\n\n/// In the default case it is always assumed that all statements are correct if you replace\n\n/// a variable with a different subexpression. This leads to logical errors in statements like\n\n/// `forall x0. exists x1. x0 != x1`.\n\n#[derive(PartialEq, Eq, Clone, PartialOrd, Ord, Debug)]\n\npub struct DVR(Identifier, Identifier);\n\n\n\nimpl DVR {\n\n /// Returns this `DVR`s variables\n\n pub fn variables(&self) -> (Identifier, Identifier) {\n\n let DVR(a, b) = self;\n\n (*a, *b)\n\n }\n", "file_path": "src/dvr.rs", "rank": 55, "score": 16.00144652672507 }, { "content": "cmb $(5) <- 1\n\ncmb ax-1(2) <- wi\n\nsmp $ (a ~ b)\n\nsmp $ (a ~ b)\n\nsmp $ (a ~ b)\n\ncmb 3(3) <- $ { wff a => |- (a -> a) }: id\n\n\"#;\n\n let mut fmt = Formatter::new();\n\n match parse_database(&mut fmt, s) {\n\n Err((line, e)) => {\n\n eprintln!(\"In line:\\n\\t{}\", line);\n\n match e {\n\n Error::DatabaseError(DatabaseError::TheoremMismatch(t1, t2)) => {\n\n let mut s1 = String::new();\n\n fmt.format_theorem(&mut s1, &t1);\n\n let mut s2 = String::new();\n\n fmt.format_theorem(&mut s2, &t2);\n\n panic!(\"TheoremMismatch:\\n{}\\n{}\", s1, s2);\n\n }\n\n e => {\n\n panic!(\"{:?}\", e);\n\n }\n\n }\n\n }\n\n _ => (),\n\n }\n\n }\n\n}\n", "file_path": "src/serialization/command.rs", "rank": 56, "score": 15.61110368397199 }, { "content": " or_fail(|input| {\n\n let (input, theorem) =\n\n map_parser(take_until(\" }\"), |input| fmt.parse_theorem(input))(input)?;\n\n let (input, _) = tag(\" }\")(input)?;\n\n let (input, name) = alt((\n\n map(eof, |_| None),\n\n map(tuple((tag(\": \"), rest)), |(_, name): (&str, &str)| {\n\n Some(name.to_owned())\n\n }),\n\n ))(input)?;\n\n Ok((\n\n input,\n\n Command::Proof(Proof::Axiom(theorem.clone()), None, name),\n\n ))\n\n })(input)\n\n }\n\n\n\n fn parse_judgement<'a>(input: &'a str) -> IResult<&'a str, Command, GreedyError<&'a str>> {\n\n let (input, _) = tag(\"jdg \")(input)?;\n\n let (input, judgement) = rest(input)?;\n", "file_path": "src/serialization/command.rs", "rank": 57, "score": 15.427269009779783 }, { "content": " /// expected.dedup();\n\n /// assert_eq!(new_dvrs, Ok(expected));\n\n ///\n\n /// let dvr = DVR::new(0, 1).unwrap();\n\n /// let mut sub = WholeSubstitution::with_capacity(2);\n\n /// let expr0 = Expression::from_raw(vec![-2, 0, 1]).unwrap();\n\n /// sub.insert(0, expr0.to_slice());\n\n /// let expr1 = Expression::from_raw(vec![-2, 1, 2]).unwrap();\n\n /// sub.insert(1, expr1.to_slice());\n\n ///\n\n /// let new_dvrs = dvr.substitute(&sub).collect::<Result<Vec<_>, _>>();\n\n ///\n\n /// assert_eq!(new_dvrs, Err(ProofError::DVRError(1)));\n\n /// ```\n\n pub fn substitute<S: Substitution>(\n\n &self,\n\n substitution: &S,\n\n ) -> impl Iterator<Item = Result<DVR, ProofError>> {\n\n let DVR(a, b) = self;\n\n let vars_a = if let Some(sub) = substitution.substitution_opt(*a) {\n", "file_path": "src/dvr.rs", "rank": 58, "score": 15.40211748672587 }, { "content": " type T = [Identifier; 1];\n\n\n\n fn substitution_opt(&self, id: Identifier) -> Option<Expression<[Identifier; 1]>> {\n\n if id < 0 {\n\n panic!(\"id is not a variable\");\n\n }\n\n Some(Expression {\n\n data: [id + self.shift as Identifier],\n\n })\n\n }\n\n}\n\n\n\n/// A general substitution\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct WholeSubstitution<'a> {\n\n substitution: Vec<Option<Expression<&'a [Identifier]>>>,\n\n}\n\n\n\nimpl<'a> WholeSubstitution<'a> {\n\n /// Creates a substitution with the capacity to store replacements for variables `0` to\n", "file_path": "src/expression.rs", "rank": 59, "score": 15.28263873059454 }, { "content": " s.push_str(\"cmb \");\n\n self.serialize_id(s, id_a);\n\n write!(s, \"({}) <- \", index + 1).unwrap();\n\n self.serialize_id(s, id_b);\n\n if let Some(theorem) = theorem {\n\n s.push_str(\" { \");\n\n fmt.format_theorem(s, theorem);\n\n s.push_str(\" }\");\n\n }\n\n if let Some(name) = name {\n\n write!(s, \": {}\", name).unwrap();\n\n }\n\n }\n\n Command::Proof(Proof::Axiom(theorem), _, name) => {\n\n s.push_str(\"axm { \");\n\n fmt.format_theorem(s, theorem);\n\n s.push_str(\" }\");\n\n if let Some(name) = name {\n\n write!(s, \": {}\", name).unwrap();\n\n }\n", "file_path": "src/serialization/command.rs", "rank": 60, "score": 15.256815886607434 }, { "content": " /// assert_eq!(res, Err(ProofError::JudgementMismatch(0, 1)));\n\n /// ```\n\n pub fn unify<'a, S: Borrow<[Identifier]>>(\n\n &'a self,\n\n other: &Statement<S>,\n\n substitution: &mut WholeSubstitution<'a>,\n\n ) -> Result<(), ProofError> {\n\n if self.judgement != other.judgement {\n\n return Err(ProofError::JudgementMismatch(\n\n self.judgement,\n\n other.judgement,\n\n ));\n\n }\n\n self.expression.unify(&other.expression, substitution)?;\n\n return Ok(());\n\n }\n\n\n\n /// Convenience function for using a `Substitution` on this statements expression (see\n\n /// [`Expression::substitute`])\n\n pub fn substitute<S: Substitution>(&self, substitution: &S) -> Statement<Box<[Identifier]>> {\n\n Statement {\n\n judgement: self.judgement,\n\n expression: self.expression.substitute(substitution),\n\n }\n\n }\n\n}\n", "file_path": "src/statement.rs", "rank": 61, "score": 15.183137746569603 }, { "content": " /// `n - 1`.\n\n pub fn with_capacity(n: usize) -> Self {\n\n WholeSubstitution {\n\n substitution: vec![None; n],\n\n }\n\n }\n\n\n\n /// Marks the `id` to be substituted by `expr`.\n\n ///\n\n /// # Panics\n\n /// This method panics if `id` is not in the range of this substitutions capacity\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use attomath::expression::{Expression, Substitution, WholeSubstitution};\n\n ///\n\n /// let expr = Expression::from_raw([-2, 0, 1]).unwrap();\n\n /// let mut sub = WholeSubstitution::with_capacity(2);\n\n /// sub.insert(0, expr.to_slice());\n\n /// assert_eq!(sub.substitution_opt(0), Some(expr.to_slice()));\n", "file_path": "src/expression.rs", "rank": 62, "score": 15.07166310948092 }, { "content": " context(\"smp\", |input| Self::parse_simplify(&fmt, input)),\n\n context(\"cmb\", |input| Self::parse_combine(&fmt, input)),\n\n context(\"axm\", |input| Self::parse_axiom(&fmt, input)),\n\n )),\n\n ),\n\n context(\"jdg\", |input| Self::parse_judgement(input)),\n\n context(\"opr\", |input| Self::parse_operator(input)),\n\n ))))(input)?;\n\n Ok(command)\n\n }\n\n\n\n fn parse_id(input: &str) -> IResult<&str, (Option<String>, Option<usize>), GreedyError<&str>> {\n\n alt((\n\n terminated(map(char('$'), |_| (None, None)), eof),\n\n terminated(\n\n map_opt(digit1, |s: &str| {\n\n Some((\n\n None,\n\n Some(s.parse::<usize>().ok().and_then(|i| {\n\n if i == 0 {\n", "file_path": "src/serialization/command.rs", "rank": 63, "score": 14.891085043152133 }, { "content": "\n\nimpl<T: BorrowMut<[Identifier]> + std::fmt::Debug> Expression<T> {\n\n /// Turns this expression into its standard representation, numbering variables in the order of\n\n /// their apperance.\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use attomath::expression::Expression;\n\n ///\n\n /// let mut s = Expression::from_raw([-2, -2, 2, 0, 2]).unwrap();\n\n /// s.standardize();\n\n /// assert_eq!(s.data(), &[-2, -2, 0, 1, 0]);\n\n /// ```\n\n pub fn standardize(&mut self) {\n\n let max_var = self.variables().max().unwrap_or(-1);\n\n let mut var_map = vec![None; (Wrapping(max_var as usize) + Wrapping(1)).0];\n\n let mut next_var = 0;\n\n self.standardize_range(&mut var_map, &mut next_var, ..);\n\n }\n\n\n", "file_path": "src/expression.rs", "rank": 64, "score": 14.78127860920461 }, { "content": " match err {\n\n attomath::error::ProofError::OperatorMismatch(op_a, op_b) => eprintln!(\n\n \"operator mismatch: expected {} found {}\",\n\n fmt.operators().nth((-op_b) as usize - 1).unwrap().0,\n\n fmt.operators().nth((-op_a) as usize - 1).unwrap().0,\n\n ),\n\n attomath::error::ProofError::VariableMismatch(var, sub_a, sub_b) => {\n\n let mut sv = String::new();\n\n fmt.format_variable(&mut sv, var);\n\n let mut sa = String::new();\n\n fmt.format_expression(\n\n &mut sa,\n\n &Expression::from_raw(sub_b).unwrap(),\n\n );\n\n let mut sb = String::new();\n\n fmt.format_expression(\n\n &mut sb,\n\n &Expression::from_raw(sub_a).unwrap(),\n\n );\n\n eprintln!(\n", "file_path": "src/main.rs", "rank": 65, "score": 14.467628067111729 }, { "content": " pub fn judgements(&self) -> impl Iterator<Item = &str> {\n\n self.judgements.iter().map(|judgement| judgement.as_str())\n\n }\n\n\n\n pub fn add_operator(&mut self, operator: String, arity: u8) {\n\n // TODO: verify\n\n self.operators.push((operator, arity));\n\n }\n\n\n\n pub fn add_judgement(&mut self, judgement: String) {\n\n // TODO: verify\n\n self.judgements.push(judgement);\n\n }\n\n\n\n pub fn format_operator<T: Borrow<[Identifier]> + std::fmt::Debug>(\n\n &self,\n\n s: &mut String,\n\n id: Identifier,\n\n left: &Expression<T>,\n\n right: &Expression<T>,\n", "file_path": "src/serialization/formatter.rs", "rank": 66, "score": 14.431474630252621 }, { "content": " })\n\n .collect::<Vec<_>>()\n\n .into_iter(),\n\n );\n\n }\n\n\n\n pub fn parse_variable<'a>(\n\n &self,\n\n input: &'a str,\n\n ) -> IResult<&'a str, Identifier, GreedyError<&'a str>> {\n\n let (input, var) = take_while1(|c| c >= 'a' && c <= 'z')(input)?;\n\n let mut id = 0i16;\n\n for c in var.chars() {\n\n id *= 26;\n\n if id == 0 {\n\n id += 1;\n\n }\n\n id += (c as u8 - 'a' as u8) as i16;\n\n }\n\n Ok((input, id - 1))\n", "file_path": "src/serialization/formatter.rs", "rank": 67, "score": 14.310278048468493 }, { "content": " None\n\n } else {\n\n Some(i - 1)\n\n }\n\n })?),\n\n ))\n\n }),\n\n eof,\n\n ),\n\n terminated(\n\n map_opt(\n\n tuple((take_till1(|c| c == '.' || c == ' '), char('.'), digit1)),\n\n |(name, _, s): (&str, char, &str)| -> Option<(Option<String>, Option<usize>)> {\n\n Some((\n\n Some(name.to_owned()),\n\n Some(s.parse::<usize>().ok().and_then(|i| {\n\n if i == 0 {\n\n None\n\n } else {\n\n Some(i - 1)\n", "file_path": "src/serialization/command.rs", "rank": 68, "score": 14.150449607104637 }, { "content": " }\n\n }\n\n }\n\n return Ok(());\n\n }\n\n\n\n /// Use the given substitution on this expression to create a new expression\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use attomath::expression::{Expression, WholeSubstitution};\n\n ///\n\n /// let mut sub = WholeSubstitution::with_capacity(2);\n\n /// let expr = Expression::from_raw([-2, 0, 1]).unwrap();\n\n /// sub.insert(0, expr.to_slice());\n\n /// assert_eq!(*expr.substitute(&sub).to_slice().data(), &[-2, -2, 0, 1, 1]);\n\n /// ```\n\n pub fn substitute<S: Substitution>(&self, substitution: &S) -> Expression<Box<[Identifier]>> {\n\n let mut new_expr = Vec::with_capacity(self.data.borrow().len());\n\n for symb in self.data.borrow().iter() {\n", "file_path": "src/expression.rs", "rank": 69, "score": 13.68894952617311 }, { "content": "/// Statement {\n\n/// judgement: 0,\n\n/// expression: Expression::from_raw(vec![0].into_boxed_slice()).unwrap(),\n\n/// },\n\n/// Statement {\n\n/// judgement: 0,\n\n/// expression: Expression::from_raw(vec![-1, 0, 1].into_boxed_slice()).unwrap(),\n\n/// }\n\n/// ];\n\n/// let dvrs = vec![DVR::new(0, 1).unwrap()];\n\n/// let theorem = Theorem::new(conclusion, assumptions, dvrs);\n\n///\n\n/// let mut s = String::new();\n\n/// fmt.format_theorem(&mut s, &theorem);\n\n/// assert_eq!(s, \"a <> b, |- a, |- (a -> b) => |- b\");\n\n///\n\n/// let (remaining, theorem1) = fmt.parse_theorem(&s).unwrap();\n\n/// assert_eq!(remaining, \"\");\n\n/// assert_eq!(theorem1, theorem);\n\n/// ```\n", "file_path": "src/serialization/formatter.rs", "rank": 70, "score": 13.508671628076506 }, { "content": "use crate::{error::ProofError, types::*};\n\nuse std::{\n\n borrow::{Borrow, BorrowMut},\n\n cmp::Ordering,\n\n num::Wrapping,\n\n ops::RangeBounds,\n\n};\n\n\n\n/// A `Expression` is a binary tree with nodes called \"operators\" and leafs which called\n\n/// \"variables\".\n\n///\n\n/// The exception to this is the special leaf `Identifier::MIN` which is an operator\n\n/// and used as a \"terminator\" two allow for operators with arity 1 or less.\n\n///\n\n/// The expression is encoded by prefix orderinto a sequence of [`Identifier`]s which can be both\n\n/// variables and operators (see [`is_operator`]).\n\n#[derive(Clone, Eq, Ord, Debug, Copy)]\n\npub struct Expression<T: Borrow<[Identifier]>> {\n\n data: T,\n\n}\n", "file_path": "src/expression.rs", "rank": 71, "score": 13.062986177469384 }, { "content": " })\n\n })\n\n }\n\n}\n\n\n\n/// Substitutes variables by shifting them to the right by the given ammount\n\npub struct ShiftSubstitution {\n\n shift: Identifier,\n\n}\n\n\n\nimpl ShiftSubstitution {\n\n pub fn new(shift: Identifier) -> Self {\n\n if shift < 0 {\n\n panic!(\"shift must be nonnegative\");\n\n }\n\n ShiftSubstitution { shift }\n\n }\n\n}\n\n\n\nimpl Substitution for ShiftSubstitution {\n", "file_path": "src/expression.rs", "rank": 72, "score": 13.028188999243211 }, { "content": " self.format_variable(s, a);\n\n s.push_str(\" <> \");\n\n self.format_variable(s, b);\n\n }\n\n\n\n pub fn parse_dvr<'a>(&self, input: &'a str) -> IResult<&'a str, DVR, GreedyError<&'a str>> {\n\n let (input, a) = self.parse_variable(input)?;\n\n let (input, _) = tag(\" <> \")(input)?;\n\n let (input, b) = verify(|input| self.parse_variable(input), |b| *b != a)(input)?;\n\n Ok((input, DVR::new(a, b).unwrap()))\n\n }\n\n\n\n pub fn format_theorem(&self, s: &mut String, theorem: &Theorem) {\n\n let dvrs = theorem.dvrs();\n\n let assumptions = theorem.assumptions();\n\n if !dvrs.is_empty() || !assumptions.is_empty() {\n\n for (i, dvr) in dvrs.iter().enumerate() {\n\n self.format_dvr(s, dvr);\n\n if i != dvrs.len() - 1 || !assumptions.is_empty() {\n\n s.push_str(\", \");\n", "file_path": "src/serialization/formatter.rs", "rank": 73, "score": 12.872626375384922 }, { "content": "opr A 0\n\nopr B 0\n\nopr C 0\n\naxm { |- A }\n\naxm { |- B }: b\n\naxm { |- A, |- B => |- C }: abc\n\ncmb abc(1) <- b.1\n\ncmb $(1) <- b { |- C }: c\n\n\"#\n\n );\n\n\n\n let mut fmt1 = Formatter::new();\n\n let database1 = parse_database(&mut fmt1, &s).unwrap();\n\n assert_eq!(database1, database);\n\n assert_eq!(fmt1, fmt);\n\n }\n\n\n\n #[test]\n\n fn id() {\n\n let s = r#\"jdg wff\n", "file_path": "src/serialization/command.rs", "rank": 74, "score": 12.85327903180555 }, { "content": " fn from(e: DatabaseError) -> Self {\n\n Self::DatabaseError(e)\n\n }\n\n }\n\n\n\n fn serialize_database(fmt: &Formatter, database: &Database) -> String {\n\n let mut s = String::new();\n\n for command in Command::from_formatter(fmt).chain(Command::from_database(database)) {\n\n command.serialize(&mut s, fmt);\n\n s.push('\\n');\n\n }\n\n s\n\n }\n\n\n\n fn parse_database<'a>(\n\n fmt: &mut Formatter,\n\n input: &'a str,\n\n ) -> Result<Database, (&'a str, Error<'a>)> {\n\n let mut database = Database::new();\n\n for line in input.lines() {\n", "file_path": "src/serialization/command.rs", "rank": 75, "score": 12.784642114759794 }, { "content": "use std::fmt::Write;\n\n\n\nuse nom::{\n\n branch::alt,\n\n bytes::complete::{is_not, tag, take_till1, take_until},\n\n character::complete::{char, digit1},\n\n combinator::{all_consuming, eof, map, map_opt, map_parser, rest},\n\n error::context,\n\n sequence::{terminated, tuple},\n\n IResult,\n\n};\n\n\n\nuse crate::Theorem;\n\n\n\nuse super::{\n\n error::{or_fail, GreedyError},\n\n Database, DatabaseError, Formatter, Proof,\n\n};\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone)]\n", "file_path": "src/serialization/command.rs", "rank": 76, "score": 12.53793819634839 }, { "content": " input: &'a str,\n\n ) -> IResult<&'a str, Judgement, GreedyError<&'a str>> {\n\n let (input, judgement) = map_opt(is_not(\" \"), |s| {\n\n self.judgements\n\n .iter()\n\n .enumerate()\n\n .filter_map(|(i, j)| if s == j { Some(i) } else { None })\n\n .next()\n\n })(input)?;\n\n Ok((input, judgement as u8))\n\n }\n\n\n\n pub fn format_statement<T: Borrow<[Identifier]> + std::fmt::Debug>(\n\n &self,\n\n s: &mut String,\n\n statement: &Statement<T>,\n\n ) {\n\n self.format_judgement(s, statement.judgement);\n\n s.push(' ');\n\n self.format_expression(s, &statement.expression);\n", "file_path": "src/serialization/formatter.rs", "rank": 77, "score": 12.520164968063918 }, { "content": "\n\n pub(crate) fn substitute_variables(&mut self, var_map: &[Option<Identifier>]) {\n\n for symb in self.data.borrow_mut().iter_mut() {\n\n if !is_operator(*symb) {\n\n if let Some(sub) = var_map[*symb as usize] {\n\n *symb = sub;\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Borrow<[Identifier]> + std::fmt::Debug> Expression<T> {\n\n /// The internal encoding in prefix order\n\n pub fn data<'a>(&'a self) -> &'a T {\n\n &self.data\n\n }\n\n\n\n /// Borrow this expressions contents\n\n pub fn to_slice<'a>(&'a self) -> Expression<&'a [Identifier]> {\n", "file_path": "src/expression.rs", "rank": 78, "score": 12.457915085715614 }, { "content": "use crate::{\n\n error::ProofError,\n\n expression::{Expression, Substitution, WholeSubstitution},\n\n types::*,\n\n};\n\nuse std::borrow::Borrow;\n\n\n\n/// Type alias for a statement that owns its expression\n\npub type OwnedStatement = Statement<Box<[Identifier]>>;\n\n\n\n/// A a combination of a [`Judgement`] and an [`Expression`], for example _x0 -> x0 is provable_\n\n///\n\n/// The __judgement__ is given in form of an integer, but often represents some meaning, like _this\n\n/// expression is provable_ or _this expression is syntactically correct_.\n\n#[derive(Eq, Clone, PartialOrd, Ord, Debug)]\n\npub struct Statement<T: Borrow<[Identifier]>> {\n\n pub judgement: Judgement,\n\n pub expression: Expression<T>,\n\n}\n\n\n", "file_path": "src/statement.rs", "rank": 79, "score": 12.242983860111282 }, { "content": " )\n\n .unwrap(),\n\n };\n\n let b = Statement {\n\n judgement: 0,\n\n expression: Expression::from_raw(\n\n vec![-2, Identifier::MIN, Identifier::MIN].into_boxed_slice(),\n\n )\n\n .unwrap(),\n\n };\n\n let c = Statement {\n\n judgement: 0,\n\n expression: Expression::from_raw(\n\n vec![-3, Identifier::MIN, Identifier::MIN].into_boxed_slice(),\n\n )\n\n .unwrap(),\n\n };\n\n\n\n database\n\n .add_proof(Proof::Axiom(Theorem::new(a.clone(), vec![], vec![])))\n", "file_path": "src/serialization/command.rs", "rank": 80, "score": 12.205921183874596 }, { "content": " }\n\n\n\n pub fn format_expression<T: Borrow<[Identifier]> + std::fmt::Debug>(\n\n &self,\n\n s: &mut String,\n\n expression: &Expression<T>,\n\n ) {\n\n let id = expression.to_slice().data()[0];\n\n if id == Identifier::MIN {\n\n return;\n\n }\n\n if is_operator(id) {\n\n let left = expression.subexpression(1);\n\n let right = expression.subexpression(1 + left.data().len());\n\n self.format_operator(s, id, &left, &right);\n\n } else {\n\n self.format_variable(s, id)\n\n }\n\n }\n\n\n", "file_path": "src/serialization/formatter.rs", "rank": 81, "score": 12.000604520451981 }, { "content": " (|| {\n\n let command = Command::parse(&fmt, line)?;\n\n\n\n command.apply(fmt, &mut database)?;\n\n Ok(())\n\n })()\n\n .map_err(|e| (line, e))?;\n\n }\n\n Ok(database)\n\n }\n\n\n\n #[test]\n\n fn a_b_c() {\n\n use crate::{expression::Expression, statement::Statement};\n\n let mut database = Database::new();\n\n\n\n let a = Statement {\n\n judgement: 0,\n\n expression: Expression::from_raw(\n\n vec![-1, Identifier::MIN, Identifier::MIN].into_boxed_slice(),\n", "file_path": "src/serialization/command.rs", "rank": 82, "score": 11.96670789029956 }, { "content": " pub fn parse_expression<'a>(\n\n &self,\n\n input: &'a str,\n\n ) -> IResult<&'a str, Expression<Box<[Identifier]>>, GreedyError<&'a str>> {\n\n // TODO: remove unwrap\n\n alt((\n\n map(\n\n |input| self.parse_variable(input),\n\n |v| Expression::from_raw(vec![v].into_boxed_slice()).unwrap(),\n\n ),\n\n |input| self.parse_operator(input),\n\n ))(input)\n\n }\n\n\n\n pub fn format_judgement(&self, s: &mut String, judgement: Judgement) {\n\n s.push_str(&self.judgements[judgement as usize]);\n\n }\n\n\n\n pub fn parse_judgement<'a>(\n\n &self,\n", "file_path": "src/serialization/formatter.rs", "rank": 83, "score": 11.75633870122122 }, { "content": " let (input, _) = tag(\"smp \")(input)?;\n\n or_fail(|input| {\n\n let (input, id) = map_parser(is_not(\" \"), Self::parse_id)(input)?;\n\n let (input, _) = tag(\" (\")(input)?;\n\n let (input, a) = fmt.parse_variable(input)?;\n\n let (input, _) = tag(\" ~ \")(input)?;\n\n let (input, b) = fmt.parse_variable(input)?;\n\n let (input, _) = tag(\")\")(input)?;\n\n let (input, (theorem, name)) = alt((\n\n map(eof, |_| (None, None)),\n\n map(\n\n tuple((\n\n tag(\" { \"),\n\n map_parser(take_until(\" }\"), |input| fmt.parse_theorem(input)),\n\n tag(\" }\"),\n\n alt((\n\n map(eof, |_| None),\n\n map(tuple((tag(\": \"), rest)), |(_, name): (&str, &str)| {\n\n Some(name.to_owned())\n\n }),\n", "file_path": "src/serialization/command.rs", "rank": 84, "score": 11.617463249211891 }, { "content": " ///\n\n /// let a = Expression::from_raw([-2, 0, -2, 1, 0]).unwrap();\n\n /// let b = Expression::from_raw([-2, 0, 1]).unwrap();\n\n /// let mut sub = WholeSubstitution::with_capacity(2);\n\n /// let result = a.unify(&b, &mut sub); // (x0 ~> x0, x1 ~> (x1 -> x0))\n\n /// assert_eq!(result, Ok(()));\n\n /// assert_eq!(b.substitute(&sub), a);\n\n ///\n\n /// let a = Expression::from_raw([-2, 0, -2, 1, 0]).unwrap();\n\n /// let b = Expression::from_raw([-2, 0, 0]).unwrap();\n\n /// let mut sub = WholeSubstitution::with_capacity(2);\n\n /// let result = a.unify(&b, &mut sub); // (x0 ~> x0, x0 ~> (x1 -> x0))\n\n /// assert_eq!(result, Err(ProofError::VariableMismatch(\n\n /// 0,\n\n /// vec![0].into_boxed_slice(),\n\n /// vec![-2, 1, 0].into_boxed_slice(),\n\n /// )));\n\n ///\n\n /// let a = Expression::from_raw([-3, 0, -2, 1, 0]).unwrap();\n\n /// let b = Expression::from_raw([-2, 0, 1]).unwrap();\n", "file_path": "src/expression.rs", "rank": 85, "score": 11.564432344534593 }, { "content": " /// use attomath::DVR;\n\n /// use attomath::expression::{Expression, WholeSubstitution};\n\n /// use attomath::error::ProofError;\n\n ///\n\n /// let dvr = DVR::new(0, 1).unwrap();\n\n /// let mut sub = WholeSubstitution::with_capacity(2);\n\n /// let expr0 = Expression::from_raw(vec![-2, 0, 1]).unwrap();\n\n /// sub.insert(0, expr0.to_slice());\n\n /// let expr1 = Expression::from_raw(vec![2]).unwrap();\n\n /// sub.insert(1, expr1.to_slice());\n\n ///\n\n /// let mut new_dvrs = dvr.substitute(&sub).collect::<Result<Vec<_>, _>>();\n\n /// new_dvrs = new_dvrs.map(|mut ds| {\n\n /// ds.sort();\n\n /// ds.dedup();\n\n /// ds\n\n /// });\n\n ///\n\n /// let mut expected = vec![DVR::new(0, 2).unwrap(), DVR::new(1, 2).unwrap()];\n\n /// expected.sort();\n", "file_path": "src/dvr.rs", "rank": 86, "score": 11.524420990360426 }, { "content": "impl<T: Borrow<[Identifier]>, S: Borrow<[Identifier]>> PartialEq<Statement<S>> for Statement<T> {\n\n fn eq(&self, other: &Statement<S>) -> bool {\n\n self.judgement == other.judgement && self.expression == other.expression\n\n }\n\n}\n\n\n\nimpl<T: Borrow<[Identifier]> + std::fmt::Debug> Statement<T> {\n\n /// Convenience function for unifying the expressions of two judgements (see\n\n /// [`Expression::unify`])\n\n ///\n\n /// # Errors\n\n /// * `JudgementMismatch` - if `self.judgement != other.judgement`\n\n /// * `VariableMismatch` or `OperatorMismatch` - if `self.expression.unify(other.expression)`\n\n /// fails\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use attomath::Statement;\n\n /// use attomath::expression::{Expression, WholeSubstitution};\n\n /// use attomath::error::ProofError;\n", "file_path": "src/statement.rs", "rank": 87, "score": 11.453703735291695 }, { "content": " use Either::*;\n\n match self {\n\n Left(expr) => expr.borrow(),\n\n Right(expr) => expr.borrow(),\n\n }\n\n }\n\n}\n\n\n\nimpl<S: Substitution, T: Substitution> Substitution for ChainSubstitution<S, T> {\n\n type T = Either<S::T, T::T>;\n\n\n\n fn substitution_opt(&self, id: Identifier) -> Option<Expression<Self::T>> {\n\n self.first\n\n .substitution_opt(id)\n\n .map(|s| Expression {\n\n data: Either::Left(s.data),\n\n })\n\n .or_else(|| {\n\n self.then.substitution_opt(id).map(|s| Expression {\n\n data: Either::Right(s.data),\n", "file_path": "src/expression.rs", "rank": 88, "score": 11.414166383867926 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::num::Wrapping;\n\n\n\n use quickcheck::Arbitrary;\n\n\n\n use super::*;\n\n\n\n impl Arbitrary for Expression<Box<[Identifier]>> {\n\n fn arbitrary(g: &mut quickcheck::Gen) -> Self {\n\n loop {\n\n let data = Vec::<Identifier>::arbitrary(g).into_boxed_slice();\n\n if let Some(subexpression) = Expression::subexpression_check(&data, 0) {\n\n return Expression {\n\n data: subexpression.data.to_owned().into_boxed_slice(),\n\n };\n\n }\n\n }\n", "file_path": "src/expression.rs", "rank": 89, "score": 11.413760661154265 }, { "content": " Expression {\n\n data: self.data.borrow(),\n\n }\n\n }\n\n\n\n /// Create a expression from its prefix order encoding. This checks if the given sequence\n\n /// encodes a binary tree and returns `None` if it does not.\n\n pub fn from_raw(expr: T) -> Option<Self> {\n\n if Self::check(&expr) {\n\n Some(Expression { data: expr })\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n /// Convencience function to iterate over this expressions variables. May contain duplicates.\n\n pub fn variables<'a>(&'a self) -> impl Iterator<Item = Identifier> + 'a {\n\n self.data\n\n .borrow()\n\n .iter()\n", "file_path": "src/expression.rs", "rank": 90, "score": 11.338666595262207 }, { "content": " }\n\n }\n\n\n\n quickcheck! {\n\n fn unify_substitute(a: Expression<Box<[Identifier]>>, b: Expression<Box<[Identifier]>>) -> bool {\n\n let max_var = b.variables().max().unwrap_or(-1);\n\n let mut substitution =\n\n WholeSubstitution::with_capacity((Wrapping(max_var as usize) + Wrapping(1)).0);\n\n match a.unify(&b, &mut substitution) {\n\n Ok(()) => b.substitute(&substitution) == a,\n\n Err(_) => true\n\n }\n\n }\n\n\n\n fn from_raw_single_variable(a: Identifier) -> bool {\n\n if is_operator(a) {\n\n return true;\n\n }\n\n Expression::from_raw([a]).is_some()\n\n }\n\n }\n\n}\n", "file_path": "src/expression.rs", "rank": 91, "score": 11.3356105151545 }, { "content": " Some(VariableSubstitution(variables))\n\n }\n\n}\n\n\n\nimpl<S: Borrow<[Option<Identifier>]>> Substitution for VariableSubstitution<S> {\n\n type T = [Identifier; 1];\n\n\n\n fn substitution_opt(&self, id: Identifier) -> Option<Expression<[Identifier; 1]>> {\n\n self.0.borrow()[id as usize].map(|v| Expression { data: [v] })\n\n }\n\n}\n\n\n", "file_path": "src/expression.rs", "rank": 92, "score": 11.256168080772435 }, { "content": " }\n\n\n\n pub fn parse_arity<'a>(\n\n &self,\n\n arity: u8,\n\n input: &'a str,\n\n ) -> IResult<&'a str, Expression<Box<[Identifier]>>, GreedyError<&'a str>> {\n\n if arity == 0 {\n\n let (input, o) = map_opt(is_not(\" ),\"), |s| {\n\n self.operators\n\n .iter()\n\n .enumerate()\n\n .filter_map(|(i, (o, a))| if s == o && *a == 0 { Some(i) } else { None })\n\n .next()\n\n })(input)?;\n\n // TODO: remove unwrap\n\n Ok((\n\n input,\n\n Expression::from_raw(\n\n vec![-(o as Identifier) - 1, Identifier::MIN, Identifier::MIN]\n", "file_path": "src/serialization/formatter.rs", "rank": 93, "score": 11.055249210985252 }, { "content": " pub(crate) fn standardize_range<R: RangeBounds<Identifier>>(\n\n &mut self,\n\n var_map: &mut [Option<Identifier>],\n\n next_var: &mut Identifier,\n\n range: R,\n\n ) {\n\n for symb in self.data.borrow_mut().iter_mut() {\n\n if !is_operator(*symb) && range.contains(symb) {\n\n let var = var_map[*symb as usize].unwrap_or_else(|| {\n\n let var = *next_var;\n\n var_map[*symb as usize] = Some(var);\n\n *next_var += 1;\n\n var\n\n });\n\n if range.contains(&var) {\n\n *symb = var;\n\n }\n\n }\n\n }\n\n }\n", "file_path": "src/expression.rs", "rank": 94, "score": 10.718975466138811 }, { "content": " }\n\n\n\n pub fn parse_statement<'a>(\n\n &self,\n\n input: &'a str,\n\n ) -> IResult<&'a str, OwnedStatement, GreedyError<&'a str>> {\n\n let (input, judgement) = self.parse_judgement(input)?;\n\n let (input, _) = char(' ')(input)?;\n\n let (input, expression) = self.parse_expression(input)?;\n\n Ok((\n\n input,\n\n Statement {\n\n judgement,\n\n expression,\n\n },\n\n ))\n\n }\n\n\n\n pub fn format_dvr(&self, s: &mut String, dvr: &DVR) {\n\n let (a, b) = dvr.variables();\n", "file_path": "src/serialization/formatter.rs", "rank": 95, "score": 10.46962375979684 }, { "content": " } else if is_operator(b) {\n\n Err(ProofError::DVRError(b))\n\n } else if a < b {\n\n Ok(DVR(a, b))\n\n } else if a > b {\n\n Ok(DVR(b, a))\n\n } else {\n\n Err(ProofError::DVRError(a))\n\n }\n\n }\n\n\n\n /// Uses the given `Substitution` to create new `DVR`s for each pair of variables in the new\n\n /// expressions for `self.variables()`.\n\n ///\n\n /// # Errors\n\n /// The `Iterator` will produce a `DVRError` if the substitutions for this `DVR`s' variables\n\n /// contains common variables\n\n ///\n\n /// # Example\n\n /// ```\n", "file_path": "src/dvr.rs", "rank": 96, "score": 10.187781094881979 }, { "content": "use crate::types::*;\n\n\n\n/// An error which is produced when trying to proof something incorrect\n\n#[derive(Debug, PartialEq, Eq)]\n\npub enum ProofError {\n\n /// Error produced when trying to unify expressions with different operators (see\n\n /// [`Expression::unify`](../expression/struct.Expression.html#method.unify)). Contains the\n\n /// identifiers of the mismatched operators.\n\n OperatorMismatch(Identifier, Identifier),\n\n /// Error produced when trying to unify expressions where one variable would have to be\n\n /// substituted by different subexpressions (see\n\n /// [`Expression::unify`](../expression/struct.Expression.html#method.unify)). Contains the\n\n /// identifier for the variable and the mismatched subexpressions.\n\n VariableMismatch(Identifier, Box<[Identifier]>, Box<[Identifier]>),\n\n /// Error produced when trying to unify statements with different judgements (see\n\n /// [`Statement::unify`](../statement/struct.Statement.html#method.unify)). Contains the\n\n /// mismatched judgements.\n\n JudgementMismatch(Judgement, Judgement),\n\n /// Error produced when trying to create a theorem with conflicting dvrs (see\n\n /// [`DVR`](../dvr/struct.DVR.html)).\n\n DVRError(Identifier),\n\n}\n", "file_path": "src/error.rs", "rank": 97, "score": 10.16792745249586 }, { "content": " }\n\n }\n\n Command::Judgement(judgement) => {\n\n fmt.add_judgement(judgement);\n\n }\n\n Command::Operator(operator, arity) => {\n\n fmt.add_operator(operator, arity);\n\n }\n\n }\n\n Ok(())\n\n }\n\n\n\n pub fn parse<'a>(\n\n fmt: &Formatter,\n\n input: &'a str,\n\n ) -> Result<Self, nom::Err<GreedyError<&'a str>>> {\n\n let (_, command) = or_fail(all_consuming(alt((\n\n context(\n\n \"proof\",\n\n alt((\n", "file_path": "src/serialization/command.rs", "rank": 98, "score": 10.04767937919993 }, { "content": " context(\"conclusion\", |input| self.parse_statement(input))(input)?;\n\n let (input, _) = eof(input)?;\n\n let mut dvrs = Vec::new();\n\n let mut assumptions = Vec::new();\n\n for (dvr, assumption) in dvrs_and_assumptions {\n\n if let Some(dvr) = dvr {\n\n dvrs.push(dvr);\n\n }\n\n if let Some(assumption) = assumption {\n\n assumptions.push(assumption);\n\n }\n\n }\n\n Ok((input, Theorem::new(conclusion, assumptions, dvrs)))\n\n },\n\n |input| {\n\n let (input, conclusion) =\n\n context(\"conclusion\", |input| self.parse_statement(input))(input)?;\n\n let (input, _) = eof(input)?;\n\n Ok((input, Theorem::new(conclusion, vec![], vec![])))\n\n },\n\n ))(input)\n\n }\n\n}\n", "file_path": "src/serialization/formatter.rs", "rank": 99, "score": 10.005722080770713 } ]
Rust
socrates-core/src/service/registry.rs
ckaran/socrates-rs
450f364e38cf663367c36991a99cd8b689d76768
use hashbrown::HashMap; use super::*; pub struct RegisteredService { core_props: ServiceCoreProps, type_id: TypeId, name: Arc<str>, owner_id: DynamodId, used_by_count: HashMap<DynamodId, u32>, service_object: Arc<dyn Service>, } impl RegisteredService { pub fn make_service_ref(&self) -> ServiceRef { ServiceRef { core: self.core_props.clone(), name: (*(self.name)).into(), type_id: self.type_id, owner_id: self.owner_id, } } } impl From<&RegisteredService> for ServiceRef { fn from(rs: &RegisteredService) -> ServiceRef { rs.make_service_ref() } } use im::OrdSet; #[derive(Default)] pub struct ServiceRegistry { curr_id: ServiceId, by_service_id: HashMap<ServiceId, RegisteredService>, by_name: HashMap<Arc<str>, OrdSet<ServiceCoreProps>>, by_type_id: HashMap<TypeId, OrdSet<ServiceCoreProps>>, zombies: HashMap<ServiceId, RegisteredService>, } impl ServiceRegistry { pub fn new() -> ServiceRegistry { Default::default() } pub fn register_service( &mut self, svc_type_id: TypeId, svc_name: &str, service_object: Arc<dyn Service>, svc_ranking: ServiceRanking, owner_id: DynamodId, ) -> ServiceRef { let new_id = self.curr_id + 1; let service = RegisteredService { core_props: ServiceCoreProps { id: new_id, ranking: svc_ranking, }, type_id: svc_type_id, name: svc_name.into(), owner_id, used_by_count: HashMap::new(), service_object, }; let service_ref = service.make_service_ref(); let svc_name = Arc::clone(&service.name); let svcs_using_name = self.by_name.entry(svc_name).or_insert(OrdSet::new()); svcs_using_name.insert(service.core_props.clone()); let svcs_using_type_id = self.by_type_id.entry(svc_type_id).or_insert(OrdSet::new()); svcs_using_type_id.insert(service.core_props.clone()); self.by_service_id.insert(new_id, service); self.curr_id = new_id; service_ref } pub fn unregister_service(&mut self, svc_id: ServiceId) -> Option<ServiceRef> { if let Some(rs) = self.by_service_id.remove(&svc_id) { self.by_name.remove(&rs.name).expect("unsynced registry!"); self.by_type_id .remove(&rs.type_id) .expect("unsynced registry!"); let svc_ref = rs.make_service_ref(); if !rs.used_by_count.is_empty() { self.zombies.insert(svc_id, rs); } else { println!("Dropping service (no users): {:?}", rs.make_service_ref()); } Some(svc_ref) } else { None } } #[inline(always)] fn get_services_id( core_props: Option<&OrdSet<ServiceCoreProps>>, ) -> impl Iterator<Item = ServiceId> { core_props .cloned() .into_iter() .flat_map(OrdSet::into_iter) .map(|cp| cp.id) } pub fn get_services_id_by_name(&self, svc_name: &str) -> impl Iterator<Item = ServiceId> { ServiceRegistry::get_services_id(self.by_name.get(svc_name)) } pub fn get_services_id_by_type_id( &self, svc_type_id: TypeId, ) -> impl Iterator<Item = ServiceId> { ServiceRegistry::get_services_id(self.by_type_id.get(&svc_type_id)) } pub fn get_service_ref(&self, svc_id: ServiceId) -> Option<ServiceRef> { self.by_service_id .get(&svc_id) .map(|rs| rs.make_service_ref()) } pub fn get_service_object( &mut self, svc_id: ServiceId, requestor: DynamodId, ) -> Option<Weak<dyn Service>> { self.by_service_id.get_mut(&svc_id).map(|rs| { let cr = rs.used_by_count.entry(requestor).or_insert(0); *cr = *cr + 1; Arc::downgrade(&rs.service_object) }) } pub fn remove_use(&mut self, svc_id: ServiceId, user_id: DynamodId) { if let Some(rs) = self.by_service_id.get_mut(&svc_id) { if ServiceRegistry::decrement_use(rs, user_id) == Some(0) { rs.used_by_count.remove(&user_id); } self.by_name .get_mut(&rs.name) .map(|v| v.remove(&rs.core_props)); } else if let Some(rs) = self.zombies.get_mut(&svc_id) { if ServiceRegistry::decrement_use(rs, user_id) == Some(0) { rs.used_by_count.remove(&user_id); if rs.used_by_count.is_empty() { println!("Dropping zombie service: {:?}", rs.make_service_ref()); self.zombies.remove(&svc_id); } } } } fn decrement_use(rs: &mut RegisteredService, owner_id: DynamodId) -> Option<u32> { rs.used_by_count.get_mut(&owner_id).map(|cr| { *cr = (*cr) - 1; *cr }) } }
use hashbrown::HashMap; use super::*; pub struct RegisteredService { core_props: ServiceCoreProps, type_id: TypeId, name: Arc<str>, owner_id: Dyn
.expect("unsynced registry!"); let svc_ref = rs.make_service_ref(); if !rs.used_by_count.is_empty() { self.zombies.insert(svc_id, rs); } else { println!("Dropping service (no users): {:?}", rs.make_service_ref()); } Some(svc_ref) } else { None } } #[inline(always)] fn get_services_id( core_props: Option<&OrdSet<ServiceCoreProps>>, ) -> impl Iterator<Item = ServiceId> { core_props .cloned() .into_iter() .flat_map(OrdSet::into_iter) .map(|cp| cp.id) } pub fn get_services_id_by_name(&self, svc_name: &str) -> impl Iterator<Item = ServiceId> { ServiceRegistry::get_services_id(self.by_name.get(svc_name)) } pub fn get_services_id_by_type_id( &self, svc_type_id: TypeId, ) -> impl Iterator<Item = ServiceId> { ServiceRegistry::get_services_id(self.by_type_id.get(&svc_type_id)) } pub fn get_service_ref(&self, svc_id: ServiceId) -> Option<ServiceRef> { self.by_service_id .get(&svc_id) .map(|rs| rs.make_service_ref()) } pub fn get_service_object( &mut self, svc_id: ServiceId, requestor: DynamodId, ) -> Option<Weak<dyn Service>> { self.by_service_id.get_mut(&svc_id).map(|rs| { let cr = rs.used_by_count.entry(requestor).or_insert(0); *cr = *cr + 1; Arc::downgrade(&rs.service_object) }) } pub fn remove_use(&mut self, svc_id: ServiceId, user_id: DynamodId) { if let Some(rs) = self.by_service_id.get_mut(&svc_id) { if ServiceRegistry::decrement_use(rs, user_id) == Some(0) { rs.used_by_count.remove(&user_id); } self.by_name .get_mut(&rs.name) .map(|v| v.remove(&rs.core_props)); } else if let Some(rs) = self.zombies.get_mut(&svc_id) { if ServiceRegistry::decrement_use(rs, user_id) == Some(0) { rs.used_by_count.remove(&user_id); if rs.used_by_count.is_empty() { println!("Dropping zombie service: {:?}", rs.make_service_ref()); self.zombies.remove(&svc_id); } } } } fn decrement_use(rs: &mut RegisteredService, owner_id: DynamodId) -> Option<u32> { rs.used_by_count.get_mut(&owner_id).map(|cr| { *cr = (*cr) - 1; *cr }) } }
amodId, used_by_count: HashMap<DynamodId, u32>, service_object: Arc<dyn Service>, } impl RegisteredService { pub fn make_service_ref(&self) -> ServiceRef { ServiceRef { core: self.core_props.clone(), name: (*(self.name)).into(), type_id: self.type_id, owner_id: self.owner_id, } } } impl From<&RegisteredService> for ServiceRef { fn from(rs: &RegisteredService) -> ServiceRef { rs.make_service_ref() } } use im::OrdSet; #[derive(Default)] pub struct ServiceRegistry { curr_id: ServiceId, by_service_id: HashMap<ServiceId, RegisteredService>, by_name: HashMap<Arc<str>, OrdSet<ServiceCoreProps>>, by_type_id: HashMap<TypeId, OrdSet<ServiceCoreProps>>, zombies: HashMap<ServiceId, RegisteredService>, } impl ServiceRegistry { pub fn new() -> ServiceRegistry { Default::default() } pub fn register_service( &mut self, svc_type_id: TypeId, svc_name: &str, service_object: Arc<dyn Service>, svc_ranking: ServiceRanking, owner_id: DynamodId, ) -> ServiceRef { let new_id = self.curr_id + 1; let service = RegisteredService { core_props: ServiceCoreProps { id: new_id, ranking: svc_ranking, }, type_id: svc_type_id, name: svc_name.into(), owner_id, used_by_count: HashMap::new(), service_object, }; let service_ref = service.make_service_ref(); let svc_name = Arc::clone(&service.name); let svcs_using_name = self.by_name.entry(svc_name).or_insert(OrdSet::new()); svcs_using_name.insert(service.core_props.clone()); let svcs_using_type_id = self.by_type_id.entry(svc_type_id).or_insert(OrdSet::new()); svcs_using_type_id.insert(service.core_props.clone()); self.by_service_id.insert(new_id, service); self.curr_id = new_id; service_ref } pub fn unregister_service(&mut self, svc_id: ServiceId) -> Option<ServiceRef> { if let Some(rs) = self.by_service_id.remove(&svc_id) { self.by_name.remove(&rs.name).expect("unsynced registry!"); self.by_type_id .remove(&rs.type_id)
random
[ { "content": "pub trait Named {\n\n fn type_name() -> &'static str;\n\n}\n\n\n", "file_path": "socrates-core/src/service/service.rs", "rank": 0, "score": 67362.85685948143 }, { "content": "#[inline(always)]\n\npub fn service_name<T: Named + ?Sized>() -> &'static str {\n\n <T>::type_name()\n\n}\n\n\n", "file_path": "socrates-core/src/service/service.rs", "rank": 1, "score": 62405.19411250492 }, { "content": "#[inline(always)]\n\npub fn any_service_type_id() -> std::any::TypeId {\n\n service_type_id::<dyn Service>()\n\n}\n", "file_path": "socrates-core/src/service/service.rs", "rank": 2, "score": 48919.24868286117 }, { "content": "#[derive(Clone)]\n\nstruct MyListener;\n\n\n\nimpl EventListener<ServiceEvent> for MyListener {\n\n fn on_event(&self, event: &ServiceEvent) {\n\n println!(\"Received: {:?}\", event);\n\n }\n\n}\n\n\n", "file_path": "socrates-launcher/src/main.rs", "rank": 3, "score": 48743.35862885308 }, { "content": "struct ReferenceInfo {\n\n pub name: String,\n\n pub unqualified_svc_name: String,\n\n pub cardinality: Cardinality,\n\n pub policy: Policy,\n\n pub policy_option: PolicyOption,\n\n}\n\n\n", "file_path": "socrates-macro/src/lib.rs", "rank": 4, "score": 47757.52750554074 }, { "content": "struct ComponentRunner {}\n", "file_path": "socrates-core/src/component/mod.rs", "rank": 5, "score": 46837.819201474995 }, { "content": "//#[derive(Component)]\n\n//#[provide(Greeter)]\n\nstruct SimpleGreeter;\n\n\n\nimpl SimpleGreeter {\n\n fn new() -> SimpleGreeter {\n\n SimpleGreeter\n\n }\n\n}\n\n\n\nimpl Greeter for SimpleGreeter {\n\n fn greet(&self, req: &GreetRequest) -> String {\n\n let gr = match req.idiom {\n\n Idiom::Regular => \"Hello\",\n\n Idiom::Formal => \"Greetings to you, \",\n\n Idiom::Slang => \"Yo\",\n\n };\n\n format!(\"{} {}\", gr, req.who).into()\n\n }\n\n}\n\n\n\nuse socrates::service::Service;\n", "file_path": "examples/example-provider/src/lib.rs", "rank": 6, "score": 46837.819201474995 }, { "content": "struct DynamodLib {\n\n id: DynamodId,\n\n lib: libloading::Library, // must be last to be dropped last\n\n}\n\nimpl DynamodLib {\n\n pub fn new(id: DynamodId, lib: libloading::Library) -> DynamodLib {\n\n DynamodLib { id, lib }\n\n }\n\n}\n\nimpl Drop for DynamodLib {\n\n fn drop(&mut self) {\n\n println!(\"Dropping dynamod #{}\", self.id);\n\n }\n\n}\n", "file_path": "socrates-core/src/module/dynamod.rs", "rank": 7, "score": 46837.819201474995 }, { "content": " #[service_trait]\n\n pub trait Greeter {\n\n fn greet(&self, who: &str) -> String;\n\n }\n\n\n\n struct FormalGreeter;\n\n impl Greeter for FormalGreeter {\n\n fn greet(&self, who: &str) -> String {\n\n format!(\"{} {}\", \"Hello\", who)\n\n }\n\n }\n\n impl FormalGreeter {\n\n fn new() -> FormalGreeter {\n\n FormalGreeter\n\n }\n\n }\n\n\n\n impl Service for FormalGreeter {}\n\n\n\n query_interface::interfaces!(FormalGreeter: Greeter);\n\n\n", "file_path": "socrates-test/src/lib.rs", "rank": 8, "score": 41681.212557280815 }, { "content": "pub trait Lifecycle {\n\n fn on_start(&self) {}\n\n}\n\n\n\nuse hashbrown::HashMap;\n\n\n\npub struct ComponentManagerHandler {\n\n manager: Listener<ComponentManager, ServiceEvent>,\n\n}\n\n\n\nimpl Activator for ComponentManagerHandler {}\n\n\n\nimpl ComponentManagerHandler {\n\n pub fn start(\n\n context: &ModuleContext,\n\n mut manager: ComponentManager,\n\n ) -> Result<ComponentManagerHandler> {\n\n manager.set_context(context);\n\n\n\n let active_manager = context.register_listener(Listener::new(manager))?;\n", "file_path": "socrates-core/src/component/mod.rs", "rank": 9, "score": 40779.88515097778 }, { "content": " #[service_trait]\n\n pub trait Greeter {\n\n fn greet(&self, req: &GreetRequest) -> String;\n\n }\n\n\n\n}\n", "file_path": "examples/example-api/src/lib.rs", "rank": 10, "score": 40779.88515097778 }, { "content": "#[inline(always)]\n\npub fn service_type_id<T: ?Sized + std::any::Any>() -> std::any::TypeId {\n\n std::any::TypeId::of::<T>()\n\n}\n\n\n", "file_path": "socrates-core/src/service/service.rs", "rank": 11, "score": 40157.44471431991 }, { "content": "pub trait Activator: Send {}\n", "file_path": "socrates-core/src/module/activator.rs", "rank": 12, "score": 38709.51165400771 }, { "content": "pub trait Factory<Source>: Sized {\n\n fn build(from: Source) -> Option<Self>;\n\n}\n\n\n\nimpl<T: Service + ?Sized> Factory<&ModuleContext> for Svc<T> {\n\n fn build(ctx: &ModuleContext) -> Option<Self> {\n\n ctx.get_first_service_typed::<T>()\n\n }\n\n}\n\n\n\nimpl<T: Service + ?Sized> Factory<&ModuleContext> for Option<Svc<T>> {\n\n fn build(ctx: &ModuleContext) -> Option<Self> {\n\n Some(ctx.get_first_service_typed::<T>())\n\n }\n\n}\n\n\n\nimpl<T: Service + ?Sized> Factory<&ModuleContext> for Vec<Svc<T>> {\n\n fn build(ctx: &ModuleContext) -> Option<Self> {\n\n Some(ctx.get_all_services_typed::<T>())\n\n }\n", "file_path": "socrates-core/src/component/factory.rs", "rank": 13, "score": 36880.08747402231 }, { "content": "pub trait Update<Source>: Sized {\n\n fn update(&self, ctx: Source) -> Option<()>;\n\n}\n\n\n\nimpl<U, T: Factory<U>> Update<U> for parking_lot::Mutex<T> {\n\n fn update(&self, ctx: U) -> Option<()> {\n\n let new_value = T::build(ctx)?;\n\n let mut this = self.lock();\n\n *this = new_value;\n\n Some(())\n\n }\n\n}\n", "file_path": "socrates-core/src/component/factory.rs", "rank": 14, "score": 36880.08747402231 }, { "content": "#[no_mangle]\n\nfn activate(ctx: Context) -> Result<Box<dyn Activator>> {\n\n println!(\"I'm started (consumer)\");\n\n // panic!(\"shoudln't segfault!\");\n\n println!(\n\n \"My Component def: {:?}\",\n\n <MyConsumer as Component>::get_definition()\n\n );\n\n\n\n let cm = ComponentManager::new().add_component::<MyConsumer>();\n\n let cmh = ComponentManagerHandler::start(&ctx, cm)?;\n\n\n\n Ok(cmh.boxed())\n\n\n\n // srv: Svc<dyn Greeter>, our only way to use the service\n\n // it cannot be cloned, you must move it or request another\n\n // instance from the framework!\n\n // if let Some(srv) = ctx.get_service_typed::<Greeter>() {\n\n // let c = MyConsumer::new(ctx.clone(), srv);\n\n\n\n // // let cm: MyConsumer = Component::instantiate();\n", "file_path": "examples/example-consumer/src/lib.rs", "rank": 15, "score": 36135.446860171214 }, { "content": "pub trait ComponentControllerT: Send + Sync {\n\n fn set_context(&mut self, context: &ModuleContext);\n\n fn query_registry(&self);\n\n fn on_service_event(&self, event: &ServiceEvent);\n\n fn print_status(&self);\n\n}\n\n\n\nimpl<T: Component> ComponentController<T> {\n\n pub fn new(\n\n definition: ComponentDefinition,\n\n instantiate: fn(&ModuleContext, &ComponentReferences) -> Option<T>,\n\n ) -> ComponentController<T> {\n\n ComponentController {\n\n context: None,\n\n definition,\n\n instantiate,\n\n references: RwLock::new(ComponentReferences::new()),\n\n instances: RwLock::new(Vec::new()),\n\n }\n\n }\n", "file_path": "socrates-core/src/component/mod.rs", "rank": 16, "score": 35440.5111545482 }, { "content": "pub trait Service: Object + Send + Sync {}\n\nmopo!(Service);\n\n\n", "file_path": "socrates-core/src/service/service.rs", "rank": 17, "score": 35246.664745168324 }, { "content": "pub trait EventListener<E>: Send + Sync {\n\n fn on_event(&self, event: &E);\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Listener<T: EventListener<E>, E> {\n\n event_listener: Arc<T>,\n\n _phantom: std::marker::PhantomData<E>,\n\n}\n\n\n\nimpl<T: EventListener<E>, E> Listener<T, E> {\n\n pub fn new(sel: T) -> Listener<T, E> {\n\n Listener {\n\n event_listener: Arc::new(sel),\n\n _phantom: std::marker::PhantomData,\n\n }\n\n }\n\n}\n\n\n\nimpl<T: EventListener<E>, E> Deref for Listener<T, E> {\n", "file_path": "socrates-core/src/common/listener.rs", "rank": 18, "score": 34548.80918863656 }, { "content": "#[proc_macro_derive(Component, attributes(provide, custom_lifecycle))]\n\npub fn component(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n\n\n let mut references = Vec::new();\n\n if let syn::Data::Struct(ref struct_def) = input.data {\n\n for f in struct_def.fields.iter() {\n\n if let syn::Type::Path(ref path) = &f.ty {\n\n let p = &path.path;\n\n println!(\"base path {:?}\", f.ident);\n\n\n\n for seg in p.segments.iter() {\n\n println!(\"segment {:?}\", seg.ident.to_string());\n\n if let syn::PathArguments::AngleBracketed(ref type_params) = seg.arguments {\n\n for arg in type_params.args.iter() {\n\n if let syn::GenericArgument::Type(tpe) = arg {\n\n if let syn::Type::TraitObject(trait_obj) = tpe {\n\n for bound in trait_obj.bounds.iter() {\n\n if let syn::TypeParamBound::Trait(ref trt) = bound {\n\n for trt_seg in trt.path.segments.iter() {\n\n println!(\"trait {:?}\", trt_seg.ident.to_string());\n", "file_path": "socrates-macro/src/lib.rs", "rank": 19, "score": 34548.80918863656 }, { "content": "pub trait ServiceEventListener: EventListener<ServiceEvent> {}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub enum ServiceEvent {\n\n ServiceRegistered(ServiceRef),\n\n ServiceModified(ServiceRef),\n\n ServiceUnregistered(ServiceRef),\n\n}\n\n\n\nimpl ServiceEvent {\n\n pub fn get_service_ref(&self) -> &ServiceRef {\n\n match self {\n\n ServiceEvent::ServiceRegistered(ref rfe)\n\n | ServiceEvent::ServiceModified(ref rfe)\n\n | ServiceEvent::ServiceUnregistered(ref rfe) => rfe,\n\n }\n\n }\n\n}\n", "file_path": "socrates-core/src/service/event.rs", "rank": 20, "score": 34161.699703238715 }, { "content": "// Not a trait object\n\npub trait Component: Lifecycle + Sized + Send + Sync {\n\n fn get_definition() -> ComponentDefinition;\n\n fn instantiate(context: &ModuleContext, references: &ComponentReferences) -> Option<Self>;\n\n fn update(\n\n &self,\n\n field_id: usize,\n\n context: &ModuleContext,\n\n references: &ComponentReferences,\n\n ) -> Option<()>;\n\n}\n\n\n", "file_path": "socrates-core/src/component/mod.rs", "rank": 21, "score": 33775.51981124911 }, { "content": "#[proc_macro_attribute]\n\npub fn service_trait(_attr: TokenStream, item: TokenStream) -> TokenStream {\n\n let mut input: syn::ItemTrait = parse_macro_input!(item);\n\n\n\n let svc_trait_path: syn::Path = syn::parse_str(\"socrates::service::Service\").unwrap();\n\n\n\n let svc_trait_bound = syn::TraitBound {\n\n paren_token: None,\n\n modifier: syn::TraitBoundModifier::None,\n\n lifetimes: None,\n\n path: svc_trait_path,\n\n };\n\n\n\n input\n\n .supertraits\n\n .push(syn::TypeParamBound::Trait(svc_trait_bound));\n\n\n\n let trait_name = &input.ident;\n\n let trait_name_as_string = trait_name.to_string();\n\n\n\n let expanded = quote! {\n", "file_path": "socrates-macro/src/lib.rs", "rank": 22, "score": 30676.160894148994 }, { "content": "pub fn build<Source, Provided: Into<Source>, T: Factory<Source>>(from: Provided) -> Option<T> {\n\n T::build(from.into())\n\n}\n\n\n", "file_path": "socrates-core/src/component/factory.rs", "rank": 23, "score": 27375.47904488824 }, { "content": "type ComponentName = String;\n\npub struct ComponentManager {\n\n // config: String // TODO JSon\n\n components: HashMap<ComponentName, Box<dyn ComponentControllerT>>,\n\n}\n\nimpl ComponentManager {\n\n pub fn new() -> ComponentManager {\n\n ComponentManager {\n\n components: HashMap::new(),\n\n }\n\n }\n\n\n\n pub fn add_component<T: Component + 'static>(mut self) -> ComponentManager {\n\n let def = <T as Component>::get_definition();\n\n let component_name = def.name.clone();\n\n let f = <T as Component>::instantiate;\n\n let cc = ComponentController::new(def, f);\n\n self.components.insert(component_name, Box::new(cc));\n\n ComponentManager {\n\n components: self.components,\n", "file_path": "socrates-core/src/component/mod.rs", "rank": 24, "score": 25720.397405277858 }, { "content": "use super::*;\n\n\n\npub type ActivateFn = fn(Context) -> Result<Box<dyn Activator>>;\n\n\n", "file_path": "socrates-core/src/module/activator.rs", "rank": 25, "score": 11.246774828859433 }, { "content": "// WIP experimenting\n\n// Playground for service components\n\n\n\npub use self::definition::*;\n\nuse super::common::*;\n\nuse super::module::Context as ModuleContext;\n\nuse super::module::*;\n\nuse super::service::*;\n\nuse std::ops::Deref;\n\n\n\npub mod definition;\n\npub mod factory;\n\n\n\npub struct Context {\n\n pub module_context: ModuleContext,\n\n}\n\n\n\n// Not a trait object\n", "file_path": "socrates-core/src/component/mod.rs", "rank": 27, "score": 10.26781213223404 }, { "content": "use super::common::*;\n\nuse super::service::*;\n\nuse parking_lot::Mutex;\n\nuse std::any::TypeId;\n\nuse std::sync::{Arc, Weak};\n\n\n\nmod activator;\n\nmod container;\n\nmod context;\n\nmod dynamod;\n\n\n\npub use super::Result;\n\npub type DynamodId = u32;\n\npub use self::activator::ActivateFn;\n\npub use self::activator::Activator;\n\npub use self::container::Container;\n\npub use self::context::Context;\n\npub use self::dynamod::Dynamod;\n", "file_path": "socrates-core/src/module/mod.rs", "rank": 28, "score": 9.844808895491244 }, { "content": "pub use self::reference::ServiceCoreProps;\n\npub use self::reference::ServiceRanking;\n\npub use self::reference::ServiceRef;\n\npub use self::registration::ServiceRegistration;\n\npub use self::registry::ServiceRegistry;\n\npub use self::service::any_service_type_id;\n\npub use self::service::service_name;\n\npub use self::service::service_type_id;\n\npub use self::service::Named;\n\npub use self::service::Service;\n\npub use self::service::ServiceId;\n\npub use self::svc::Svc;\n", "file_path": "socrates-core/src/service/mod.rs", "rank": 29, "score": 9.48209495181265 }, { "content": "use super::super::service::query::ServiceQuery;\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]\n\npub struct ComponentDefinition {\n\n pub name: String,\n\n pub provides: Vec<Provide>,\n\n pub references: Vec<Reference>,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]\n\npub struct Provide {\n\n pub name: String,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub struct Reference {\n\n pub name: String,\n\n pub svc_name: String, // This is just for human-readability\n\n pub svc_query: ServiceQuery,\n\n pub options: ReferenceOptions,\n", "file_path": "socrates-core/src/component/definition.rs", "rank": 30, "score": 9.31691824967627 }, { "content": "use parking_lot::RwLock;\n\nuse std::any::TypeId;\n\nuse std::sync::{Arc, Weak};\n\n\n\nmod event;\n\nmod manager;\n\npub mod query;\n\nmod reference;\n\nmod registration;\n\nmod registry;\n\nmod service;\n\nmod svc;\n\n\n\nuse super::common::*;\n\nuse super::module::*;\n\n\n\npub use self::event::ServiceEvent;\n\npub use self::event::ServiceEventListener;\n\npub use self::manager::ServiceManager;\n\npub use self::query::ServiceQuery;\n", "file_path": "socrates-core/src/service/mod.rs", "rank": 31, "score": 8.96718071730502 }, { "content": " pub fn register_service(\n\n &self,\n\n type_id: std::any::TypeId,\n\n svc_name: &str,\n\n svc_ranking: ServiceRanking,\n\n svc: Box<dyn Service>,\n\n ) -> Result<ServiceRegistration> {\n\n let svc_manager = self.use_manager_or_fail()?;\n\n\n\n let service_ref =\n\n svc_manager.register_service(type_id, svc_name, svc_ranking, self.dynamod_id, svc)?;\n\n\n\n let srv_reg = ServiceRegistration::new(service_ref, self.shared_service_manager());\n\n\n\n Ok(srv_reg)\n\n }\n\n\n\n pub fn register_service_typed<T: Service + Named + ?Sized>(\n\n &self,\n\n svc: Box<dyn Service>,\n", "file_path": "socrates-core/src/module/context.rs", "rank": 32, "score": 8.184611245911682 }, { "content": "use super::*;\n\n\n\npub struct Dynamod {\n\n pub id: DynamodId,\n\n pub path: String,\n\n activator: Option<Box<dyn Activator>>,\n\n svc_manager: Weak<ServiceManager>,\n\n lib: DynamodLib, // must be last to be dropped last\n\n}\n\n\n\nimpl Dynamod {\n\n pub fn new(\n\n id: DynamodId,\n\n svc_manager: Weak<ServiceManager>,\n\n path: &str,\n\n lib: libloading::Library,\n\n ) -> Dynamod {\n\n Dynamod {\n\n id,\n\n path: path.to_owned(),\n", "file_path": "socrates-core/src/module/dynamod.rs", "rank": 33, "score": 7.799207322330391 }, { "content": "use super::*;\n\n\n\npub type ServiceRanking = i32;\n\n\n\n/// Total ordering between two services\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub struct ServiceCoreProps {\n\n pub ranking: ServiceRanking,\n\n pub id: ServiceId,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub struct ServiceRef {\n\n pub core: ServiceCoreProps,\n\n pub name: String,\n\n pub type_id: TypeId,\n\n pub owner_id: DynamodId,\n\n}\n", "file_path": "socrates-core/src/service/reference.rs", "rank": 34, "score": 7.70866785642843 }, { "content": "use super::*;\n\n\n\nuse super::super::module::Context as ModuleContext;\n\nuse super::Context as ComponentContext;\n\n\n", "file_path": "socrates-core/src/component/factory.rs", "rank": 35, "score": 7.687994008662652 }, { "content": "\n\n // By Name\n\n pub fn get_services_id_by_name(&self, svc_name: &str) -> impl Iterator<Item = ServiceId> {\n\n self.registry.read().get_services_id_by_name(svc_name)\n\n }\n\n pub fn get_services_ref_by_name(\n\n &self,\n\n svc_name: &str,\n\n ) -> impl Iterator<Item = ServiceRef> + '_ {\n\n self.registry\n\n .read()\n\n .get_services_id_by_name(svc_name)\n\n .flat_map(move |svc_id| self.registry.read().get_service_ref(svc_id))\n\n }\n\n\n\n pub fn get_services_by_name(\n\n &self,\n\n svc_name: &str,\n\n user_id: DynamodId,\n\n ) -> impl Iterator<Item = (ServiceId, Weak<dyn Service>)> + '_ {\n", "file_path": "socrates-core/src/service/manager.rs", "rank": 36, "score": 7.261717928234164 }, { "content": "extern crate socrates_core;\n\n\n\npub use socrates_core::*;\n\npub mod common {\n\n pub use socrates_core::common::*;\n\n}\n\n\n\npub mod module {\n\n pub use socrates_core::module::*;\n\n}\n\npub mod service {\n\n pub use socrates_core::service::*;\n\n}\n\n\n\npub mod component {\n\n pub use socrates_core::component::*;\n\n}\n\n\n\nextern crate socrates_macro;\n\npub use socrates_macro::*;\n", "file_path": "socrates/src/lib.rs", "rank": 38, "score": 6.960550987016237 }, { "content": "pub mod listener;\n\n\n\npub use self::listener::EventListener;\n\npub use self::listener::Listener;\n\npub use self::listener::WeakListener;\n", "file_path": "socrates-core/src/common/mod.rs", "rank": 39, "score": 6.879480825937172 }, { "content": " self.registry\n\n .read()\n\n .get_services_id_by_name(svc_name)\n\n .flat_map(move |svc_id| {\n\n self.get_service_object(svc_id, user_id)\n\n .map(|svc_obj| (svc_id, svc_obj))\n\n })\n\n }\n\n\n\n pub fn remove_use(&self, svc_id: ServiceId, user_id: DynamodId) {\n\n self.registry.write().remove_use(svc_id, user_id);\n\n }\n\n}\n\n\n\nuse im::vector::Vector;\n\n\n\n#[derive(Default, Clone)]\n\npub struct ServiceListeners {\n\n listeners: Vector<WeakListener<ServiceEvent>>,\n\n}\n", "file_path": "socrates-core/src/service/manager.rs", "rank": 40, "score": 6.808631867671075 }, { "content": "use super::*;\n\n\n", "file_path": "socrates-core/src/service/event.rs", "rank": 41, "score": 6.765785078755799 }, { "content": "use super::*;\n\n\n\n#[derive(Clone)]\n\npub struct Context {\n\n dynamod_id: DynamodId,\n\n svc_manager: Weak<ServiceManager>,\n\n}\n\n\n\nimpl Context {\n\n pub fn new(dynamod_id: DynamodId, svc_manager: Weak<ServiceManager>) -> Context {\n\n Context {\n\n dynamod_id,\n\n svc_manager,\n\n }\n\n }\n\n\n\n pub fn use_manager_or_fail(&self) -> Result<Arc<ServiceManager>> {\n\n self.svc_manager\n\n .upgrade()\n\n .ok_or(\"Socrates container is down.\".into())\n", "file_path": "socrates-core/src/module/context.rs", "rank": 42, "score": 6.716761351363151 }, { "content": "// WIP experimenting\n\n// Playground for better service queries (filters), service tracking and multi-service tracking\n\nuse super::*;\n\n\n\n#[derive(Debug, PartialEq, Eq, Ord, PartialOrd, Hash)]\n\npub enum ServiceQuery<T: Service + ?Sized = dyn Service> {\n\n ServiceId(ServiceId),\n\n Name(String),\n\n TypeId(TypeQuery<T>), // TODO add combinators And, Or, Not and property matchers.\n\n}\n\n\n\n// Must be implemented manually to ignore the fact that !(T: Clone)\n\nimpl<T: Service + ?Sized> Clone for ServiceQuery<T> {\n\n fn clone(&self) -> ServiceQuery<T> {\n\n match self {\n\n ServiceQuery::ServiceId(id) => ServiceQuery::ServiceId(*id),\n\n ServiceQuery::Name(s) => ServiceQuery::Name(s.clone()),\n\n ServiceQuery::TypeId(tq) => ServiceQuery::TypeId(tq.clone()),\n\n }\n\n }\n", "file_path": "socrates-core/src/service/query.rs", "rank": 43, "score": 6.68173262896318 }, { "content": " }\n\n\n\n #[inline(always)]\n\n pub fn by_name(s: String) -> ServiceQuery {\n\n ServiceQuery::Name(s)\n\n }\n\n\n\n #[inline(always)]\n\n pub fn by_type_id(s: TypeId) -> ServiceQuery {\n\n let tq: TypeQuery = <TypeQuery<dyn Service>>::raw(s);\n\n ServiceQuery::TypeId(tq)\n\n }\n\n\n\n #[inline(always)]\n\n pub fn by_type<T: Service + ?Sized>() -> ServiceQuery<T> {\n\n ServiceQuery::TypeId(<TypeQuery<T>>::by_type::<T>())\n\n }\n\n\n\n pub fn matches(&self, e: &ServiceRef) -> bool {\n\n match self {\n\n ServiceQuery::ServiceId(id) => e.core.id == *id,\n\n ServiceQuery::Name(s) => e.name == *s,\n\n ServiceQuery::TypeId(tq) => e.type_id == tq.type_id,\n\n }\n\n }\n\n}\n", "file_path": "socrates-core/src/service/query.rs", "rank": 44, "score": 6.555472093423423 }, { "content": "use super::*;\n\n\n\npub struct Svc<T: Service + ?Sized = dyn Service> {\n\n // Options are implementation details -- must be built with values\n\n service: Option<Weak<T>>,\n\n svc_manager: Weak<ServiceManager>,\n\n service_id: ServiceId,\n\n user_id: DynamodId,\n\n}\n\n\n\nimpl<T: Service + ?Sized> Svc<T> {\n\n pub fn new(\n\n service: Weak<T>,\n\n service_id: ServiceId,\n\n user_id: DynamodId,\n\n svc_manager: Weak<ServiceManager>,\n\n ) -> Svc<T> {\n\n Svc {\n\n service: Some(service),\n\n svc_manager,\n", "file_path": "socrates-core/src/service/svc.rs", "rank": 45, "score": 6.4150872648259964 }, { "content": " // #[derive(Component)]\n\n struct GreetPrinter {\n\n greeter: Svc<dyn Greeter>,\n\n }\n\n impl GreetPrinter {\n\n pub fn print_greet(&self) {\n\n println!(\"{}\", self.greeter.greet(\"world\"));\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_foo() {\n\n let formal_greeter_def = ComponentDefinition {\n\n name: \"FormalGreeter\".into(),\n\n provides: vec![Provide {\n\n name: \"Greeter\".into(),\n\n }],\n\n ..Default::default()\n\n };\n\n\n", "file_path": "socrates-test/src/lib.rs", "rank": 46, "score": 6.412715761496447 }, { "content": "#[macro_use]\n\nextern crate socrates;\n\n\n\npub mod greet {\n\n use socrates::service::Service;\n\n use socrates::service_trait;\n\n\n\n #[derive(Clone, Debug)]\n\n pub enum Idiom {\n\n Formal,\n\n Regular,\n\n Slang,\n\n }\n\n\n\n #[derive(Clone, Debug)]\n\n pub struct GreetRequest {\n\n pub who: String,\n\n pub idiom: Idiom,\n\n }\n\n\n\n #[service_trait]\n", "file_path": "examples/example-api/src/lib.rs", "rank": 48, "score": 6.218950723452656 }, { "content": "use super::*;\n\n\n\n#[derive(Default)]\n\npub struct Container {\n\n svc_manager: Arc<ServiceManager>, // the \"master\" strong ref\n\n modules: Mutex<Vec<Dynamod>>,\n\n zombie_modules: Mutex<Vec<Dynamod>>,\n\n}\n\n\n\nimpl Container {\n\n pub fn new() -> Container {\n\n Default::default()\n\n }\n\n\n\n fn shared_service_manager(&self) -> Weak<ServiceManager> {\n\n Arc::downgrade(&self.svc_manager)\n\n }\n\n\n\n pub fn install(&self, path: &str) -> Result<()> {\n\n let mut mods = self.modules.lock();\n", "file_path": "socrates-core/src/module/container.rs", "rank": 49, "score": 6.063754660443962 }, { "content": " // Get by name\n\n\n\n pub fn get_first_service_id_by_name(&self, svc_name: &str) -> Option<ServiceId> {\n\n let svc_manager = self.try_manager()?;\n\n\n\n svc_manager.get_services_id_by_name(svc_name).next()\n\n }\n\n\n\n pub fn get_all_services_id_by_name(&self, svc_name: &str) -> Vec<ServiceId> {\n\n self.try_manager()\n\n .into_iter()\n\n .flat_map(|svc_manager| svc_manager.get_services_id_by_name(svc_name))\n\n .collect()\n\n }\n\n\n\n pub fn get_first_service_ref_by_name(&self, svc_name: &str) -> Option<ServiceRef> {\n\n let svc_manager = self.try_manager()?;\n\n\n\n let s = svc_manager.get_services_ref_by_name(&svc_name).next();\n\n\n", "file_path": "socrates-core/src/module/context.rs", "rank": 50, "score": 5.969030794002364 }, { "content": " }\n\n }\n\n\n\n // Register\n\n\n\n pub fn register_service(\n\n &self,\n\n svc_type_id: TypeId,\n\n svc_name: &str,\n\n svc_ranking: ServiceRanking,\n\n owner_id: DynamodId,\n\n svc: Box<dyn Service>,\n\n ) -> Result<ServiceRef> {\n\n let service_ref = self.registry.write().register_service(\n\n svc_type_id,\n\n svc_name,\n\n svc.into(),\n\n svc_ranking,\n\n owner_id,\n\n );\n", "file_path": "socrates-core/src/service/manager.rs", "rank": 51, "score": 5.965953589591819 }, { "content": " s\n\n }\n\n\n\n pub fn get_all_services_ref_by_name(&self, svc_name: &str) -> Vec<ServiceRef> {\n\n let mut s: Vec<ServiceRef> = Vec::new();\n\n\n\n for svc_manager in self.try_manager() {\n\n for x in svc_manager.get_services_ref_by_name(svc_name) {\n\n s.push(x);\n\n }\n\n }\n\n\n\n s\n\n }\n\n\n\n pub fn get_first_service_by_name(&self, svc_name: &str) -> Option<Svc> {\n\n let svc_manager = self.try_manager()?;\n\n\n\n let s = svc_manager\n\n .get_services_by_name(svc_name, self.dynamod_id)\n", "file_path": "socrates-core/src/module/context.rs", "rank": 52, "score": 5.813647349179233 }, { "content": " _ctx: socrates::module::Context,\n\n greeter: Svc<dyn Greeter>,\n\n maybe_greeter: Option<Svc<dyn Greeter>>,\n\n greeters: Vec<Svc<dyn Greeter>>,\n\n dyn_greeter: Mutex<Svc<dyn Greeter>>,\n\n dyn_maybe_greeter: Mutex<Option<Svc<dyn Greeter>>>,\n\n dyn_greeters: Mutex<Vec<Svc<dyn Greeter>>>,\n\n\n\n}\n\n\n\nimpl MyConsumer {\n\n // pub fn new(_ctx: Context, greeter: Svc<dyn Greeter>) -> MyConsumer {\n\n // MyConsumer { _ctx, greeter }\n\n // }\n\n pub fn do_it(&self, req: &GreetRequest) -> String {\n\n self.greeter.greet(req)\n\n }\n\n}\n\n\n\nimpl Lifecycle for MyConsumer {\n", "file_path": "examples/example-consumer/src/lib.rs", "rank": 53, "score": 5.7707807752846 }, { "content": "use super::*;\n\n\n\n#[derive(Default)]\n\npub struct ServiceManager {\n\n pub registry: RwLock<ServiceRegistry>,\n\n pub listeners: RwLock<ServiceListeners>,\n\n}\n\n\n\nimpl ServiceManager {\n\n pub fn register_listener(&self, listener: WeakListener<ServiceEvent>) {\n\n let mut listeners = self.listeners.write();\n\n\n\n listeners.insert_listener(listener);\n\n }\n\n\n\n pub fn unregister_service(&self, svc_id: ServiceId) {\n\n let mb_ref = self.registry.write().unregister_service(svc_id);\n\n\n\n if let Some(service_ref) = mb_ref {\n\n self.fire_event(&ServiceEvent::ServiceUnregistered(service_ref.clone()));\n", "file_path": "socrates-core/src/service/manager.rs", "rank": 54, "score": 5.670772839329551 }, { "content": "use super::*;\n\n\n\npub struct ServiceRegistration {\n\n pub svc_ref: ServiceRef,\n\n svc_manager: Weak<ServiceManager>,\n\n}\n\n\n\nimpl ServiceRegistration {\n\n pub fn new(svc_ref: ServiceRef, svc_manager: Weak<ServiceManager>) -> ServiceRegistration {\n\n ServiceRegistration {\n\n svc_ref,\n\n svc_manager,\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for ServiceRegistration {\n\n fn drop(&mut self) {\n\n if let Some(svc_manager) = self.svc_manager.upgrade() {\n\n svc_manager.unregister_service(self.svc_ref.core.id);\n\n }\n\n }\n\n}\n", "file_path": "socrates-core/src/service/registration.rs", "rank": 55, "score": 5.619798102985365 }, { "content": "use query_interface::{mopo, Object};\n\n\n\npub type ServiceId = u32;\n\n\n", "file_path": "socrates-core/src/service/service.rs", "rank": 56, "score": 5.434270918704012 }, { "content": " options: socrates::component::definition::ReferenceOptions {\n\n cardinality: #card,\n\n policy: #pol,\n\n policy_option: #pol_opt\n\n }\n\n }\n\n });\n\n }\n\n\n\n let service_trait = if provided.is_empty() {\n\n None\n\n } else {\n\n Some(quote! {\n\n use socrates::service::Service;\n\n impl Service for #struct_name {}\n\n\n\n #[macro_use]\n\n extern crate query_interface;\n\n interfaces!(#struct_name: #(#provided),*);\n\n\n", "file_path": "socrates-macro/src/lib.rs", "rank": 57, "score": 5.420155161837776 }, { "content": "\n\n self.get_all_services_by_type_id_typed(svc_type_id)\n\n }\n\n\n\n pub fn get_first_service_by_name_typed<T: Service + ?Sized>(\n\n &self,\n\n svc_name: &str,\n\n ) -> Option<Svc<T>> {\n\n self.get_first_service_by_name(svc_name)\n\n .and_then(|svc| Svc::cast::<T>(svc).ok())\n\n }\n\n\n\n pub fn get_all_services_by_name_typed<T: Service + ?Sized>(\n\n &self,\n\n svc_name: &str,\n\n ) -> Vec<Svc<T>> {\n\n let mut s: Vec<Svc<T>> = Vec::new();\n\n\n\n for svc_manager in self.try_manager() {\n\n for x in svc_manager.get_services_by_name(svc_name, self.dynamod_id) {\n", "file_path": "socrates-core/src/module/context.rs", "rank": 58, "score": 5.417527122682838 }, { "content": " .map(|x| Svc::new(x.1, x.0, self.dynamod_id, self.shared_service_manager()))\n\n .next();\n\n\n\n s\n\n }\n\n\n\n pub fn get_all_services_by_name(&self, svc_name: &str) -> Vec<Svc> {\n\n let mut s: Vec<Svc> = Vec::new();\n\n\n\n for svc_manager in self.try_manager() {\n\n for x in svc_manager.get_services_by_name(svc_name, self.dynamod_id) {\n\n s.push(Svc::new(\n\n x.1,\n\n x.0,\n\n self.dynamod_id,\n\n self.shared_service_manager(),\n\n ));\n\n }\n\n }\n\n\n", "file_path": "socrates-core/src/module/context.rs", "rank": 61, "score": 4.832561850647864 }, { "content": " let id = (*mods).len() as u32;\n\n let lib = libloading::Library::new(path)?;\n\n\n\n let dyn_mod = Dynamod::new(id, self.shared_service_manager(), path, lib);\n\n mods.push(dyn_mod);\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn uninstall(&self, idx: DynamodId) -> Result<()> {\n\n let mut mods = self.modules.lock();\n\n let dyn_mod = mods.remove(idx as usize);\n\n\n\n {\n\n let mut zombie_mods = self.zombie_modules.lock();\n\n let zm = dyn_mod.zombify();\n\n\n\n zombie_mods.push(zm);\n\n }\n\n Ok(())\n", "file_path": "socrates-core/src/module/container.rs", "rank": 62, "score": 4.589320780709458 }, { "content": " _consumer: MyConsumer,\n\n}\n\n\n\nimpl MyActivator {\n\n pub fn new(_consumer: MyConsumer) -> MyActivator {\n\n MyActivator { _consumer }\n\n }\n\n}\n\nimpl Activator for MyActivator {}\n\nimpl Drop for MyActivator {\n\n fn drop(&mut self) {\n\n println!(\"I'm stopped (consumer)\");\n\n }\n\n}\n\n\n\nuse parking_lot::Mutex;\n\n\n\n#[derive(Component)]\n\n#[custom_lifecycle]\n\npub struct MyConsumer {\n", "file_path": "examples/example-consumer/src/lib.rs", "rank": 63, "score": 4.505228752445168 }, { "content": "}\n\n\n\nimpl EventListener<ServiceEvent> for ComponentManager {\n\n fn on_event(&self, event: &ServiceEvent) {\n\n for (_, cc) in self.components.iter() {\n\n cc.on_service_event(&event);\n\n }\n\n }\n\n}\n\n\n\nuse parking_lot::RwLock;\n\nuse std::sync::Arc;\n\n\n\n#[derive(Clone, Debug)]\n\npub struct ComponentReferences {\n\n inner: HashMap<Arc<str>, im::OrdSet<ServiceCoreProps>>,\n\n}\n\nimpl ComponentReferences {\n\n pub fn new() -> ComponentReferences {\n\n ComponentReferences {\n", "file_path": "socrates-core/src/component/mod.rs", "rank": 64, "score": 4.436970263211515 }, { "content": " ServiceQuery::Name(s) => self.get_all_services_by_name_typed(&s),\n\n ServiceQuery::TypeId(tid) => self.get_all_services_by_type_id_typed(tid.type_id),\n\n }\n\n }\n\n\n\n pub fn get_first_service_ref_by_query<T: Service + ?Sized>(\n\n &self,\n\n query: &ServiceQuery<T>,\n\n ) -> Option<ServiceRef> {\n\n match query {\n\n ServiceQuery::ServiceId(id) => self.get_service_ref(*id),\n\n ServiceQuery::Name(s) => self.get_first_service_ref_by_name(&s),\n\n ServiceQuery::TypeId(tid) => self.get_first_service_ref_by_type_id(tid.type_id),\n\n }\n\n }\n\n\n\n pub fn get_all_services_ref_by_query<T: Service + ?Sized>(\n\n &self,\n\n query: &ServiceQuery<T>,\n\n ) -> Vec<ServiceRef> {\n", "file_path": "socrates-core/src/module/context.rs", "rank": 65, "score": 4.429206720412933 }, { "content": " ) -> Result<ServiceRegistration> {\n\n let svc_type_id = service_type_id::<T>();\n\n let svc_name = service_name::<T>();\n\n self.register_service(svc_type_id, &svc_name, Default::default(), svc)\n\n }\n\n\n\n // Get by service_id\n\n pub fn get_service_ref(&self, svc_id: ServiceId) -> Option<ServiceRef> {\n\n let svc_manager = self.try_manager()?;\n\n\n\n svc_manager.get_service_ref(svc_id)\n\n }\n\n\n\n pub fn get_service(&self, svc_id: ServiceId) -> Option<Svc> {\n\n let svc_manager = self.try_manager()?;\n\n\n\n svc_manager\n\n .get_service_object(svc_id, self.dynamod_id)\n\n .map(|x| Svc::new(x, svc_id, self.dynamod_id, self.shared_service_manager()))\n\n }\n", "file_path": "socrates-core/src/module/context.rs", "rank": 68, "score": 4.158650840814619 }, { "content": " type Target = T;\n\n\n\n #[inline(always)]\n\n fn deref(&self) -> &T {\n\n &self.event_listener.deref()\n\n }\n\n}\n\n\n\nimpl<T: EventListener<E> + 'static, E> Listener<T, E> {\n\n pub fn weaken(&self) -> WeakListener<E> {\n\n let w = Arc::downgrade(&self.event_listener);\n\n WeakListener { event_listener: w }\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct WeakListener<E> {\n\n event_listener: Weak<dyn EventListener<E>>,\n\n}\n\n\n", "file_path": "socrates-core/src/common/listener.rs", "rank": 69, "score": 4.031480161881584 }, { "content": " pub fn raw(type_id: TypeId) -> TypeQuery<dyn Service> {\n\n TypeQuery {\n\n type_id,\n\n _phantom: std::marker::PhantomData,\n\n }\n\n }\n\n\n\n #[inline(always)]\n\n pub fn by_type<U: Service + ?Sized>() -> TypeQuery<U> {\n\n TypeQuery {\n\n type_id: service_type_id::<U>(),\n\n _phantom: std::marker::PhantomData,\n\n }\n\n }\n\n}\n\n\n\nimpl ServiceQuery {\n\n #[inline(always)]\n\n pub fn by_service_id(id: ServiceId) -> ServiceQuery {\n\n ServiceQuery::ServiceId(id)\n", "file_path": "socrates-core/src/service/query.rs", "rank": 70, "score": 4.025274506319727 }, { "content": "impl Service for SimpleGreeter {}\n\n#[macro_use]\n\nextern crate query_interface;\n\ninterfaces!(SimpleGreeter: Greeter);\n\nimpl socrates::component::Component for SimpleGreeter {\n\n fn get_definition() -> socrates::component::ComponentDefinition {\n\n socrates::component::ComponentDefinition {\n\n name: \"SimpleGreeter\".to_string(),\n\n provides: vec![socrates::component::definition::Provide {\n\n name: socrates::service::Service::get_name::<Greeter>().to_string(),\n\n }],\n\n references: vec![],\n\n }\n\n }\n\n fn instantiate(\n\n ctx: &socrates::module::Context,\n\n references: &socrates::component::ComponentReferences,\n\n ) -> Option<SimpleGreeter> {\n\n println!(\"Instanciating me, {}\", \"SimpleGreeter\");\n\n Some(SimpleGreeter)\n", "file_path": "examples/example-provider/src/lib.rs", "rank": 71, "score": 3.9684214353222322 }, { "content": " }\n\n\n\n #[inline(always)]\n\n pub fn try_manager(&self) -> Option<Arc<ServiceManager>> {\n\n self.svc_manager.upgrade()\n\n }\n\n\n\n pub fn register_listener<T: EventListener<ServiceEvent> + 'static>(\n\n &self,\n\n listener: Listener<T, ServiceEvent>,\n\n ) -> Result<Listener<T, ServiceEvent>> {\n\n let svc_manager = self.use_manager_or_fail()?;\n\n\n\n svc_manager.register_listener(listener.weaken());\n\n\n\n Ok(listener)\n\n }\n\n\n\n // Register service\n\n\n", "file_path": "socrates-core/src/module/context.rs", "rank": 72, "score": 3.9579614139099877 }, { "content": " s\n\n }\n\n\n\n pub fn get_first_service_by_query<T: Service + ?Sized>(\n\n &self,\n\n query: &ServiceQuery<T>,\n\n ) -> Option<Svc<T>> {\n\n match query {\n\n ServiceQuery::ServiceId(id) => self.get_service_by_id_typed(*id),\n\n ServiceQuery::Name(s) => self.get_first_service_by_name_typed(&s),\n\n ServiceQuery::TypeId(tid) => self.get_first_service_by_type_id_typed(tid.type_id),\n\n }\n\n }\n\n\n\n pub fn get_all_services_by_query<T: Service + ?Sized>(\n\n &self,\n\n query: &ServiceQuery<T>,\n\n ) -> Vec<Svc<T>> {\n\n match query {\n\n ServiceQuery::ServiceId(id) => self.get_service_by_id_typed(*id).into_iter().collect(),\n", "file_path": "socrates-core/src/module/context.rs", "rank": 73, "score": 3.800230795727746 }, { "content": " #[inline(always)]\n\n pub fn get_service_object(\n\n &self,\n\n svc_id: ServiceId,\n\n user_id: DynamodId,\n\n ) -> Option<Weak<dyn Service>> {\n\n self.registry.write().get_service_object(svc_id, user_id)\n\n }\n\n\n\n // By TypeId\n\n pub fn get_services_id_by_type_id(\n\n &self,\n\n svc_type_id: TypeId,\n\n ) -> impl Iterator<Item = ServiceId> {\n\n self.registry.read().get_services_id_by_type_id(svc_type_id)\n\n }\n\n\n\n pub fn get_services_ref_by_type_id(\n\n &self,\n\n svc_type_id: TypeId,\n", "file_path": "socrates-core/src/service/manager.rs", "rank": 74, "score": 3.7763022420962953 }, { "content": "#[macro_use]\n\nextern crate socrates_macro;\n\n\n\nuse socrates::component::*;\n\nuse socrates::module::{Activator, Context};\n\nuse socrates::service::ServiceRegistration;\n\nuse socrates::Result;\n\n\n\n#[no_mangle]\n", "file_path": "examples/example-provider/src/lib.rs", "rank": 75, "score": 3.7567911554193283 }, { "content": "#[macro_use]\n\nextern crate socrates_macro;\n\n\n\nuse socrates::component::*;\n\nuse socrates::module::{Activator, Context};\n\nuse socrates::service::Svc;\n\n\n\nuse socrates::Result;\n\n\n\nuse example_api::greet::{GreetRequest, Greeter, Idiom};\n\n\n\n#[no_mangle]\n", "file_path": "examples/example-consumer/src/lib.rs", "rank": 76, "score": 3.7492625158293094 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use socrates::component::definition::*;\n\n use socrates::component::*;\n\n use socrates::service::*;\n\n use socrates::service_trait;\n\n\n\n use query_interface::*;\n\n\n\n #[service_trait]\n", "file_path": "socrates-test/src/lib.rs", "rank": 77, "score": 3.738772972419715 }, { "content": " }\n\n\n\n let struct_name = &input.ident;\n\n let struct_name_as_string = struct_name.to_string();\n\n\n\n let mut quoted_provides = Vec::new();\n\n for prov in provided.iter() {\n\n let prov_name = syn::parse_str::<syn::Expr>(&prov.to_string()).ok();;\n\n quoted_provides.push(quote! {\n\n socrates::component::definition::Provide {\n\n name: socrates::service::Service::get_name::<#prov_name>().to_string(),\n\n }\n\n });\n\n }\n\n\n\n let mut quoted_references = Vec::new();\n\n for rfe in references.iter() {\n\n let rfe_name = rfe.name.to_string();\n\n let rfe_svc_name = syn::parse_str::<syn::Expr>(&rfe.unqualified_svc_name.to_string()).ok();\n\n\n", "file_path": "socrates-macro/src/lib.rs", "rank": 78, "score": 3.7205132488881034 }, { "content": " #input\n\n\n\n impl socrates::service::Named for #trait_name {\n\n fn type_name() -> &'static str {\n\n concat!(module_path!(), \"::\", #trait_name_as_string)\n\n }\n\n }\n\n };\n\n\n\n let r: TokenStream = expanded.into();\n\n println!(\"{}\", r.to_string());\n\n r\n\n}\n", "file_path": "socrates-macro/src/lib.rs", "rank": 79, "score": 3.685797716040574 }, { "content": "use socrates::common::{EventListener, Listener};\n\nuse socrates::module::Container;\n\nuse socrates::service::ServiceEvent;\n\nuse socrates::Result;\n\n#[derive(Clone)]\n", "file_path": "socrates-launcher/src/main.rs", "rank": 80, "score": 3.6792172967528525 }, { "content": "\n\n #[inline(always)]\n\n fn deref(&self) -> &T {\n\n self.as_ref()\n\n }\n\n}\n\n\n\nuse std::convert::AsRef;\n\nimpl<T: Service + ?Sized> AsRef<T> for Svc<T> {\n\n #[inline(always)]\n\n fn as_ref(&self) -> &T {\n\n // This works as long as the framework holds a strong ref on our service\n\n // which is guaranteed by Svc's ref counting mechanism.\n\n // We use only Weak refs to allow dynamic cycles between services.\n\n let rc = self.service.as_ref().unwrap().upgrade().unwrap();\n\n unsafe { std::mem::transmute(rc.as_ref()) }\n\n }\n\n}\n\n\n\nimpl<T: Service + ?Sized> Drop for Svc<T> {\n\n fn drop(&mut self) {\n\n // Could be none if panic during Svc<dyn Service>::cast\n\n if let Some(ref svc_manager) = self.svc_manager.upgrade() {\n\n svc_manager.remove_use(self.service_id, self.user_id);\n\n }\n\n }\n\n}\n", "file_path": "socrates-core/src/service/svc.rs", "rank": 81, "score": 3.66531849074252 }, { "content": "\n\n fn is_satisfied(&self) -> bool {\n\n let references = self.references.read().clone();\n\n let mut satisfied = true;\n\n for ref rfe in self.definition.references.iter() {\n\n if rfe.options.cardinality == Cardinality::Mandatory\n\n && references\n\n .get(&rfe.name as &str)\n\n .and_then(|s| s.iter().next())\n\n .is_none()\n\n {\n\n satisfied = false;\n\n break;\n\n }\n\n }\n\n satisfied\n\n }\n\n\n\n pub fn query_registry(&self) {\n\n let context = self.context.as_ref().unwrap();\n", "file_path": "socrates-core/src/component/mod.rs", "rank": 82, "score": 3.561294128930492 }, { "content": "use std::ops::Deref;\n\nuse std::sync::{Arc, Weak};\n\n\n", "file_path": "socrates-core/src/common/listener.rs", "rank": 84, "score": 3.4839515742994887 }, { "content": " let greet_printer_def = ComponentDefinition {\n\n name: \"GreetPrinter\".into(),\n\n references: vec![Reference {\n\n name: \"Greeter\".into(),\n\n svc_name: <Greeter as Named>::type_name().into(),\n\n svc_query: ServiceQuery::by_type_id(Service::type_id::<Greeter>()),\n\n options: Default::default(),\n\n }],\n\n ..Default::default()\n\n };\n\n\n\n println!(\"{:?}\", formal_greeter_def);\n\n println!(\"{:?}\", greet_printer_def);\n\n }\n\n\n\n}\n", "file_path": "socrates-test/src/lib.rs", "rank": 85, "score": 3.483528329184689 }, { "content": "\n\n// println!(\"Instanciating me, {}\", \"MyConsumer\");\n\n// let _ctx = socrates::component::factory::build(ctx)?;\n\n// let greeter = socrates::component::factory::build(ctx)?;\n\n// let maybe_greeter = socrates::component::factory::build(ctx)?;\n\n// let greeters = socrates::component::factory::build(ctx)?;\n\n\n\n// let dyn_greeter = socrates::component::factory::build(ctx)?;\n\n// let dyn_maybe_greeter = socrates::component::factory::build(ctx)?;\n\n// let dyn_greeters = socrates::component::factory::build(ctx)?;\n\n\n\n// Some(MyConsumer {\n\n// _ctx,\n\n// greeter,\n\n// maybe_greeter,\n\n// greeters,\n\n\n\n// dyn_greeter,\n\n// dyn_maybe_greeter,\n\n// dyn_greeters\n", "file_path": "examples/example-consumer/src/lib.rs", "rank": 86, "score": 3.4404291987583395 }, { "content": "}\n\n\n\n#[derive(Debug, PartialEq, Eq, Ord, PartialOrd, Hash)]\n\npub struct TypeQuery<T: Service + ?Sized = dyn Service> {\n\n pub type_id: TypeId,\n\n _phantom: std::marker::PhantomData<T>,\n\n}\n\n\n\n// Must be implemented manually to ignore the fact that !(T: Clone)\n\nimpl<T: Service + ?Sized> Clone for TypeQuery<T> {\n\n fn clone(&self) -> TypeQuery<T> {\n\n TypeQuery {\n\n type_id: self.type_id,\n\n _phantom: std::marker::PhantomData,\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Service + ?Sized> TypeQuery<T> {\n\n #[inline(always)]\n", "file_path": "socrates-core/src/service/query.rs", "rank": 87, "score": 3.425896544048006 }, { "content": " service_id,\n\n user_id,\n\n }\n\n }\n\n}\n\n\n\nimpl Svc<dyn Service> {\n\n pub fn cast<U: Service + ?Sized>(mut self_: Self) -> std::result::Result<Svc<U>, Self> {\n\n let weak_srv = std::mem::replace(&mut self_.service, None);\n\n // the Arc reference is strongly held by the framework and cannot be none.\n\n match weak_srv\n\n .as_ref()\n\n .and_then(|x| x.upgrade())\n\n .and_then(|srv| Service::query_arc::<U>(srv).ok())\n\n {\n\n Some(srv) => {\n\n // note, transmute Svc<dyn Service> -> Svc<U> is not allowed\n\n // because rustc doesn't know they have the same size.\n\n\n\n let mgr = std::mem::replace(&mut self_.svc_manager, Weak::new());\n", "file_path": "socrates-core/src/service/svc.rs", "rank": 88, "score": 3.3997057668049346 }, { "content": "// WIP experimenting\n\n\n\n//#![feature(extern_crate_item_prelude)]\n\n//#![feature(custom_attribute)]\n\n// The `quote!` macro requires deep recursion.\n\n#![recursion_limit = \"512\"]\n\n\n\nextern crate proc_macro;\n\n\n\n#[macro_use]\n\nextern crate syn;\n\n#[macro_use]\n\nextern crate quote;\n\n\n\nuse syn::DeriveInput;\n\n\n\nuse proc_macro::TokenStream;\n\n\n\nuse socrates_core::component::*;\n\n\n", "file_path": "socrates-macro/src/lib.rs", "rank": 89, "score": 3.396663027230983 }, { "content": " })\n\n };\n\n\n\n let lifecycle_trait = if implement_lifecycle {\n\n Some(quote! {\n\n impl socrates::component::Lifecycle for #struct_name {\n\n\n\n }\n\n })\n\n } else {\n\n None\n\n };\n\n\n\n let expanded = quote! {\n\n #service_trait\n\n impl socrates::component::Component for #struct_name {\n\n fn get_definition() -> socrates::component::ComponentDefinition {\n\n socrates::component::ComponentDefinition {\n\n name: #struct_name_as_string.to_string(),\n\n provides: vec![ #(#quoted_provides),*],\n", "file_path": "socrates-macro/src/lib.rs", "rank": 90, "score": 3.3338382524398824 }, { "content": " if ident == \"provide\" {\n\n for nested in l.nested.iter() {\n\n if let syn::NestedMeta::Meta(syn::Meta::Word(svc_name)) = nested {\n\n provides.push(Provide {\n\n name: svc_name.to_string(),\n\n });\n\n provided.push(svc_name.clone());\n\n }\n\n }\n\n }\n\n }\n\n syn::Meta::Word(ident) => {\n\n let ident = ident.to_string();\n\n if ident == \"custom_lifecycle\" {\n\n implement_lifecycle = false;\n\n }\n\n }\n\n _ => (),\n\n }\n\n }\n", "file_path": "socrates-macro/src/lib.rs", "rank": 91, "score": 3.2988373259050805 }, { "content": " self.query_registry()\n\n }\n\n fn on_service_event(&self, event: &ServiceEvent) {\n\n self.on_service_event(event)\n\n }\n\n\n\n fn print_status(&self) {\n\n self.print_status();\n\n }\n\n}\n\n\n\npub struct ComponentInstance<T: Component> {\n\n registration: Option<ServiceRegistration>,\n\n component: T, //object: Svc<dyn Service>\n\n}\n\n\n\nimpl<T: Component> ComponentInstance<T> {\n\n fn new(registration: Option<ServiceRegistration>, component: T) -> ComponentInstance<T> {\n\n ComponentInstance {\n\n registration,\n", "file_path": "socrates-core/src/component/mod.rs", "rank": 92, "score": 3.273742829846466 }, { "content": " let card = syn::parse_str::<syn::Path>(&format!(\n\n \"socrates::component::definition::Cardinality::{:?}\",\n\n rfe.cardinality\n\n ))\n\n .ok();\n\n let pol = syn::parse_str::<syn::Path>(&format!(\n\n \"socrates::component::definition::Policy::{:?}\",\n\n rfe.policy\n\n ))\n\n .ok();\n\n let pol_opt = syn::parse_str::<syn::Path>(&format!(\n\n \"socrates::component::definition::PolicyOption::{:?}\",\n\n rfe.policy_option\n\n ))\n\n .ok();\n\n quoted_references.push(quote! {\n\n socrates::component::definition::Reference {\n\n name: #rfe_name.to_string(),\n\n svc_name: socrates::service::Service::get_name::<#rfe_svc_name>().into(),\n\n svc_query: socrates::service::query::ServiceQuery::by_type_id(socrates::service::Service::type_id::<#rfe_svc_name>()),\n", "file_path": "socrates-macro/src/lib.rs", "rank": 93, "score": 3.1059069715893797 }, { "content": "pub mod common;\n\npub mod component;\n\npub mod module;\n\npub mod service;\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n StrError(String),\n\n IoError(std::io::Error),\n\n}\n\n\n\nimpl From<&str> for Error {\n\n fn from(error: &str) -> Self {\n\n Error::StrError(error.into())\n\n }\n\n}\n\n\n\nimpl From<String> for Error {\n\n fn from(error: String) -> Self {\n\n Error::StrError(error)\n", "file_path": "socrates-core/src/lib.rs", "rank": 94, "score": 3.069167004802332 }, { "content": "}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]\n\npub struct ReferenceOptions {\n\n pub cardinality: Cardinality,\n\n pub policy: Policy,\n\n pub policy_option: PolicyOption,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub enum Cardinality {\n\n Optional,\n\n Mandatory,\n\n Multiple,\n\n}\n\n\n\nimpl Default for Cardinality {\n\n fn default() -> Cardinality {\n\n Cardinality::Mandatory\n\n }\n", "file_path": "socrates-core/src/component/definition.rs", "rank": 95, "score": 3.003446083928621 }, { "content": " self.track_change(|references| {\n\n let mut changed = false;\n\n for ref rfe in self.definition.references.iter() {\n\n for service_ref in context.get_all_services_ref_by_query(&rfe.svc_query) {\n\n changed = true;\n\n let entry = references\n\n .inner\n\n .entry(rfe.name.clone().into())\n\n .or_insert(im::OrdSet::new());\n\n entry.insert(service_ref.core.into());\n\n }\n\n }\n\n changed\n\n });\n\n }\n\n\n\n pub fn on_service_event(&self, event: &ServiceEvent) {\n\n self.track_change(|references| {\n\n let mut changed = false;\n\n for ref rfe in self.definition.references.iter() {\n", "file_path": "socrates-core/src/component/mod.rs", "rank": 97, "score": 2.8731213503931987 }, { "content": "## Roadmap\n\n\n\n- [x] Basic container\n\n- [x] Dynamic services\n\n- [x] Small demo\n\n- [ ] More tests! \n\n- [ ] More documentation!\n\n- [ ] More refactoring \n\n- [ ] Lifecycle state, etc\n\n- [ ] Configuration management (using JSON + serde on custom structs?)\n\n- [ ] More service properties\n\n - [x] service ranking/ordering supported\n\n- [ ] Lazy/Factories and Prototype services \n\n- [ ] Manifests for shared objects\n\n- [ ] Stop requiring `#[no_mangle]`, define a real interface for instantiation (activators, service components)\n\n- [ ] Compatibility resolution and inspection at install\n\n- [ ] Lazy loading of shared objects (by get_service)\n\n- [ ] programmable, extensible Service Component Framework\n\n * dependency injection\n\n * (a better replacement than ServiceTrackers)\n\n * plugin any kind of events (not only service events & configuration)\n\n * should work also for any Rust application without dynamic loading.\n\n- [ ] Declarative bindings for the component framework (using macros or derive)\n\n- [ ] APIs for everything\n\n * Use it to build hot reload + watcher?\n\n- [ ] More non-blocking stuff (integrate with futures-rs)\n\n - [ ] e.g event dispatch, activate / deactivate methods \n\n - [ ] reusable base services, e.g a tokio core event loop running on futures provided by services\n\n- [ ] A blog post/series?\n\n\n\n\n\n## License\n\n\n\nApache Software License 2.0\n\n\n\n## Credits\n\n\n\nBuilt on top of the libloading and query_interface crates. Thanks!\n\nThanks to everyone helping on IRC, and especially mbrubeck & talchas :-)\n\n\n", "file_path": "README.md", "rank": 98, "score": 2.7587342858706556 }, { "content": "\n\n active_manager.query_registry();\n\n\n\n Ok(ComponentManagerHandler {\n\n manager: active_manager,\n\n })\n\n }\n\n\n\n pub fn get_manager(&self) -> &ComponentManager {\n\n self.manager.deref()\n\n }\n\n\n\n pub fn boxed(self) -> Box<Self> {\n\n Box::new(self)\n\n }\n\n}\n", "file_path": "socrates-core/src/component/mod.rs", "rank": 99, "score": 2.684419753482614 } ]
Rust
src/gf/gf_num.rs
irreducible-polynoms/irrpoly-rust
b4d5e023fb02ddf32e4fd56b417a5cc89b2295f2
use crate::Gf; use std::vec::Vec; use std::fmt; use std::ops; use std::cmp; #[derive(Debug, Clone)] pub struct GfNum { field: Gf, num: usize, } impl fmt::Display for GfNum { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.num) } } impl GfNum { pub fn new(field: &Gf, value: usize) -> GfNum { let field = Gf::clone(field); let value = value % field.base(); GfNum {field, num: value } } pub fn from_vec(field: &Gf, value: Vec<usize>) -> Vec<GfNum> { value.iter().map(|x| GfNum::new(field, *x) ).collect() } pub fn into_num(self) -> usize { self.num } pub fn field(&self) -> &Gf { &self.field } pub fn num(&self) -> usize { self.num } pub fn mul_inv(&self) -> Self { let field = Gf::clone(self.field()); let value = field.mul_inv(self.num) .expect("Multiplicative inverse for zero do not exist"); GfNum {field, num: value } } pub fn is_zero(&self) -> bool { self.num == 0 } } impl ops::Add<GfNum> for GfNum { type Output = GfNum; fn add(self, rhs: GfNum) -> GfNum { debug_assert_eq!(self.field(), rhs.field(), "Numbers from different fields"); let base = self.field().base(); GfNum { field: self.field, num: (self.num + rhs.num) % base } } } impl ops::Add<usize> for GfNum { type Output = GfNum; fn add(self, rhs: usize) -> GfNum { let base = self.field().base(); GfNum { field: self.field, num: (self.num + (rhs % base)) % base } } } impl ops::Add<GfNum> for usize { type Output = GfNum; fn add(self, rhs: GfNum) -> GfNum { let base = rhs.field().base(); GfNum { field: rhs.field, num: (rhs.num + (self % base)) % base } } } impl ops::AddAssign<GfNum> for GfNum { fn add_assign(&mut self, other: GfNum) { debug_assert_eq!(self.field(), other.field(), "Numbers from different fields"); let base = self.field().base(); self.num = (self.num + other.num) % base; } } impl ops::AddAssign<usize> for GfNum { fn add_assign(&mut self, other: usize) { let base = self.field().base(); self.num = (self.num + (other % base)) % base; } } impl ops::Sub<GfNum> for GfNum { type Output = GfNum; fn sub(self, rhs: GfNum) -> GfNum { debug_assert_eq!(self.field(), rhs.field(), "Numbers from different fields"); let base = self.field().base(); GfNum { field: self.field, num: (base + self.num - rhs.num) % base } } } impl ops::Sub<usize> for GfNum { type Output = GfNum; fn sub(self, rhs: usize) -> GfNum { let base = self.field().base(); GfNum { field: self.field, num: (base + self.num - (rhs % base)) % base } } } impl ops::Sub<GfNum> for usize { type Output = GfNum; fn sub(self, rhs: GfNum) -> GfNum { let base = rhs.field().base(); GfNum { field: rhs.field, num: (base + rhs.num - (self % base)) % base } } } impl ops::SubAssign<GfNum> for GfNum { fn sub_assign(&mut self, other: GfNum) { debug_assert_eq!(self.field(), other.field(), "Numbers from different fields"); let base = self.field().base(); self.num = (base + self.num - other.num) % base; } } impl ops::SubAssign<usize> for GfNum { fn sub_assign(&mut self, other: usize) { let base = self.field().base(); self.num = (base + self.num - (other % base)) % base; } } impl ops::Neg for GfNum { type Output = GfNum; fn neg(self) -> GfNum { let base = self.field().base(); GfNum { field: self.field, num: base - self.num } } } impl ops::Mul<GfNum> for GfNum { type Output = GfNum; fn mul(self, rhs: GfNum) -> GfNum { debug_assert_eq!(self.field(), rhs.field(), "Numbers from different fields"); let base = self.field().base(); GfNum { field: self.field, num: (self.num * rhs.num) % base } } } impl ops::Mul<usize> for GfNum { type Output = GfNum; fn mul(self, rhs: usize) -> GfNum { let base = self.field().base(); GfNum { field: self.field, num: (self.num * (rhs % base)) % base } } } impl ops::Mul<GfNum> for usize { type Output = GfNum; fn mul(self, rhs: GfNum) -> GfNum { let base = rhs.field().base(); GfNum { field: rhs.field, num: (rhs.num * (self % base)) % base } } } impl ops::MulAssign<GfNum> for GfNum { fn mul_assign(&mut self, other: GfNum) { debug_assert_eq!(self.field(), other.field(), "Numbers from different fields"); let base = self.field().base(); self.num = (self.num * other.num) % base; } } impl ops::MulAssign<usize> for GfNum { fn mul_assign(&mut self, other: usize) { let base = self.field().base(); self.num = (self.num * (other % base)) % base; } } impl ops::Div<GfNum> for GfNum { type Output = GfNum; fn div(self, rhs: GfNum) -> GfNum { debug_assert_eq!(self.field(), rhs.field(), "Numbers from different fields"); let base = self.field().base(); let inv = self.field().mul_inv(rhs.num).unwrap(); GfNum { field: self.field, num: (self.num * inv) % base } } } impl ops::Div<usize> for GfNum { type Output = GfNum; fn div(self, rhs: usize) -> GfNum { let base = self.field().base(); let inv = self.field().mul_inv(rhs).unwrap(); GfNum { field: self.field, num: (self.num * inv) % base } } } impl ops::Div<GfNum> for usize { type Output = GfNum; fn div(self, rhs: GfNum) -> GfNum { let base = rhs.field().base(); let inv = rhs.field().mul_inv(rhs.num).unwrap(); GfNum { field: rhs.field, num: (self * inv) % base } } } impl ops::DivAssign<GfNum> for GfNum { fn div_assign(&mut self, other: GfNum) { debug_assert_eq!(self.field(), other.field(), "Numbers from different fields"); let base = self.field().base(); let inv = self.field().mul_inv(other.num).unwrap(); self.num = (self.num * inv) % base; } } impl ops::DivAssign<usize> for GfNum { fn div_assign(&mut self, other: usize) { let base = self.field().base(); let inv = self.field().mul_inv(other).unwrap(); self.num = (self.num * inv) % base; } } impl cmp::PartialEq<GfNum> for GfNum { fn eq (&self, other: &GfNum) -> bool { debug_assert_eq!(self.field(), other.field(), "Numbers from different fields"); self.num() == other.num() } } impl cmp::PartialEq<usize> for GfNum { fn eq (&self, other: &usize) -> bool { let base = self.field().base(); self.num() == other % base } } impl cmp::PartialEq<GfNum> for usize { fn eq (&self, other: &GfNum) -> bool { let base = other.field().base(); self % base == other.num() } } impl cmp::Eq for GfNum {} impl cmp::PartialOrd<GfNum> for GfNum { fn partial_cmp(&self, other: &GfNum) -> Option<cmp::Ordering> { debug_assert_eq!(self.field(), other.field(), "Numbers from different fields"); self.num.partial_cmp(&other.num) } } impl cmp::PartialOrd<usize> for GfNum { fn partial_cmp(&self, other: &usize) -> Option<cmp::Ordering> { let base = self.field().base(); self.num.partial_cmp(&(other % base)) } } impl cmp::PartialOrd<GfNum> for usize { fn partial_cmp(&self, other: &GfNum) -> Option<cmp::Ordering> { let base = other.field().base(); (self % base).partial_cmp(&other.num) } } impl cmp::Ord for GfNum { fn cmp(&self, other: &GfNum) -> cmp::Ordering { debug_assert_eq!(self.field(), other.field(), "Numbers from different fields"); self.num.cmp(&other.num) } }
use crate::Gf; use std::vec::Vec; use std::fmt; use std::ops; use std::cmp; #[derive(Debug, Clone)] pub struct GfNum { field: Gf, num: usize, } impl fmt::Display for GfNum { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.num) } } impl GfNum { pub fn new(field: &Gf, value: usize) -> GfNum { let field = Gf::clone(field); let value = value % field.base(); GfNum {field, num: value } } pub fn from_vec(field: &Gf, value: Vec<usize>) -> Vec<GfNum> { value.iter().map(|x| GfNum::new(field, *x) ).collect() } pub fn into_num(self) -> usize { self.num } pub fn field(&self) -> &Gf { &self.field } pub fn num(&self) -> usize { self.num } pub fn mul_inv(&self) -> Self { let field = Gf::clone(self.field()); let value = field.mul_inv(self.num) .expect("Multiplicative inverse for zero do not exist"); GfNum {field, num: value } } pub fn is_zero(&self) -> bool { self.num == 0 } } impl ops::Add<GfNum> for GfNum { type Output = GfNum; fn add(self, rhs: GfNum) -> GfNum { debug_assert_eq!(self.field(), rhs.field(), "Numbers from different fields"); let base = self.field().base(); GfNum { field: self.field, num: (self.num + rhs.num) % base } } } impl ops::Add<usize> for GfNum { type Output = GfNum; fn add(self, rhs: usize) -> GfNum { let base = self.field().base(); GfNum { field: self.field, num: (self.num + (rhs % base)) % base } } } impl ops::Add<GfNum> for usize { type Output = GfNum; fn add(self, rhs: GfNum) -> GfNum { let base = rhs.field().base(); GfNum { field: rhs.field, num: (rhs.num + (self % base)) % base } } } impl ops::AddAssign<GfNum> for GfNum { fn add_assign(&mut self, other: GfNum) { debug_assert_eq!(self.field(), other.field(), "Numbers from different fields"); let base = self.field().base(); self.num = (self.num + other.num) % base; } } impl ops::AddAssign<usize> for GfNum { fn add_assign(&mut self, other: usize) { let base = self.field().base(); self.num = (self.num + (other % base)) % base; } } impl ops::Sub<GfNum> for GfNum { type Output = GfNum; fn sub(self, rhs: GfNum) -> GfNum { debug_assert_eq!(self.field(), rhs.field(), "Numbers from different fields"); let base = self.field().base(); GfNum { field: self.field, num: (base + self.num - rhs.num) % base } } } impl ops::Sub<usize> for GfNum { type Output = GfNum; fn sub(self, rhs: usize) -> GfNum { let base = self.field().base(); GfNum { field: self.field, num: (base + self.num - (rhs % base)) % base } } } impl ops::Sub<GfNum> for usize { type Output = GfNum; fn sub(self, rhs: GfNum) -> GfNum { let base = rh
} impl ops::SubAssign<GfNum> for GfNum { fn sub_assign(&mut self, other: GfNum) { debug_assert_eq!(self.field(), other.field(), "Numbers from different fields"); let base = self.field().base(); self.num = (base + self.num - other.num) % base; } } impl ops::SubAssign<usize> for GfNum { fn sub_assign(&mut self, other: usize) { let base = self.field().base(); self.num = (base + self.num - (other % base)) % base; } } impl ops::Neg for GfNum { type Output = GfNum; fn neg(self) -> GfNum { let base = self.field().base(); GfNum { field: self.field, num: base - self.num } } } impl ops::Mul<GfNum> for GfNum { type Output = GfNum; fn mul(self, rhs: GfNum) -> GfNum { debug_assert_eq!(self.field(), rhs.field(), "Numbers from different fields"); let base = self.field().base(); GfNum { field: self.field, num: (self.num * rhs.num) % base } } } impl ops::Mul<usize> for GfNum { type Output = GfNum; fn mul(self, rhs: usize) -> GfNum { let base = self.field().base(); GfNum { field: self.field, num: (self.num * (rhs % base)) % base } } } impl ops::Mul<GfNum> for usize { type Output = GfNum; fn mul(self, rhs: GfNum) -> GfNum { let base = rhs.field().base(); GfNum { field: rhs.field, num: (rhs.num * (self % base)) % base } } } impl ops::MulAssign<GfNum> for GfNum { fn mul_assign(&mut self, other: GfNum) { debug_assert_eq!(self.field(), other.field(), "Numbers from different fields"); let base = self.field().base(); self.num = (self.num * other.num) % base; } } impl ops::MulAssign<usize> for GfNum { fn mul_assign(&mut self, other: usize) { let base = self.field().base(); self.num = (self.num * (other % base)) % base; } } impl ops::Div<GfNum> for GfNum { type Output = GfNum; fn div(self, rhs: GfNum) -> GfNum { debug_assert_eq!(self.field(), rhs.field(), "Numbers from different fields"); let base = self.field().base(); let inv = self.field().mul_inv(rhs.num).unwrap(); GfNum { field: self.field, num: (self.num * inv) % base } } } impl ops::Div<usize> for GfNum { type Output = GfNum; fn div(self, rhs: usize) -> GfNum { let base = self.field().base(); let inv = self.field().mul_inv(rhs).unwrap(); GfNum { field: self.field, num: (self.num * inv) % base } } } impl ops::Div<GfNum> for usize { type Output = GfNum; fn div(self, rhs: GfNum) -> GfNum { let base = rhs.field().base(); let inv = rhs.field().mul_inv(rhs.num).unwrap(); GfNum { field: rhs.field, num: (self * inv) % base } } } impl ops::DivAssign<GfNum> for GfNum { fn div_assign(&mut self, other: GfNum) { debug_assert_eq!(self.field(), other.field(), "Numbers from different fields"); let base = self.field().base(); let inv = self.field().mul_inv(other.num).unwrap(); self.num = (self.num * inv) % base; } } impl ops::DivAssign<usize> for GfNum { fn div_assign(&mut self, other: usize) { let base = self.field().base(); let inv = self.field().mul_inv(other).unwrap(); self.num = (self.num * inv) % base; } } impl cmp::PartialEq<GfNum> for GfNum { fn eq (&self, other: &GfNum) -> bool { debug_assert_eq!(self.field(), other.field(), "Numbers from different fields"); self.num() == other.num() } } impl cmp::PartialEq<usize> for GfNum { fn eq (&self, other: &usize) -> bool { let base = self.field().base(); self.num() == other % base } } impl cmp::PartialEq<GfNum> for usize { fn eq (&self, other: &GfNum) -> bool { let base = other.field().base(); self % base == other.num() } } impl cmp::Eq for GfNum {} impl cmp::PartialOrd<GfNum> for GfNum { fn partial_cmp(&self, other: &GfNum) -> Option<cmp::Ordering> { debug_assert_eq!(self.field(), other.field(), "Numbers from different fields"); self.num.partial_cmp(&other.num) } } impl cmp::PartialOrd<usize> for GfNum { fn partial_cmp(&self, other: &usize) -> Option<cmp::Ordering> { let base = self.field().base(); self.num.partial_cmp(&(other % base)) } } impl cmp::PartialOrd<GfNum> for usize { fn partial_cmp(&self, other: &GfNum) -> Option<cmp::Ordering> { let base = other.field().base(); (self % base).partial_cmp(&other.num) } } impl cmp::Ord for GfNum { fn cmp(&self, other: &GfNum) -> cmp::Ordering { debug_assert_eq!(self.field(), other.field(), "Numbers from different fields"); self.num.cmp(&other.num) } }
s.field().base(); GfNum { field: rhs.field, num: (base + rhs.num - (self % base)) % base } }
function_block-function_prefixed
[ { "content": "#[test]\n\nfn not_a_field() {\n\n assert!(Gf::new(0).is_err());\n\n assert!(Gf::new(1).is_err());\n\n assert!(Gf::new(4).is_err());\n\n assert!(Gf::new(isize::max_value() as usize).is_err());\n\n}\n\n\n", "file_path": "src/gf/tests.rs", "rank": 0, "score": 54802.981125424296 }, { "content": "#[test]\n\n#[should_panic]\n\nfn gfn_division_by_zero() {\n\n let gf2 = Gf::new(2).unwrap();\n\n let mut num = GfNum::new(&gf2, 0);\n\n num /= 0;\n\n}\n\n\n", "file_path": "src/gf/tests.rs", "rank": 1, "score": 50194.56280996447 }, { "content": "#[test]\n\n#[should_panic]\n\nfn mul_inv_for_zero() {\n\n let gf2 = Gf::new(2).unwrap();\n\n let num = GfNum::new(&gf2, 0);\n\n num.mul_inv();\n\n}\n\n\n", "file_path": "src/gf/tests.rs", "rank": 2, "score": 50194.56280996447 }, { "content": "#[test]\n\n#[should_panic]\n\nfn gf_poly_degree_for_zero() {\n\n let gf5 = Gf::new(5).unwrap();\n\n let poly = GfPoly::new(&gf5, 5);\n\n poly.deg();\n\n}\n", "file_path": "src/gf_poly/tests.rs", "rank": 3, "score": 49286.28424458546 }, { "content": "#[test]\n\nfn gf_works() {\n\n let gf2 = Gf::new(2);\n\n assert!(gf2.is_ok());\n\n let gf2 = gf2.unwrap();\n\n assert_eq!(gf2.base(), 2);\n\n}\n\n\n", "file_path": "src/gf/tests.rs", "rank": 4, "score": 34862.578204158606 }, { "content": "type Result<T> = std::result::Result<T, GfError>;\n\n\n\n#[derive(Debug)]\n\npub struct GfBase {\n\n base: usize,\n\n inv: Box<[usize]>,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Gf(Arc<GfBase>);\n\n\n\nimpl ops::Deref for Gf {\n\n type Target = Arc<GfBase>;\n\n\n\n fn deref(&self) -> &Self::Target { &self.0 }\n\n}\n\n\n\nimpl Gf {\n\n pub fn new(base: usize) -> Result<Gf> {\n\n let field = GfBase::new(base)?;\n", "file_path": "src/gf/gf.rs", "rank": 5, "score": 32704.07879118123 }, { "content": "#[test]\n\nfn gf_poly_works() {\n\n let gf5 = Gf::new(5).unwrap();\n\n\n\n let mut output = String::new();\n\n let poly = GfPoly::new(&gf5, 3);\n\n write!(&mut output, \"{}\", poly).unwrap();\n\n assert_eq!(output, \"[ 3 ]\");\n\n\n\n let mut output = String::new();\n\n let num = GfNum::new(&gf5, 5);\n\n let poly = GfPoly::from_gf_num(num);\n\n write!(&mut output, \"{}\", poly).unwrap();\n\n assert_eq!(output, \"[ ]\");\n\n\n\n let mut output = String::new();\n\n let poly = GfPoly::from_vec(&gf5, vec![1, 2, 3, 0, 0]);\n\n write!(&mut output, \"{}\", poly).unwrap();\n\n assert_eq!(output, \"[ 1, 2, 3 ]\");\n\n assert_eq!(poly.field(), &gf5);\n\n assert_eq!(poly.poly(), &GfNum::from_vec(&gf5, vec![1, 2, 3])[..]);\n", "file_path": "src/gf_poly/tests.rs", "rank": 6, "score": 32652.8048974268 }, { "content": "#[test]\n\nfn gfn_works() {\n\n let gf5 = Gf::new(5).unwrap();\n\n let num = GfNum::new(&gf5, 7);\n\n assert_eq!(num.field(), &gf5);\n\n assert_eq!(num.num(), 2);\n\n}\n\n\n", "file_path": "src/gf/tests.rs", "rank": 7, "score": 32217.252159617918 }, { "content": "#[test]\n\nfn gfn_comparison() {\n\n let gf5 = Gf::new(5).unwrap();\n\n\n\n let gfn3 = GfNum::new(&gf5, 3);\n\n let gfn4 = GfNum::new(&gf5, 4);\n\n\n\n assert_eq!(gfn3, gfn3);\n\n assert_eq!(gfn3, 8);\n\n assert_eq!(8, gfn3);\n\n\n\n assert!(gfn3 < gfn4);\n\n assert!(gfn3 < 9);\n\n assert!(8 < gfn4);\n\n\n\n assert!(!gfn3.is_zero())\n\n}", "file_path": "src/gf/tests.rs", "rank": 8, "score": 32217.252159617918 }, { "content": "#[test]\n\nfn inverse_correct() {\n\n let gf5 = Gf::new(5).unwrap();\n\n assert!(gf5.mul_inv(0).is_none());\n\n assert_eq!(gf5.mul_inv(1).unwrap(), 1);\n\n assert_eq!(gf5.mul_inv(2).unwrap(), 3);\n\n assert_eq!(gf5.mul_inv(3).unwrap(), 2);\n\n assert_eq!(gf5.mul_inv(4).unwrap(), 4);\n\n}\n\n\n", "file_path": "src/gf/tests.rs", "rank": 9, "score": 32217.252159617918 }, { "content": "#[test]\n\nfn gfn_sum_works() {\n\n let gf5 = Gf::new(5).unwrap();\n\n\n\n let gfn3 = GfNum::new(&gf5, 3);\n\n let gfn4 = GfNum::new(&gf5, 4);\n\n\n\n let gfn2 = gfn3 + gfn4;\n\n assert_eq!(gfn2.num(), 2);\n\n let gfn3 = gfn2 + 1;\n\n assert_eq!(gfn3.num(), 3);\n\n let gfn0 = 2 + gfn3;\n\n assert_eq!(gfn0.num(), 0);\n\n\n\n let mut num = gfn0;\n\n let gfn1 = GfNum::new(&gf5, 1);\n\n\n\n num += gfn1;\n\n assert_eq!(num.num(), 1);\n\n num += 1;\n\n assert_eq!(num.num(), 2);\n\n}\n\n\n", "file_path": "src/gf/tests.rs", "rank": 10, "score": 31023.157211942747 }, { "content": "#[test]\n\nfn gfn_sub_works() {\n\n let gf5 = Gf::new(5).unwrap();\n\n\n\n let gfn3 = GfNum::new(&gf5, 3);\n\n let gfn4 = GfNum::new(&gf5, 4);\n\n\n\n let gfn4 = gfn3 - gfn4;\n\n assert_eq!(gfn4.num(), 4);\n\n let gfn3 = gfn4 - 1;\n\n assert_eq!(gfn3.num(), 3);\n\n let gfn1 = 2 - gfn3;\n\n assert_eq!(gfn1.num(), 1);\n\n\n\n let mut num = gfn1;\n\n let gfn1 = GfNum::new(&gf5, 1);\n\n\n\n num -= gfn1;\n\n assert_eq!(num.num(), 0);\n\n num -= 2;\n\n assert_eq!(num.num(), 3);\n\n}\n\n\n", "file_path": "src/gf/tests.rs", "rank": 11, "score": 31023.157211942747 }, { "content": "#[test]\n\nfn gfn_div_works() {\n\n let gf5 = Gf::new(5).unwrap();\n\n\n\n let gfn3 = GfNum::new(&gf5, 3);\n\n let gfn4 = GfNum::new(&gf5, 4);\n\n\n\n let gfn2 = gfn3 / gfn4;\n\n assert_eq!(gfn2.num(), 2);\n\n let gfn1 = gfn2 / 2;\n\n assert_eq!(gfn1.num(), 1);\n\n let gfn3 = 3 / gfn1;\n\n assert_eq!(gfn3.num(), 3);\n\n\n\n let mut num = gfn3;\n\n let gfn2 = GfNum::new(&gf5, 2);\n\n\n\n num /= gfn2;\n\n assert_eq!(num.num(), 4);\n\n num /= 2;\n\n assert_eq!(num.num(), 2);\n\n}\n\n\n", "file_path": "src/gf/tests.rs", "rank": 12, "score": 31023.157211942747 }, { "content": "#[test]\n\nfn gfn_neg_works() {\n\n let gf5 = Gf::new(5).unwrap();\n\n\n\n let gfn4 = GfNum::new(&gf5, 4);\n\n\n\n let gfn1 = -gfn4;\n\n assert_eq!(gfn1.num(), 1);\n\n}\n\n\n", "file_path": "src/gf/tests.rs", "rank": 13, "score": 31023.157211942747 }, { "content": "#[test]\n\nfn gfn_mul_works() {\n\n let gf5 = Gf::new(5).unwrap();\n\n\n\n let gfn3 = GfNum::new(&gf5, 3);\n\n let gfn4 = GfNum::new(&gf5, 4);\n\n\n\n let gfn2 = gfn3 * gfn4;\n\n assert_eq!(gfn2.num(), 2);\n\n let gfn4 = gfn2 * 2;\n\n assert_eq!(gfn4.num(), 4);\n\n let gfn3 = 2 * gfn4;\n\n assert_eq!(gfn3.num(), 3);\n\n\n\n let mut num = gfn3;\n\n let gfn2 = GfNum::new(&gf5, 2);\n\n\n\n num *= gfn2;\n\n assert_eq!(num.num(), 1);\n\n num *= 0;\n\n assert_eq!(num.num(), 0);\n\n}\n\n\n", "file_path": "src/gf/tests.rs", "rank": 14, "score": 31023.157211942747 }, { "content": " Ok(Gf(Arc::new(field)))\n\n }\n\n pub fn clone(field: &Gf) -> Self { Gf(Arc::clone(&field.0)) }\n\n}\n\n\n\nimpl cmp::PartialEq for Gf {\n\n fn eq (&self, other: &Gf) -> bool { self.base == other.base }\n\n}\n\n\n\nimpl cmp::Eq for Gf {}\n\n\n\nimpl GfBase {\n\n fn new(base: usize) -> Result<GfBase> {\n\n if base == 0 { return Err(GfError::EmptyField) }\n\n if base == 1 { return Err(GfError::ZeroField) }\n\n // guarantees that base could be safely converted to isize\n\n if usize::max_value() / (base - 1) < (base - 1) {\n\n return Err(GfError::TooLargeField(base))\n\n }\n\n\n", "file_path": "src/gf/gf.rs", "rank": 31, "score": 11761.879141269344 }, { "content": "use std::boxed::Box;\n\nuse std::sync::Arc;\n\nuse std::ops;\n\nuse std::fmt;\n\nuse std::vec;\n\nuse std::cmp;\n\n\n\n#[derive(Debug)]\n\npub enum GfError {\n\n EmptyField,\n\n ZeroField,\n\n TooLargeField(usize),\n\n NotAField(usize),\n\n}\n\n\n\nimpl fmt::Display for GfError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match *self {\n\n GfError::EmptyField =>\n\n write!(f, \"GF[0] is empty\"),\n", "file_path": "src/gf/gf.rs", "rank": 32, "score": 11758.843323798412 }, { "content": " GfError::ZeroField =>\n\n write!(f, \"GF[1] can contain only zero\"),\n\n GfError::TooLargeField(base) =>\n\n write!(f, \"GF[{}] is too large\", base),\n\n GfError::NotAField(base) =>\n\n write!(f, \"field GF[{}] do not exist\", base),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/gf/gf.rs", "rank": 33, "score": 11758.260691374599 }, { "content": " inv[i] = tmp;\n\n inv[tmp] = i;\n\n }\n\n\n\n let inv = inv.into_boxed_slice();\n\n Ok(GfBase {base, inv})\n\n }\n\n\n\n pub fn base(&self) -> usize { self.base }\n\n\n\n pub fn mul_inv(&self, val: usize) -> Option<usize> {\n\n match val % self.base {\n\n 0 => None,\n\n i => Some(self.inv[i]),\n\n }\n\n }\n\n}\n", "file_path": "src/gf/gf.rs", "rank": 34, "score": 11755.252285111323 }, { "content": " let i_base = base as isize;\n\n fn inv_calc(base: isize, val: isize) -> Result<usize> {\n\n let (mut u0, mut u1, mut u2) = (base, 1isize, 0isize);\n\n let (mut v0, mut v1, mut v2) = (val, 0isize, 1isize);\n\n let (mut w0, mut w1, mut w2); let mut q;\n\n\n\n while v0 > 0 {\n\n q = u0 / v0;\n\n w0 = u0 - q * v0; w1 = u1 - q * v1; w2 = u2 - q * v2;\n\n u0 = v0; u1 = v1; u2 = v2; v0 = w0; v1 = w1; v2 = w2;\n\n }\n\n if u0 > 1 { return Err(GfError::NotAField(base as usize)) }\n\n Ok(if u2 < 0 { base + u2 } else { u2 } as usize)\n\n }\n\n\n\n let mut inv = vec![0; base];\n\n inv[1] = 1;\n\n for i in 2..base {\n\n if inv[i] != 0 { continue; }\n\n let tmp = inv_calc(i_base, i as isize)?;\n", "file_path": "src/gf/gf.rs", "rank": 35, "score": 11754.889078997085 }, { "content": " write!(f, \" ]\")\n\n }\n\n}\n\n\n\nimpl GfPoly {\n\n pub fn new(field: &Gf, value: usize) -> GfPoly {\n\n let mut poly = GfPoly {\n\n field: Gf::clone(field),\n\n poly: vec![GfNum::new(field, value)]\n\n };\n\n poly.normalize();\n\n poly\n\n }\n\n\n\n pub fn from_gf_num(value: GfNum) -> GfPoly {\n\n let mut poly = GfPoly {\n\n field: Gf::clone(value.field()),\n\n poly: vec![value]\n\n };\n\n poly.normalize();\n", "file_path": "src/gf_poly/gf_poly.rs", "rank": 36, "score": 11046.088548658334 }, { "content": " poly\n\n }\n\n\n\n pub fn from_vec(field: &Gf, value: Vec<usize>) -> GfPoly {\n\n let mut poly = GfPoly {\n\n field: Gf::clone(field),\n\n poly: GfNum::from_vec(field, value)\n\n };\n\n poly.normalize();\n\n poly\n\n }\n\n\n\n pub fn into_vec(self) -> Vec<usize> {\n\n self.poly.into_iter().map(|x| x.into_num() ).collect()\n\n }\n\n\n\n pub fn into_gf_num_vec(self) -> Vec<GfNum> { self.poly }\n\n\n\n pub fn field(&self) -> &Gf { &self.field }\n\n\n", "file_path": "src/gf_poly/gf_poly.rs", "rank": 37, "score": 11045.847927471043 }, { "content": " pub fn poly(&self) -> &[GfNum] { &self.poly }\n\n\n\n pub fn len(&self) -> usize { self.poly.len() }\n\n\n\n pub fn deg(&self) -> usize {\n\n assert!(self.poly.len() > 0, \"Degree is undefined for zero polynomial\");\n\n self.poly.len() - 1\n\n }\n\n\n\n pub fn is_zero(&self) -> bool { self.poly.is_empty() }\n\n\n\n pub fn set_zero(&mut self) { self.poly.clear(); }\n\n\n\n pub fn normalize(&mut self) {\n\n if let Some(i) = self.poly.iter().rposition(|x| !x.is_zero()) {\n\n self.poly.truncate(i + 1);\n\n } else { self.poly.clear(); }\n\n }\n\n}\n", "file_path": "src/gf_poly/gf_poly.rs", "rank": 38, "score": 11044.412890654716 }, { "content": "use crate::{Gf, GfNum};\n\n\n\nuse std::vec::Vec;\n\nuse std::fmt;\n\nuse std::ops;\n\nuse std::cmp;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct GfPoly {\n\n field: Gf,\n\n poly: Vec<GfNum>,\n\n}\n\n\n\nimpl fmt::Display for GfPoly {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"[ \")?;\n\n for i in 0..self.poly.len() {\n\n if i > 0 { write!(f, \", \")?; }\n\n write!(f, \"{}\", self.poly[i])?;\n\n }\n", "file_path": "src/gf_poly/gf_poly.rs", "rank": 39, "score": 11043.462311455904 }, { "content": "mod gf;\n\nmod gf_num;\n\n\n\npub use gf::*;\n\npub use gf_num::*;\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "src/gf/mod.rs", "rank": 40, "score": 9251.524598405866 }, { "content": "use crate::{Gf, GfNum};\n\n\n\n#[test]\n", "file_path": "src/gf/tests.rs", "rank": 41, "score": 9249.153544166593 }, { "content": "use crate::{Gf, GfNum, GfPoly};\n\n\n\nuse std::fmt::Write;\n\nuse std::vec;\n\n\n\n#[test]\n", "file_path": "src/gf_poly/tests.rs", "rank": 42, "score": 8801.440056793263 }, { "content": " assert_eq!(poly.len(), 3);\n\n assert_eq!(poly.deg(), 2);\n\n assert_eq!(poly.into_vec(), vec![1, 2, 3]);\n\n\n\n let mut poly = GfPoly::from_vec(&gf5, vec![1, 2, 3, 0, 0]);\n\n assert_eq!(poly.into_gf_num_vec(), GfNum::from_vec(&gf5, vec![1, 2, 3]));\n\n\n\n let mut poly = GfPoly::from_vec(&gf5, vec![1, 2, 3, 0, 0]);\n\n poly.set_zero();\n\n assert!(poly.is_zero());\n\n}\n\n\n", "file_path": "src/gf_poly/tests.rs", "rank": 43, "score": 8801.283784768946 }, { "content": "mod gf_poly;\n\n\n\npub use gf_poly::*;\n\n\n\n#[cfg(test)]\n\nmod tests;", "file_path": "src/gf_poly/mod.rs", "rank": 44, "score": 8800.967237698107 }, { "content": "mod gf;\n\nmod gf_poly;\n\n\n\npub use gf::*;\n\npub use gf_poly::*;\n", "file_path": "src/lib.rs", "rank": 45, "score": 7.171115839365112 }, { "content": "# Разработка приостановлена\n\nВ данный момент реализация приостановлена, поскольку на сегодняшний день в Rust\n\nотсутствует возможность контроля значения после присваивания. В C++ для этой\n\nцели можно перегрузить `operator=`, в Rust на данный момент сущесвует\n\n[Pre-RFC на введение trait IndexAssing](https://internals.rust-lang.org/t/pre-rfc-split-indexmut-into-indexmut-and-indexassign/10399), который бы позволил контролировать\n\nприсваиваемое значение. В данном проекте это является необходимостью, поскольку\n\nмногочлен над некоторым конечным полем должен состоять из чисел только этого поля.\n\nВ данный момент при реализации IndexMut была бы возможность присвоить\n\nмногочлену над полем `Gf::new(5)` например число из поля `Gf::new(3)`, если\n\nреализовывать многочлен как `Vec<GfNum>`. Если же реализовывать многочлен как\n\n`Vec<usize>` с операциями над заданным полем, то при присвоении нового значения\n\nпо индексу будет возможность присвоить ненормированное значение, что может при\n\nпоследующем выполнении операций приводить к переполнению, к тому же такая реализация\n\nбыла бы очень трудоёмкой. Единственным выходом во втором случае было бы нормирование\n\nчисел в векторе перед выполнением любых операций, но это значительно повысило бы\n\nвремя выполнения программы, по сравнению с возможностью проверки значений в момент\n\nприсвоения, а значит не дало бы выигрыша в производительности по сравнению с\n", "file_path": "README.md", "rank": 46, "score": 4.872207067151128 }, { "content": "MIT License\n\n\n\nCopyright (c) 2020 Vadim Piven\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n", "file_path": "LICENSE.md", "rank": 47, "score": 1.0188892796498012 } ]
Rust
src/equalizer.rs
joaocarvalhoopen/Audio_filters_in_Rust
00639f0c30078a5fd3f39a690243f5742743ae02
use crate::iir_filter::ProcessingBlock; use crate::iir_filter::IIRFilter; use crate::butterworth_filter::make_peak_eq_constant_q; pub struct Equalizer { sample_rate: u32, bands_vec: Vec<f64>, bands_gain_vec: Vec<f64>, gain_max_db: f64, gain_min_db: f64, q_factor: f64, iir_filters_vec: Vec<IIRFilter>, } impl Equalizer { pub fn new(sample_rate: u32, bands_vec: & Vec<f64>, gain_max_db:f64, gain_min_db:f64, q_factor:f64 ) -> Self { let mut equalizer = Equalizer{ sample_rate, bands_vec: bands_vec.clone(), bands_gain_vec: vec![0.0; bands_vec.len()], gain_max_db, gain_min_db, q_factor, iir_filters_vec: Vec::with_capacity(bands_vec.len()) }; equalizer.gen_chain_filters(); equalizer } fn gen_chain_filters(& mut self) { for band in & self.bands_vec { let frequency_center = *band; let gain_db = 0.0; let iir_filter = make_peak_eq_constant_q(frequency_center, self.sample_rate, gain_db, Some(self.q_factor)); self.iir_filters_vec.push(iir_filter); } } fn change_filter(& mut self, index: usize) { assert!(index < self.bands_vec.len()); let frequency_center = self.bands_vec[index]; let gain_db = self.bands_gain_vec[index]; let q_factor = Some(self.q_factor); let iir_filter_tmp = make_peak_eq_constant_q(frequency_center, self.sample_rate, gain_db, q_factor); let _ = self.iir_filters_vec[index].set_coefficients(& iir_filter_tmp.a_coeffs, & iir_filter_tmp.b_coeffs); } pub fn get_bands_freq(& self, index: usize) -> f64 { assert!(index < self.bands_vec.len()); self.bands_vec[index] } pub fn get_band_gain(& self, index: usize) -> f64 { assert!(index < self.bands_vec.len()); self.bands_gain_vec[index] } pub fn set_band_gain(& mut self, index: usize, gain_db: f64) -> Result<(), String> { assert!(index < self.bands_vec.len()); if gain_db < self.gain_min_db || gain_db > self.gain_max_db { return Err(format!("Error: invalid gain value {}, must be in the interval [{}, {}]", gain_db, self.gain_min_db, self.gain_max_db)); } self.bands_gain_vec[index] = gain_db; self.change_filter(index); Ok(()) } pub fn make_equalizer_10_band(sample_rate: u32) -> Equalizer { let bands_vec = vec![ 29.0, 59.0, 119.0, 237.0, 474.0, 947.0, 1889.0, 3770.0, 7523.0, 15011.0 ]; let gain_max_db = 12.0; let gain_min_db = -24.0; let q_factor = 2.0 * f64::sqrt(2.0); let equalizer_10_band = Equalizer::new(sample_rate, & bands_vec, gain_max_db, gain_min_db, q_factor); equalizer_10_band } } impl ProcessingBlock for Equalizer { fn process(& mut self, sample: f64) -> f64 { let mut sample_t = sample; for iir_filter in & mut self.iir_filters_vec { sample_t = iir_filter.process(sample_t); } sample_t } }
use crate::iir_filter::ProcessingBlock; use crate::iir_filter::IIRFilter; use crate::butterworth_filter::make_peak_eq_constant_q; pub struct Equalizer { sample_rate: u32, bands_vec: Vec<f64>, bands_gain_vec: Vec<f64>, gain_max_db: f64, gain_min_db: f64, q_factor: f64, iir_filters_vec: Vec<IIRFilter>, } impl Equalizer { pub fn new(sample_rate: u32, bands_vec: & Vec<f64>, gain_max_db:f64, gain_min_db:f64, q_factor:f64 ) -> Self { let mut equalizer = Equalizer{ sample_rate, bands_vec: bands_vec.clone(), bands_gain_vec: vec![0.0; bands_vec.len()], gain_max_db, gain_min_db, q_factor, iir_filters_vec: Vec::with_capacity(bands_vec.len()) }; equalizer.gen_chain_filters(); equalizer } fn gen_chain_filters(& mut self) { f
self.iir_filters_vec.push(iir_filter); } } fn change_filter(& mut self, index: usize) { assert!(index < self.bands_vec.len()); let frequency_center = self.bands_vec[index]; let gain_db = self.bands_gain_vec[index]; let q_factor = Some(self.q_factor); let iir_filter_tmp = make_peak_eq_constant_q(frequency_center, self.sample_rate, gain_db, q_factor); let _ = self.iir_filters_vec[index].set_coefficients(& iir_filter_tmp.a_coeffs, & iir_filter_tmp.b_coeffs); } pub fn get_bands_freq(& self, index: usize) -> f64 { assert!(index < self.bands_vec.len()); self.bands_vec[index] } pub fn get_band_gain(& self, index: usize) -> f64 { assert!(index < self.bands_vec.len()); self.bands_gain_vec[index] } pub fn set_band_gain(& mut self, index: usize, gain_db: f64) -> Result<(), String> { assert!(index < self.bands_vec.len()); if gain_db < self.gain_min_db || gain_db > self.gain_max_db { return Err(format!("Error: invalid gain value {}, must be in the interval [{}, {}]", gain_db, self.gain_min_db, self.gain_max_db)); } self.bands_gain_vec[index] = gain_db; self.change_filter(index); Ok(()) } pub fn make_equalizer_10_band(sample_rate: u32) -> Equalizer { let bands_vec = vec![ 29.0, 59.0, 119.0, 237.0, 474.0, 947.0, 1889.0, 3770.0, 7523.0, 15011.0 ]; let gain_max_db = 12.0; let gain_min_db = -24.0; let q_factor = 2.0 * f64::sqrt(2.0); let equalizer_10_band = Equalizer::new(sample_rate, & bands_vec, gain_max_db, gain_min_db, q_factor); equalizer_10_band } } impl ProcessingBlock for Equalizer { fn process(& mut self, sample: f64) -> f64 { let mut sample_t = sample; for iir_filter in & mut self.iir_filters_vec { sample_t = iir_filter.process(sample_t); } sample_t } }
or band in & self.bands_vec { let frequency_center = *band; let gain_db = 0.0; let iir_filter = make_peak_eq_constant_q(frequency_center, self.sample_rate, gain_db, Some(self.q_factor));
function_block-random_span
[ { "content": "/// Creates a low-pass filter\n\n///\n\n/// In Python: \n\n/// >>> filter = make_lowpass(1000, 48000)\n\n/// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE\n\n/// [1.0922959556412573, -1.9828897227476208, 0.9077040443587427, 0.004277569313094809,\n\n/// 0.008555138626189618, 0.004277569313094809]\n\n/// \n\n/// In Rust:\n\n/// >>> let filter = make_lowpass(1000, 48000);\n\n/// >>> let res_coeffs: Vec<f64> = filter.a_coeffs.iter.extends(filter.b_coeffs).collect();\n\n/// >>> println!(\"{}\", res_coeffs);\n\n/// [1.0922959556412573, -1.9828897227476208, 0.9077040443587427, 0.004277569313094809,\n\n/// 0.008555138626189618, 0.004277569313094809]\n\n///\n\npub fn make_lowpass(frequency: f64, sample_rate: u32, q_factor: Option<f64>) -> IIRFilter {\n\n let q_factor: f64 = if q_factor.is_none() {\n\n 1.0 / f64::sqrt(2.0)\n\n } else {\n\n q_factor.unwrap()\n\n };\n\n\n\n let w0 = TAU * frequency / sample_rate as f64;\n\n let _sin = f64::sin(w0);\n\n let _cos = f64::cos(w0);\n\n let alpha = _sin / (2.0 * q_factor);\n\n \n\n let b0 = (1.0 - _cos) / 2.0;\n\n let b1 = 1.0 - _cos;\n\n \n\n let a0 = 1.0 + alpha;\n\n let a1 = -2.0 * _cos;\n\n let a2 = 1.0 - alpha;\n\n \n\n let filter_order = 2;\n\n let mut filter = IIRFilter::new(filter_order);\n\n let _ = filter.set_coefficients(& [a0, a1, a2], & [b0, b1, b0]);\n\n \n\n filter\n\n}\n\n\n", "file_path": "src/butterworth_filter.rs", "rank": 0, "score": 115298.8387382214 }, { "content": "/// Creates a notch filter\n\n///\n\n/// In Python: \n\n/// >>> filter = make_notch(1000, 48000, 10)\n\n/// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE\n\n/// [, , , ,\n\n/// , ]\n\n/// \n\npub fn make_notch(frequency: f64, sample_rate: u32, q_factor: Option<f64>) -> IIRFilter {\n\n let q_factor: f64 = if q_factor.is_none() {\n\n 1.0 / f64::sqrt(2.0)\n\n } else {\n\n q_factor.unwrap()\n\n };\n\n\n\n let w0 = TAU * frequency / sample_rate as f64;\n\n let _sin = f64::sin(w0);\n\n let _cos = f64::cos(w0);\n\n use std::f64::consts::E;\n\n let alpha = _sin * f64::sinh((f64::log(2.0,E) / 2.0) * q_factor * (w0 /_sin ));\n\n \n\n let b0 = 1.0;\n\n let b1 = -2.0 * _cos;\n\n\n\n let a0 = 1.0 + alpha;\n\n let a1 = -2.0 * _cos;\n\n let a2 = 1.0 - alpha;\n\n \n", "file_path": "src/butterworth_filter.rs", "rank": 1, "score": 115293.30429982745 }, { "content": "/// Creates a high-pass filter\n\n/// \n\n/// In Python:\n\n/// >>> filter = make_highpass(1000, 48000)\n\n/// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE\n\n/// [1.0922959556412573, -1.9828897227476208, 0.9077040443587427, 0.9957224306869052,\n\n/// -1.9914448613738105, 0.9957224306869052]\n\n/// \n\npub fn make_highpass(frequency: f64, sample_rate: u32, q_factor: Option<f64>) -> IIRFilter {\n\n let q_factor: f64 = if q_factor.is_none() {\n\n 1.0 / f64::sqrt(2.0)\n\n } else {\n\n q_factor.unwrap()\n\n };\n\n\n\n let w0 = TAU * frequency / sample_rate as f64; \n\n let _sin = f64::sin(w0);\n\n let _cos = f64::cos(w0);\n\n let alpha = _sin / (2.0 * q_factor);\n\n\n\n let b0 = (1.0 + _cos) / 2.0;\n\n let b1 = -1.0 - _cos;\n\n\n\n let a0 = 1.0 + alpha;\n\n let a1 = -2.0 * _cos;\n\n let a2 = 1.0 - alpha;\n\n\n\n let filter_order = 2;\n\n let mut filter = IIRFilter::new(filter_order);\n\n let _ = filter.set_coefficients(& [a0, a1, a2], & [b0, b1, b0]);\n\n \n\n filter\n\n}\n\n\n", "file_path": "src/butterworth_filter.rs", "rank": 2, "score": 115293.30429982746 }, { "content": "/// Creates an all-pass filter\n\n/// \n\n/// In Python:\n\n/// >>> filter = make_allpass(1000, 48000)\n\n/// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE\n\n/// [1.0922959556412573, -1.9828897227476208, 0.9077040443587427, 0.9077040443587427,\n\n/// -1.9828897227476208, 1.0922959556412573]\n\n///\n\npub fn make_allpass(frequency: f64, sample_rate: u32, q_factor: Option<f64>) -> IIRFilter {\n\n let q_factor: f64 = if q_factor.is_none() {\n\n 1.0 / f64::sqrt(2.0)\n\n } else {\n\n q_factor.unwrap()\n\n };\n\n\n\n let w0 = TAU * frequency / sample_rate as f64;\n\n let _sin = f64::sin(w0);\n\n let _cos = f64::cos(w0);\n\n let alpha = _sin / (2.0 * q_factor);\n\n\n\n let b0 = 1.0 - alpha;\n\n let b1 = -2.0 * _cos;\n\n let b2 = 1.0 + alpha;\n\n\n\n let filter_order = 2;\n\n let mut filter = IIRFilter::new(filter_order);\n\n let _ = filter.set_coefficients(& [b2, b1, b0], & [b0, b1, b2]);\n\n \n\n filter\n\n}\n\n\n", "file_path": "src/butterworth_filter.rs", "rank": 3, "score": 115293.30429982746 }, { "content": "/// Creates a band-pass filter\n\n/// \n\n/// In Python:\n\n/// >>> filter = make_bandpass(1000, 48000)\n\n/// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE\n\n/// [1.0922959556412573, -1.9828897227476208, 0.9077040443587427, 0.06526309611002579,\n\n/// 0, -0.06526309611002579]\n\n/// \n\npub fn make_bandpass(frequency: f64, sample_rate: u32, q_factor: Option<f64>) -> IIRFilter {\n\n let q_factor: f64 = if q_factor.is_none() {\n\n 1.0 / f64::sqrt(2.0)\n\n } else {\n\n q_factor.unwrap()\n\n };\n\n\n\n let w0 = TAU * frequency / sample_rate as f64;\n\n let _sin = f64::sin(w0);\n\n let _cos = f64::cos(w0);\n\n let alpha = _sin / (2.0 * q_factor);\n\n\n\n let b0 = _sin / 2.0;\n\n let b1 = 0.0;\n\n let b2 = -b0;\n\n\n\n let a0 = 1.0 + alpha;\n\n let a1 = -2.0 * _cos;\n\n let a2 = 1.0 - alpha;\n\n\n\n let filter_order = 2;\n\n let mut filter = IIRFilter::new(filter_order);\n\n let _ = filter.set_coefficients(& [a0, a1, a2], & [b0, b1, b2]);\n\n \n\n filter\n\n}\n\n\n", "file_path": "src/butterworth_filter.rs", "rank": 4, "score": 115293.30429982745 }, { "content": "/// Creates a peak filter\n\n///\n\n/// In Python: \n\n/// >>> filter = make_peak(1000, 48000, 6)\n\n/// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE\n\n/// [1.0653405327119334, -1.9828897227476208, 0.9346594672880666, 1.1303715025601122,\n\n/// -1.9828897227476208, 0.8696284974398878]\n\n///\n\npub fn make_peak(frequency: f64, sample_rate: u32, gain_db: f64, q_factor: Option<f64>) -> IIRFilter {\n\n let q_factor: f64 = if q_factor.is_none() {\n\n 1.0 / f64::sqrt(2.0)\n\n } else {\n\n q_factor.unwrap()\n\n };\n\n\n\n let w0 = TAU * frequency / sample_rate as f64;\n\n let _sin = f64::sin(w0);\n\n let _cos = f64::cos(w0);\n\n let alpha = _sin / (2.0 * q_factor);\n\n let big_a = 10.0_f64.powf(gain_db / 40.0);\n\n\n\n let b0 = 1.0 + alpha * big_a;\n\n let b1 = -2.0 * _cos;\n\n let b2 = 1.0 - alpha * big_a;\n\n let a0 = 1.0 + alpha / big_a;\n\n let a1 = -2.0 * _cos;\n\n let a2 = 1.0 - alpha / big_a;\n\n\n\n let filter_order = 2;\n\n let mut filter = IIRFilter::new(filter_order);\n\n let _ = filter.set_coefficients(& [a0, a1, a2], & [b0, b1, b2]);\n\n \n\n filter\n\n\n\n}\n\n\n", "file_path": "src/butterworth_filter.rs", "rank": 5, "score": 112936.55210617889 }, { "content": "/// Creates a low-shelf filter\n\n/// \n\n/// In Python:\n\n/// >>> filter = make_lowshelf(1000, 48000, 6)\n\n/// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE\n\n/// [3.0409336710888786, -5.608870992220748, 2.602157875636628, 3.139954022810743,\n\n/// -5.591841778072785, 2.5201667380627257]\n\n/// \n\npub fn make_lowshelf(frequency: f64, sample_rate: u32, gain_db: f64, q_factor: Option<f64>) -> IIRFilter {\n\n let q_factor: f64 = if q_factor.is_none() {\n\n 1.0 / f64::sqrt(2.0)\n\n } else {\n\n q_factor.unwrap()\n\n };\n\n\n\n let w0 = TAU * frequency / sample_rate as f64;\n\n let _sin = f64::sin(w0);\n\n let _cos = f64::cos(w0);\n\n let alpha = _sin / (2.0 * q_factor);\n\n let big_a = 10.0_f64.powf(gain_db / 40.0);\n\n let pmc = (big_a + 1.0) - (big_a - 1.0) * _cos;\n\n let ppmc = (big_a + 1.0) + (big_a - 1.0) * _cos;\n\n let mpc = (big_a - 1.0) - (big_a + 1.0) * _cos;\n\n let pmpc = (big_a - 1.0) + (big_a + 1.0) * _cos;\n\n let aa2 = 2.0 * f64::sqrt(big_a) * alpha;\n\n\n\n let b0 = big_a * (pmc + aa2);\n\n let b1 = 2.0 * big_a * mpc;\n", "file_path": "src/butterworth_filter.rs", "rank": 6, "score": 112936.55210617889 }, { "content": "/// Creates a high-shelf filter\n\n///\n\n/// In Python: \n\n/// >>> filter = make_highshelf(1000, 48000, 6)\n\n/// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE\n\n/// [2.2229172136088806, -3.9587208137297303, 1.7841414181566304, 4.295432981120543,\n\n/// -7.922740859457287, 3.6756456963725253]\n\n///\n\npub fn make_highshelf(frequency: f64, sample_rate: u32, gain_db: f64, q_factor: Option<f64>) -> IIRFilter {\n\n let q_factor: f64 = if q_factor.is_none() {\n\n 1.0 / f64::sqrt(2.0)\n\n } else {\n\n q_factor.unwrap()\n\n };\n\n\n\n let w0 = TAU * frequency / sample_rate as f64;\n\n let _sin = f64::sin(w0);\n\n let _cos = f64::cos(w0);\n\n let alpha = _sin / (2.0 * q_factor);\n\n let big_a = 10.0_f64.powf(gain_db / 40.0);\n\n let pmc = (big_a + 1.0) - (big_a - 1.0) * _cos;\n\n let ppmc = (big_a + 1.0) + (big_a - 1.0) * _cos;\n\n let mpc = (big_a - 1.0) - (big_a + 1.0) * _cos;\n\n let pmpc = (big_a - 1.0) + (big_a + 1.0) * _cos;\n\n let aa2 = 2.0 * f64::sqrt(big_a) * alpha;\n\n\n\n let b0 = big_a * (ppmc + aa2);\n\n let b1 = -2.0 * big_a * pmpc;\n", "file_path": "src/butterworth_filter.rs", "rank": 7, "score": 112936.55210617889 }, { "content": "// This is a peak_eq filter similar to the above peak filter but with constant Q and the gain\n\n// is taken at -3dB like a analog peak_eq filter would be.\n\n// This filter is ideal to make equalizers, like a 10 band parametric equalizer.\n\n//\n\n// See:\n\n// 1. Peak / notch filter design\n\n// https://www.dsprelated.com/showcode/169.php#commax_container\n\n//\n\n// and\n\n//\n\n// 2. Making an EQ from cascading filters\n\n// https://dsp.stackexchange.com/questions/10309/making-an-eq-from-cascading-filters \n\n//\n\n// and\n\n//\n\n// 3. The Equivalence of Various Methods of Computing\n\n// Biquad Coefficients for Audio Parametric Equalizers\n\n// http://www.thesounddesign.com/MIO/EQ-Coefficients.pdf\n\n//\n\npub fn make_peak_eq_constant_q(frequency_center: f64, sample_rate: u32, gain_db: f64, q_factor: Option<f64>) -> IIRFilter {\n\n // This specific filter is a port to Rust with modifications from the following example code:\n\n // PEAK/NOTCH FILTER DESIGN\n\n // https://www.dsprelated.com/showcode/169.php#commax_container\n\n //\n\n // Derive coefficients for a peaking filter with a given amplitude and\n\n // bandwidth. All coefficients are calculated as described in Zolzer's\n\n // DAFX book (p. 50 - 55). This algorithm assumes a constant Q-term\n\n // is used through the equation.\n\n //\n\n // Original Author: sparafucile17 08/22/05\n\n //\n\n \n\n let q_factor: f64 = if q_factor.is_none() {\n\n 1.0 / f64::sqrt(2.0)\n\n } else {\n\n q_factor.unwrap()\n\n };\n\n\n\n let q = q_factor;\n", "file_path": "src/butterworth_filter.rs", "rank": 8, "score": 108489.38163178213 }, { "content": "/// Show phase response of a filter\n\n/// \n\n/// In Python:\n\n/// >>> from audio_filters.iir_filter import IIRFilter\n\n/// >>> filt = IIRFilter(4)\n\n/// >>> show_phase_response(filt, 48000)\n\n/// \n\npub fn show_phase_response(processing_block: & mut dyn ProcessingBlock, sample_rate: usize, path: & str, line_name: & str) {\n\n\n\n let size = 512_usize;\n\n // Excites the filter with an input of only a peak value (1.0) in the first sample, and the rest with (0.0) zero, as samples.\n\n // It's a Dirac Impulse. \n\n let inputs = { let mut inputs = vec![0.0; size - 1 + 1];\n\n inputs[0] = 1.0; \n\n inputs\n\n };\n\n let mut outputs: Vec<f64> = Vec::with_capacity(size);\n\n for i in 0..size {\n\n outputs.push(processing_block.process(inputs[i]));\n\n }\n\n // zero-padding.\n\n let filler = vec![0.0; sample_rate - size];\n\n outputs.extend(filler);\n\n\n\n // Perform a forward FFT of size 1234\n\n use rustfft::{FftPlanner, num_complex::Complex};\n\n\n", "file_path": "src/show_response.rs", "rank": 9, "score": 61630.94304320555 }, { "content": "/// Show frequency response of a filter\n\n///\n\n/// In Python:\n\n/// >>> from audio_filters.iir_filter import IIRFilter\n\n/// >>> filt = IIRFilter(4)\n\n/// >>> show_frequency_response(filt, 48000)\n\n///\n\npub fn show_frequency_response(processing_block: & mut dyn ProcessingBlock, sample_rate: usize, path: & str, line_name: & str) {\n\n\n\n let size = 512_usize;\n\n // Excites the filter with an input of only a peak value (1.0) in the first sample, and the rest with (0.0) zero, as samples.\n\n // It's a Dirac Impulse. \n\n let inputs = { let mut inputs = vec![0.0; size - 1 + 1];\n\n inputs[0] = 1.0; \n\n inputs\n\n };\n\n let mut outputs: Vec<f64> = Vec::with_capacity(size);\n\n for i in 0..size {\n\n outputs.push(processing_block.process(inputs[i]));\n\n }\n\n // zero-padding.\n\n let filler = vec![0.0; sample_rate - size];\n\n outputs.extend(filler);\n\n\n\n // Perform a forward FFT of size 1234\n\n use rustfft::{FftPlanner, num_complex::Complex};\n\n\n", "file_path": "src/show_response.rs", "rank": 10, "score": 61630.94304320555 }, { "content": "fn generate_plot_equalizer_10_bands_02() {\n\n println!(\"\\n10 Band Equalizer\\n\");\n\n let sample_rate = 48_000;\n\n let mut eq: Equalizer = Equalizer::make_equalizer_10_band(sample_rate);\n\n // Set the gains for each_frequency band.\n\n let _= eq.set_band_gain(0, -10.0);\n\n let _= eq.set_band_gain(2, -5.0);\n\n let _= eq.set_band_gain(1, 0.0);\n\n let _= eq.set_band_gain(3, 5.0);\n\n let _= eq.set_band_gain(4, 0.0);\n\n let _= eq.set_band_gain(5, -5.0);\n\n let _= eq.set_band_gain(6, 0.0);\n\n let _= eq.set_band_gain(7, 5.0);\n\n let _= eq.set_band_gain(8, 10.0);\n\n let _= eq.set_band_gain(9, 12.0);\n\n for i in 0..10 {\n\n println!(\"{} Hz : {} dB\", eq.get_bands_freq(i), eq.get_band_gain(i));\n\n }\n\n println!(\"\\n\");\n\n show_frequency_response(& mut eq, sample_rate as usize, \"plots/equalizer_10_band_gain.svg\", \"equ_10_bands\");\n\n show_phase_response(& mut eq, sample_rate as usize, \"plots/equalizer_10_band_phase.svg\", \"equ_10_bands\");\n\n}\n", "file_path": "src/main.rs", "rank": 11, "score": 60905.56034251199 }, { "content": "#[allow(dead_code)]\n\nfn generate_plot_equalizer_10_bands_01() {\n\n println!(\"\\n10 Band Equalizer\\n\");\n\n let sample_rate = 48_000;\n\n let mut eq: Equalizer = Equalizer::make_equalizer_10_band(sample_rate);\n\n // Set the gains for each_frequency band.\n\n let _= eq.set_band_gain(0, -15.0);\n\n let _= eq.set_band_gain(2, -10.0);\n\n let _= eq.set_band_gain(1, -5.0);\n\n let _= eq.set_band_gain(3, 0.0);\n\n let _= eq.set_band_gain(4, -5.0);\n\n let _= eq.set_band_gain(5, 10.0);\n\n let _= eq.set_band_gain(6, -15.0);\n\n let _= eq.set_band_gain(7, 0.0);\n\n let _= eq.set_band_gain(8, 5.0);\n\n let _= eq.set_band_gain(9, -10.0);\n\n for i in 0..10 {\n\n println!(\"{} Hz : {} dB\", eq.get_bands_freq(i), eq.get_band_gain(i));\n\n }\n\n println!(\"\\n\");\n\n show_frequency_response(& mut eq, sample_rate as usize, \"plots/equalizer_10_band_gain.svg\", \"equ_10_bands\");\n\n show_phase_response(& mut eq, sample_rate as usize, \"plots/equalizer_10_band_phase.svg\", \"equ_10_bands\");\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 12, "score": 60905.56034251199 }, { "content": "/// Get bounds for printing fft results\n\n/// \n\n/// In Python:\n\n/// >>> import numpy\n\n/// >>> array = numpy.linspace(-20.0, 20.0, 1000)\n\n/// >>> get_bounds(array, 1000)\n\n/// (-20, 20)\n\n///\n\npub fn get_bounds(fft_results: & [f32], _sample_rate: usize, x_bound_max: usize) -> (f32, f32) {\n\n // let slice_upper_bound = (sample_rate / 2) - 1;\n\n // let slice_upper_bound = (sample_rate / 2) - 1 - 100;\n\n let slice_upper_bound = x_bound_max;\n\n // This will remove the bounds checks from the array at each access.\n\n assert!(slice_upper_bound <= fft_results.len());\n\n let mut min_t = -20.0; // f64::MAX;\n\n let mut max_t = 20.0; // f64::MIN;\n\n for i in 1..slice_upper_bound{\n\n min_t = f32::min(fft_results[i], min_t);\n\n max_t = f32::max(fft_results[i], max_t);\n\n }\n\n let lowest = min_t;\n\n let highest = max_t;\n\n \n\n (lowest, highest)\n\n} \n\n\n", "file_path": "src/show_response.rs", "rank": 13, "score": 45133.41971788301 }, { "content": "fn test_a() {\n\n let mut filter = IIRFilter::new(2);\n\n let res = filter.process(0.0);\n\n println!(\"filter res: {} should be 0.0 .\", res);\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 14, "score": 42902.902070198346 }, { "content": "fn main() {\n\n println!(\"***************************\");\n\n println!(\"** Audio filters in Rust **\");\n\n println!(\"***************************\");\n\n\n\n test_a();\n\n test_b();\n\n\n\n generate_plots();\n\n // generate_plot_equalizer_10_bands_01();\n\n generate_plot_equalizer_10_bands_02();\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 15, "score": 42902.902070198346 }, { "content": "fn test_b() {\n\n let frequency = 200.0; // Hz\n\n let sample_rate = 44100; // Hz\n\n let mut filter = make_lowpass(frequency, sample_rate, None);\n\n let sample = 0.0;\n\n let res = filter.process(sample);\n\n\n\n println!(\"filter res: {} should be ?? .\", res);\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 16, "score": 42902.902070198346 }, { "content": "fn generate_plots() {\n\n print!(\"\\nStarting generating the SVG plots...\");\n\n\n\n // low-pass\n\n let frequency = 5_000.0; // Hz\n\n let sample_rate = 48_000; // Samples\n\n let mut filter = make_lowpass(frequency, sample_rate, None);\n\n show_frequency_response(& mut filter, sample_rate as usize, \"plots/lowpass_gain.svg\", \"lowpass\");\n\n show_phase_response(& mut filter, sample_rate as usize, \"plots/lowpass_phase.svg\", \"lowpass\");\n\n\n\n // high-pass\n\n let frequency = 5_000.0; // Hz\n\n let sample_rate = 48_000; // Samples\n\n let mut filter = make_highpass(frequency, sample_rate, None);\n\n show_frequency_response(& mut filter, sample_rate as usize, \"plots/highpass_gain.svg\", \"highpass\");\n\n show_phase_response(& mut filter, sample_rate as usize, \"plots/highpass_phase.svg\", \"highpass\");\n\n\n\n // band-pass\n\n let frequency = 10_000.0; // Hz\n\n let sample_rate = 48_000; // Samples\n", "file_path": "src/main.rs", "rank": 17, "score": 41679.391626412325 }, { "content": "pub trait ProcessingBlock {\n\n fn process(& mut self, sample: f64) -> f64;\n\n}\n\n\n\n\n\n/// N-Order IIR filter\n\n/// Assumes working with float samples normalized on [-1, 1]\n\n///\n\n/// Implementation details:\n\n/// Based on the 2nd-order function from\n\n/// https://en.wikipedia.org/wiki/Digital_biquad_filter,\n\n/// this generalized N-order function was made.\n\n///\n\n/// Using the following transfer function\n\n/// H(z)=\\frac{b_{0}+b_{1}z^{-1}+b_{2}z^{-2}+...+b_{k}z^{-k}}{a_{0}+a_{1}z^{-1}+a_{2}z^{-2}+...+a_{k}z^{-k}}\n\n/// we can rewrite this to\n\n/// y[n]={\\frac{1}{a_{0}}}\\left(\\left(b_{0}x[n]+b_{1}x[n-1]+b_{2}x[n-2]+...+b_{k}x[n-k]\\right)-\\left(a_{1}y[n-1]+a_{2}y[n-2]+...+a_{k}y[n-k]\\right)\\right)\n\n///\n\npub struct IIRFilter {\n\n pub order: usize,\n", "file_path": "src/iir_filter.rs", "rank": 18, "score": 38438.041250418675 }, { "content": " // a_{0} ... a_{k}\n\n pub a_coeffs: Vec<f64>,\n\n // b_{0} ... b_{k}\n\n pub b_coeffs: Vec<f64>,\n\n // x[n-1] ... x[n-k]\n\n input_history: Vec<f64>,\n\n // y[n-1] ... y[n-k]\n\n output_history: Vec<f64>,\n\n}\n\n\n\nimpl IIRFilter {\n\n pub fn new(order: usize) -> Self {\n\n IIRFilter {\n\n order: order,\n\n // a_{0} ... a_{k}\n\n a_coeffs: { let mut a_coeffs = vec![0.0; 1 + order];\n\n a_coeffs[0] = 1.0;\n\n a_coeffs },\n\n // b_{0} ... b_{k}\n\n b_coeffs: { let mut b_coeffs = vec![0.0; 1 + order];\n", "file_path": "src/iir_filter.rs", "rank": 30, "score": 14.515874929748593 }, { "content": " ///\n\n /// In Rust\n\n /// >>> let a_coeffs = [0.1, 0.2, 0.3]\n\n /// >>> let b_coeffs = [0.15, 0.25, 0.35]\n\n /// >>> let filter_order: u32 = 2;\n\n /// >>> let iir_filter = IIR_Filter::new(filter_order);\n\n /// >>> iir_filter.set_coefficients(& a_coeffs[], & b_coeffs[]);\n\n /// \n\n pub fn set_coefficients(& mut self, a_coeffs: &[f64], b_coeffs: &[f64]) -> Result<(), String> {\n\n if a_coeffs.len() != self.order + 1 && a_coeffs.len() != self.order {\n\n return Err(\n\n r\"Expected a_coeffs to have {self.order + 1} elements for {self.order} /\n\n -order filter, got {len(a_coeffs)}\".to_string());\n\n }\n\n if b_coeffs.len() != self.order + 1 {\n\n return Err(\n\n r\"Expected b_coeffs to have {self.order + 1} elements for {self.order} /\n\n -order filter, got {len(a_coeffs)}\".to_string());\n\n }\n\n self.a_coeffs.clear();\n", "file_path": "src/iir_filter.rs", "rank": 31, "score": 8.526992075927435 }, { "content": " let filter_order = 2;\n\n let mut filter = IIRFilter::new(filter_order);\n\n let _ = filter.set_coefficients(& [a0, a1, a2], & [b0, b1, b0]);\n\n \n\n filter\n\n}\n\n\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n fn print_values(target_vec: & Vec<f64>, res_coeffs: & Vec<&f64>) {\n\n println!(\"\\n >>>> target_coefficents\");\n\n for str_t in target_vec {\n\n print!(\"{}, \", str_t); \n\n }\n\n println!(\"\\n >>>> res_coefficents\");\n\n for str_t in res_coeffs {\n", "file_path": "src/butterworth_filter.rs", "rank": 32, "score": 7.0731280528574105 }, { "content": "\n\n// Module definition\n\nmod iir_filter;\n\nmod butterworth_filter;\n\nmod show_response;\n\nmod equalizer;\n\n\n\n// Imports\n\nuse crate::iir_filter::ProcessingBlock; // Trait\n\nuse crate::iir_filter::IIRFilter;\n\nuse crate::butterworth_filter::make_lowpass;\n\nuse crate::butterworth_filter::make_highpass;\n\nuse crate::butterworth_filter::make_bandpass;\n\nuse crate::butterworth_filter::make_allpass;\n\nuse crate::butterworth_filter::make_peak;\n\nuse crate::butterworth_filter::make_peak_eq_constant_q;\n\nuse crate::butterworth_filter::make_lowshelf;\n\nuse crate::butterworth_filter::make_highshelf;\n\nuse crate::butterworth_filter::make_notch;\n\n\n\nuse crate::show_response::show_frequency_response;\n\nuse crate::show_response::show_phase_response;\n\n\n\nuse crate::equalizer::Equalizer;\n\n\n\n\n", "file_path": "src/main.rs", "rank": 33, "score": 6.553565462039288 }, { "content": " /// >>> filt.process(0)\n\n /// 0.0\n\n /// \n\n /// In Rust\n\n /// >>> let filt = IIRFilter::new(2)\n\n /// >>> filt.process(0.0)\n\n /// 0.0\n\n ///\n\n fn process(& mut self, sample: f64) -> f64 {\n\n let mut result: f64 = 0.0;\n\n\n\n // Start at index 1 and do index 0 at the end.\n\n for i in 1..(self.order + 1) {\n\n result += self.b_coeffs[i] * self.input_history[i - 1]\n\n - self.a_coeffs[i] * self.output_history[i - 1];\n\n }\n\n \n\n result = (result + self.b_coeffs[0] * sample) / self.a_coeffs[0];\n\n\n\n let input_len = self.input_history.len();\n", "file_path": "src/iir_filter.rs", "rank": 34, "score": 6.216356583348826 }, { "content": "\n\nuse crate::iir_filter::IIRFilter;\n\nuse std::f64::consts::TAU;\n\nuse std::f64::consts::PI;\n\n\n\n/// Create 2nd-order IIR filters with Butterworth design.\n\n/// \n\n/// Code based on https://webaudio.github.io/Audio-EQ-Cookbook/audio-eq-cookbook.html\n\n/// Alternatively you can use scipy.signal.butter, which should yield the same results.\n\n/// \n\n\n\n\n\n/// Creates a low-pass filter\n\n///\n\n/// In Python: \n\n/// >>> filter = make_lowpass(1000, 48000)\n\n/// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE\n\n/// [1.0922959556412573, -1.9828897227476208, 0.9077040443587427, 0.004277569313094809,\n\n/// 0.008555138626189618, 0.004277569313094809]\n\n/// \n\n/// In Rust:\n\n/// >>> let filter = make_lowpass(1000, 48000);\n\n/// >>> let res_coeffs: Vec<f64> = filter.a_coeffs.iter.extends(filter.b_coeffs).collect();\n\n/// >>> println!(\"{}\", res_coeffs);\n\n/// [1.0922959556412573, -1.9828897227476208, 0.9077040443587427, 0.004277569313094809,\n\n/// 0.008555138626189618, 0.004277569313094809]\n\n///\n", "file_path": "src/butterworth_filter.rs", "rank": 35, "score": 6.161811141006847 }, { "content": " let k = f64::tan((PI * frequency_center) / sample_rate as f64);\n\n let mut v0 = 10.0_f64.powf(gain_db / 20.0);\n\n \n\n // Invert gain if a cut\n\n if v0 < 1.0 {\n\n v0 = 1.0 / v0;\n\n }\n\n \n\n let b0: f64;\n\n let b1: f64;\n\n let b2: f64;\n\n let a1: f64;\n\n let a2: f64;\n\n\n\n let _k_sqr = k.powf(2.0);\n\n //***********\n\n // BOOST\n\n //***********\n\n if gain_db > 0.0 {\n\n b0 = (1.0 + ((v0 / q) * k) + _k_sqr) / (1.0 + ((1.0 / q) * k) + _k_sqr);\n", "file_path": "src/butterworth_filter.rs", "rank": 36, "score": 6.045790776383819 }, { "content": "\n\n // peak_eq_constant_q positive and negative gain.\n\n let frequency = 10_000.0; // Hz\n\n let sample_rate = 48_000; // Samples\n\n let gain_db = 5.0; // dB\n\n // A good value for a 10 band equalizer.\n\n // See: The second reference on the function make_peak_eq_constant_q.\n\n let q_factor = Some(2.0 * f64::sqrt(2.0));\n\n let mut filter = make_peak_eq_constant_q(frequency, sample_rate, gain_db, q_factor);\n\n show_frequency_response(& mut filter, sample_rate as usize, \"plots/peak_eq_pos_g_gain.svg\", \"peakEQ_G+\");\n\n show_phase_response(& mut filter, sample_rate as usize, \"plots/peak_eq_pos_g_phase.svg\", \"peakEQ_G+\");\n\n let gain_db = -5.0; // dB\n\n let mut filter = make_peak_eq_constant_q(frequency, sample_rate, gain_db, q_factor);\n\n show_frequency_response(& mut filter, sample_rate as usize, \"plots/peak_eq_neg_g_gain.svg\", \"peakEQ_G-\");\n\n show_phase_response(& mut filter, sample_rate as usize, \"plots/peak_eq_neg_g_phase.svg\", \"peakEQ_G-\");\n\n\n\n // low-shelf\n\n let frequency = 10_000.0; // Hz\n\n let sample_rate = 48_000; // Samples\n\n let gain_db = 6.0; // dB\n", "file_path": "src/main.rs", "rank": 37, "score": 5.700478741502632 }, { "content": " let output_len = self.output_history.len();\n\n self.input_history.copy_within(0..(input_len - 1), 1);\n\n self.output_history.copy_within(0..(output_len - 1), 1);\n\n\n\n self.input_history[0] = sample;\n\n self.output_history[0] = result;\n\n\n\n result\n\n }\n\n\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_iir_filter_000() {\n\n\n\n let mut filter = IIRFilter::new(2);\n", "file_path": "src/iir_filter.rs", "rank": 38, "score": 4.581160787628104 }, { "content": " let mut filter = make_lowshelf(frequency, sample_rate, gain_db, None);\n\n show_frequency_response(& mut filter, sample_rate as usize, \"plots/lowshelf_gain.svg\", \"lowshelf\");\n\n show_phase_response(& mut filter, sample_rate as usize, \"plots/lowshelf_phase.svg\", \"lowshelf\");\n\n\n\n // high-shelf\n\n let frequency = 10_000.0; // Hz\n\n let sample_rate = 48_000; // Samples\n\n let gain_db = 6.0; // dB\n\n let mut filter = make_highshelf(frequency, sample_rate, gain_db, None);\n\n show_frequency_response(& mut filter, sample_rate as usize, \"plots/highshelf_gain.svg\", \"highshelf\");\n\n show_phase_response(& mut filter, sample_rate as usize, \"plots/highshelf_phase.svg\", \"highshelf\");\n\n\n\n // notch\n\n let frequency = 10_000.0; // Hz\n\n let sample_rate = 48_000; // Samples\n\n let q_factor = 0.05;\n\n let mut filter = make_notch(frequency, sample_rate, Some(q_factor));\n\n show_frequency_response(& mut filter, sample_rate as usize, \"plots/notch_gain.svg\", \"notch\");\n\n show_phase_response(& mut filter, sample_rate as usize, \"plots/notch_phase.svg\", \"notch\");\n\n\n\n println!(\"\\n ... ended generating the SVG plots.\");\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 39, "score": 3.3737662439818843 }, { "content": " // Note: I have put a larger q_factor then the default so that the band pass is more accentuated. \n\n let q_factor = Some(1.0);\n\n let mut filter = make_bandpass(frequency, sample_rate, q_factor);\n\n show_frequency_response(& mut filter, sample_rate as usize, \"plots/bandpass_gain.svg\", \"bandpass\");\n\n show_phase_response(& mut filter, sample_rate as usize, \"plots/bandpass_phase.svg\", \"bandpass\");\n\n\n\n // all-pass\n\n let frequency = 10_000.0; // Hz\n\n let sample_rate = 48_000; // Samples\n\n let mut filter = make_allpass(frequency, sample_rate, None);\n\n show_frequency_response(& mut filter, sample_rate as usize, \"plots/allpass_gain.svg\", \"allpass\");\n\n show_phase_response(& mut filter, sample_rate as usize, \"plots/allpass_phase.svg\", \"allpass\");\n\n\n\n // peak\n\n let frequency = 10_000.0; // Hz\n\n let sample_rate = 48_000; // Samples\n\n let gain_db = 6.0; // dB\n\n let mut filter = make_peak(frequency, sample_rate, gain_db, None);\n\n show_frequency_response(& mut filter, sample_rate as usize, \"plots/peak_gain.svg\", \"peak\");\n\n show_phase_response(& mut filter, sample_rate as usize, \"plots/peak_phase.svg\", \"peak\");\n", "file_path": "src/main.rs", "rank": 40, "score": 3.3737662439818843 }, { "content": " let bounds = get_bounds(& fft_db, sample_rate, x_bound_max);\n\n let (y_bound_min, y_bound_max) = (f32::max(-80.0, bounds.0), f32::min(80.0, bounds.1) );\n\n\n\n // Frequencies on log scale from 24 to nyquist frequency\n\n use plotters::prelude::*;\n\n //fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root = SVGBackend::new(path /* \"plots/0.svg\" */, (400, 300)).into_drawing_area();\n\n root.fill(&WHITE).unwrap();\n\n let mut chart = ChartBuilder::on(&root)\n\n .caption(line_name.to_string() + \" - Gain(dB) vs Freq\", (\"sans-serif\", 25).into_font())\n\n .margin(5)\n\n .x_label_area_size(30)\n\n .y_label_area_size(30)\n\n .build_cartesian_2d(x_bound_min..x_bound_max, y_bound_min..y_bound_max )\n\n .unwrap();\n\n \n\n chart.configure_mesh().draw().unwrap();\n\n \n\n chart\n\n .draw_series(LineSeries::new(\n", "file_path": "src/show_response.rs", "rank": 41, "score": 3.2664627784381053 }, { "content": " // NOTE: Remember that TAU = 2 * PI.\n\n let (y_bound_min, y_bound_max) = (f32::max(-TAU_f32, bounds.0), f32::min(TAU_f32, bounds.1) );\n\n\n\n // Frequencies on log scale from 24 to nyquist frequency\n\n use plotters::prelude::*;\n\n //fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root = SVGBackend::new(path /* \"plots/0.svg\" */, (400, 300)).into_drawing_area();\n\n root.fill(&WHITE).unwrap();\n\n let mut chart = ChartBuilder::on(&root)\n\n .caption(line_name.to_string() + \" - Phase shift(Rad) vs Freq\", (\"sans-serif\", 25).into_font())\n\n .margin(5)\n\n .x_label_area_size(30)\n\n .y_label_area_size(30)\n\n .build_cartesian_2d(x_bound_min..x_bound_max, y_bound_min..y_bound_max )\n\n .unwrap();\n\n \n\n chart.configure_mesh().draw().unwrap();\n\n \n\n chart\n\n .draw_series(LineSeries::new(\n", "file_path": "src/show_response.rs", "rank": 42, "score": 3.2664627784381053 }, { "content": "\n\nuse crate::iir_filter::ProcessingBlock; // Trait\n\nuse std::f32::consts::TAU as TAU_f32;\n\n\n\n\n\n/// Get bounds for printing fft results\n\n/// \n\n/// In Python:\n\n/// >>> import numpy\n\n/// >>> array = numpy.linspace(-20.0, 20.0, 1000)\n\n/// >>> get_bounds(array, 1000)\n\n/// (-20, 20)\n\n///\n", "file_path": "src/show_response.rs", "rank": 43, "score": 3.223986939398533 }, { "content": " fn test_show_frequency_response() {\n\n let frequency = 5_000.0; // Hz\n\n let sample_rate = 48_000; // Samples\n\n let mut filter = make_lowpass(frequency, sample_rate, None);\n\n // show_frequency_response(& mut filter, sample_rate as usize, \"plots/lowpass.svg\", \"lowpass\");\n\n \n\n // assert_eq!(true, false);\n\n }\n\n}\n\n\n", "file_path": "src/show_response.rs", "rank": 44, "score": 3.0147007686748917 }, { "content": " fft_out.iter().enumerate().map(|pair| (pair.0, *pair.1 ) ),\n\n &BLUE,\n\n )).unwrap()\n\n .label(line_name)\n\n .legend(|(x, y)| PathElement::new(vec![(x, y), (x + 20, y)], &BLUE));\n\n \n\n chart\n\n .configure_series_labels()\n\n .background_style(&WHITE.mix(0.8))\n\n .border_style(&BLACK)\n\n .draw().unwrap();\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use crate::butterworth_filter::make_lowpass;\n\n\n\n #[test]\n", "file_path": "src/show_response.rs", "rank": 45, "score": 2.9747615521659974 }, { "content": "# Audio filters in Rust\n\nA port of the WebAudio API filters to Rust.\n\n\n\n## Description \n\nAudio or DSP filters, allow you to attenuate or accentuate some frequencies or range of frequencies in a signal. The signal can be of any kind, but in here, we will focus on 1D signals. Like audio signals. There can also occur differences in the signal phases, that vary with the filter and the frequency components of the signal. <br>\n\nThis is a port of Audio filters, from Python to Rust. My port was made from TheAlgorithms GitHub in Python (see references below). That is, by it self a port from WebAudio API C++ filters implementation used by the browsers. <br>\n\n\n\n\n\n## The following filters are implemented over a BiQuad IIR filter\n\n1. **low-pass**\n\n2. **high-pass**\n\n3. **band-pass**\n\n4. **all-pass**\n\n5. **peak**\n\n6. **low-shelf**\n\n7. **high-shelf** \n\n8. **notch**\n\n9. **10 band equalizer**\n\n\n\n## Plots of the filters Gain (dB) and phase shift response \n\n\n\n### low-pass - freq = 5.000 Hz - sample_rate = 48.000 samples/sec \n\n\n\n![Plot gain dB response](./plots/lowpass_gain.svg)\n\n![Plot phase shift response](./plots/lowpass_phase.svg) <br>\n\n\n\n### high-pass - freq = 5.000 Hz - sample_rate = 48.000 samples/sec\n\n\n\n![Plot gain dB response](./plots/highpass_gain.svg)\n\n![Plot phase shift response](./plots/highpass_phase.svg) <br>\n\n\n\n### band-pass - freq = 10.000 Hz - sample_rate = 48.000 samples/sec - q_factor = 1.0\n\n\n\n![Plot gain dB response](./plots/bandpass_gain.svg)\n\n![Plot phase shift response](./plots/bandpass_phase.svg) <br>\n\n\n\n### all-pass - freq = 10.000 Hz - sample_rate = 48.000 samples/sec\n\n\n\n![Plot gain dB response](./plots/allpass_gain.svg)\n\n![Plot phase shift response](./plots/allpass_phase.svg) <br>\n\n\n\n### peak - freq = 10.000 Hz - sample_rate = 48.000 samples/sec - gain = 6 dB\n\n\n\n![Plot gain dB response](./plots/peak_gain.svg)\n\n![Plot phase shift response](./plots/peak_phase.svg) <br>\n\n\n", "file_path": "README.md", "rank": 46, "score": 2.9167655106882004 }, { "content": " let mut planner = FftPlanner::<f32>::new();\n\n let fft = planner.plan_fft_forward(sample_rate);\n\n\n\n let mut buffer = vec![Complex{ re: 0.0_f32, im: 0.0_f32 }; sample_rate];\n\n\n\n for i in 0..outputs.len() {\n\n buffer[i].re = outputs[i] as f32;\n\n }\n\n\n\n fft.process(& mut buffer[..]);\n\n\n\n // Calculates the absolute value or the norm. \n\n let fft_out = buffer.iter().map(|c| c.norm() ).collect::<Vec<f32>>();\n\n // Transform the result into dB's.\n\n let fft_db = fft_out.iter().map(|val| 20.0 * f32::log10(*val) ).collect::<Vec<f32>>();\n\n\n\n\n\n // Display within reasonable bounds\n\n let (x_bound_min, x_bound_max) = (0_usize, sample_rate / 2 - 1 - 100 );\n\n let fft_db = & fft_db[x_bound_min..x_bound_max];\n", "file_path": "src/show_response.rs", "rank": 47, "score": 2.81991270568186 }, { "content": " let mut planner = FftPlanner::<f32>::new();\n\n let fft = planner.plan_fft_forward(sample_rate);\n\n\n\n let mut buffer = vec![Complex{ re: 0.0_f32, im: 0.0_f32 }; sample_rate];\n\n\n\n for i in 0..outputs.len() {\n\n buffer[i].re = outputs[i] as f32;\n\n }\n\n\n\n fft.process(& mut buffer[..]);\n\n\n\n // Calculates the phase angle or the atan(b/a) for a complex number c = a + bj . \n\n // let fft_out = buffer.iter().map(|c| c.atan().re ).collect::<Vec<f32>>();\n\n let fft_out = buffer.iter().map(|c| f32::atan2(c.im, c.re) ).collect::<Vec<f32>>();\n\n\n\n // Display within reasonable bounds\n\n let (x_bound_min, x_bound_max) = (0_usize, sample_rate / 2 - 1 - 150 );\n\n let fft_out = & fft_out[x_bound_min..x_bound_max];\n\n let bounds = get_bounds(& fft_out, sample_rate, x_bound_max);\n\n // let (y_bound_min, y_bound_max) = (f32::max(-80.0, bounds.0), f32::min(80.0, bounds.1) );\n", "file_path": "src/show_response.rs", "rank": 48, "score": 2.683301681575874 }, { "content": " if a_coeffs.len() < self.order + 1 {\n\n self.a_coeffs.push(1.0);\n\n self.a_coeffs.extend(a_coeffs);\n\n } else {\n\n self.a_coeffs.extend(a_coeffs);\n\n }\n\n self.b_coeffs.clear();\n\n self.b_coeffs.extend(b_coeffs);\n\n \n\n Ok(())\n\n }\n\n\n\n}\n\n\n\nimpl ProcessingBlock for IIRFilter {\n\n\n\n /// Calculate y[n]\n\n /// \n\n /// In Python\n\n /// >>> filt = IIRFilter(2)\n", "file_path": "src/iir_filter.rs", "rank": 49, "score": 2.6258649820395767 }, { "content": "/// 11. How to learn modern Rust\n\n/// https://github.com/joaocarvalhoopen/How_to_learn_modern_Rust\n\n///\n\n/// \n\n/// 10 Band Equalizer\n\n/// \n\n/// 12. Making an EQ from cascading filters\n\n/// https://dsp.stackexchange.com/questions/10309/making-an-eq-from-cascading-filters\n\n/// \n\n/// 13. PEAK/NOTCH FILTER DESIGN\n\n/// https://www.dsprelated.com/showcode/169.php\n\n/// \n\n/// 14. The Equivalence of Various Methods of Computing\n\n/// Biquad Coefficients for Audio Parametric Equalizers\n\n/// http://www.thesounddesign.com/MIO/EQ-Coefficients.pdf\n\n///\n\n/// 15. How to learn modern Rust\n\n/// https://github.com/joaocarvalhoopen/How_to_learn_modern_Rust\n\n///\n\n\n", "file_path": "src/show_response.rs", "rank": 50, "score": 2.583676947925235 }, { "content": "/// 11. How to learn modern Rust\n\n/// https://github.com/joaocarvalhoopen/How_to_learn_modern_Rust\n\n///\n\n/// \n\n/// 10 Band Equalizer\n\n/// \n\n/// 12. Making an EQ from cascading filters\n\n/// https://dsp.stackexchange.com/questions/10309/making-an-eq-from-cascading-filters\n\n/// \n\n/// 13. PEAK/NOTCH FILTER DESIGN\n\n/// https://www.dsprelated.com/showcode/169.php\n\n/// \n\n/// 14. The Equivalence of Various Methods of Computing\n\n/// Biquad Coefficients for Audio Parametric Equalizers\n\n/// http://www.thesounddesign.com/MIO/EQ-Coefficients.pdf\n\n///\n\n/// 15. How to learn modern Rust\n\n/// https://github.com/joaocarvalhoopen/How_to_learn_modern_Rust\n\n///\n\n\n", "file_path": "src/butterworth_filter.rs", "rank": 51, "score": 2.583676947925235 }, { "content": "/// 11. How to learn modern Rust\n\n/// https://github.com/joaocarvalhoopen/How_to_learn_modern_Rust\n\n///\n\n/// \n\n/// 10 Band Equalizer\n\n/// \n\n/// 12. Making an EQ from cascading filters\n\n/// https://dsp.stackexchange.com/questions/10309/making-an-eq-from-cascading-filters\n\n/// \n\n/// 13. PEAK/NOTCH FILTER DESIGN\n\n/// https://www.dsprelated.com/showcode/169.php\n\n/// \n\n/// 14. The Equivalence of Various Methods of Computing\n\n/// Biquad Coefficients for Audio Parametric Equalizers\n\n/// http://www.thesounddesign.com/MIO/EQ-Coefficients.pdf\n\n///\n\n/// 15. How to learn modern Rust\n\n/// https://github.com/joaocarvalhoopen/How_to_learn_modern_Rust\n\n///\n\n\n", "file_path": "src/main.rs", "rank": 52, "score": 2.583676947925235 }, { "content": "/// 11. How to learn modern Rust\n\n/// https://github.com/joaocarvalhoopen/How_to_learn_modern_Rust\n\n///\n\n/// \n\n/// 10 Band Equalizer\n\n/// \n\n/// 12. Making an EQ from cascading filters\n\n/// https://dsp.stackexchange.com/questions/10309/making-an-eq-from-cascading-filters\n\n/// \n\n/// 13. PEAK/NOTCH FILTER DESIGN\n\n/// https://www.dsprelated.com/showcode/169.php\n\n/// \n\n/// 14. The Equivalence of Various Methods of Computing\n\n/// Biquad Coefficients for Audio Parametric Equalizers\n\n/// http://www.thesounddesign.com/MIO/EQ-Coefficients.pdf\n\n///\n\n/// 15. How to learn modern Rust\n\n/// https://github.com/joaocarvalhoopen/How_to_learn_modern_Rust\n\n///\n\n\n\n\n", "file_path": "src/iir_filter.rs", "rank": 53, "score": 2.583676947925235 }, { "content": " let res_coeffs: Vec<&f64> = filter.a_coeffs.iter().chain(filter.b_coeffs.iter()).collect(); \n\n print_values(& target_vec, & res_coeffs);\n\n for i in 0..target_vec.len() {\n\n assert_eq!(*(res_coeffs[i]), target_vec[i]);\n\n }\n\n\n\n // assert_eq!(true, false);\n\n }\n\n\n\n}\n\n\n", "file_path": "src/butterworth_filter.rs", "rank": 54, "score": 2.423928089338 }, { "content": " let b2 = big_a * (pmc - aa2);\n\n let a0 = ppmc + aa2;\n\n let a1 = -2.0 * pmpc;\n\n let a2 = ppmc - aa2;\n\n\n\n let filter_order = 2;\n\n let mut filter = IIRFilter::new(filter_order);\n\n let _ = filter.set_coefficients(& [a0, a1, a2], & [b0, b1, b2]);\n\n \n\n filter\n\n}\n\n\n", "file_path": "src/butterworth_filter.rs", "rank": 55, "score": 2.3884184060014437 }, { "content": " let b2 = big_a * (ppmc - aa2);\n\n let a0 = pmc + aa2;\n\n let a1 = 2.0 * mpc;\n\n let a2 = pmc - aa2;\n\n\n\n let filter_order = 2;\n\n let mut filter = IIRFilter::new(filter_order);\n\n let _ = filter.set_coefficients(& [a0, a1, a2], & [b0, b1, b2]);\n\n \n\n filter\n\n}\n\n\n\n\n", "file_path": "src/butterworth_filter.rs", "rank": 56, "score": 2.3884184060014437 }, { "content": "\n\n // 2º case.\n\n let a_coeffs = [1.0, 0.0, 0.0];\n\n let b_coeffs = [0.0, 0.0, 0.0];\n\n let filter_order: usize = 2;\n\n let mut filter = IIRFilter::new(filter_order);\n\n let res_coef = filter.set_coefficients(& a_coeffs, & b_coeffs);\n\n assert!(res_coef.is_ok());\n\n let res = filter.process(0.0);\n\n assert!((res - 0.0).abs() < 0.00001);\n\n\n\n println!(\"filter res: {} , should be 0.0 .\", res);\n\n\n\n // assert_eq!(true, false);\n\n }\n\n\n\n}\n\n\n", "file_path": "src/iir_filter.rs", "rank": 57, "score": 2.260482778384333 }, { "content": " print!(\"{}, \", str_t); \n\n }\n\n println!(\"\");\n\n }\n\n\n\n #[test]\n\n fn test_make_lowpass() {\n\n // >>> filter = make_lowpass(1000, 48000)\n\n // >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE\n\n // [1.0922959556412573, -1.9828897227476208, 0.9077040443587427, 0.004277569313094809,\n\n // 0.008555138626189618, 0.004277569313094809]\n\n \n\n let frequency = 1_000.0; // Hz\n\n let sample_rate = 48_000; // Samples\n\n let filter = make_lowpass(frequency, sample_rate, None);\n\n \n\n let target_vec = vec![1.0922959556412573, -1.9828897227476208, 0.9077040443587427,\n\n 0.004277569313094809, 0.008555138626189618, 0.004277569313094809];\n\n \n\n let res_coeffs: Vec<&f64> = filter.a_coeffs.iter().chain(filter.b_coeffs.iter()).collect();\n", "file_path": "src/butterworth_filter.rs", "rank": 58, "score": 2.22595702334123 }, { "content": "### peakEQ_const_Q G_pos - freq = 10.000 Hz - sample_rate = 48.000 samples/sec - gain = 5 dB\n\nq_factor = 2 * sqrt(2) = 2.828 <br>\n\n\n\n![Plot gain dB response](./plots/peak_eq_pos_g_gain.svg)\n\n![Plot phase shift response](./plots/peak_eq_pos_g_phase.svg) <br>\n\n\n\n### peakEQ_const_Q G_neg - freq = 10.000 Hz - sample_rate = 48.000 samples/sec - gain = -5 dB\n\nq_factor = 2 * sqrt(2) = 2.828 <br>\n\n\n\n![Plot gain dB response](./plots/peak_eq_neg_g_gain.svg)\n\n![Plot phase shift response](./plots/peak_eq_neg_g_phase.svg) <br>\n\n\n\n### low-shelf - freq = 10.000 Hz - sample_rate = 48.000 samples/sec - gain = 6 dB\n\n\n\n![Plot gain dB response](./plots/lowshelf_gain.svg)\n\n![Plot phase shift response](./plots/lowshelf_phase.svg) <br>\n\n\n\n### high-shelf - freq = 10.000 Hz - sample_rate = 48.000 samples/sec - gain = 6 dB\n\n\n\n![Plot gain dB response](./plots/highshelf_gain.svg)\n\n![Plot phase shift response](./plots/highshelf_phase.svg) <br>\n\n\n\n### notch - freq = 10.000 Hz - sample_rate = 48.000 samples/sec - q_factor = 0.05\n\n\n\n![Plot gain dB response](./plots/notch_gain.svg)\n\n![Plot phase shift response](./plots/notch_phase.svg) <br>\n\n\n\n### 10 band Equalizer - sample_rate = 48.000 samples/sec - q_factor = 2.828\n\n```\n\n10 Band Equalizer\n\n\n\nband_0: 29 Hz, gain: -10 dB\n\nband_1: 59 Hz, gain: 0 dB\n\nband_2: 119 Hz, gain: -5 dB\n\nband_3: 237 Hz, gain: 5 dB\n\nband_4: 474 Hz, gain: 0 dB\n\nband_5: 947 Hz, gain: -5 dB\n\nband_6: 1.889 Hz, gain: 0 dB\n\nband_7: 3.770 Hz, gain: 5 dB\n\nband_8: 7.523 Hz, gain: 10 dB\n\nband_9: 15.011 Hz, gain: 12 dB\n\n```\n\n\n\n![Plot gain dB response](./plots/equalizer_10_band_gain.svg)\n\n![Plot phase shift response](./plots/equalizer_10_band_phase.svg) <br>\n\n\n\n\n", "file_path": "README.md", "rank": 59, "score": 2.2050192973473397 }, { "content": " 0.9957224306869052, -1.9914448613738105, 0.9957224306869052];\n\n \n\n let res_coeffs: Vec<&f64> = filter.a_coeffs.iter().chain(filter.b_coeffs.iter()).collect();\n\n print_values(& target_vec, & res_coeffs);\n\n for i in 0..target_vec.len() {\n\n assert_eq!(*(res_coeffs[i]), target_vec[i]);\n\n }\n\n\n\n // assert_eq!(true, false);\n\n }\n\n\n\n #[test]\n\n fn test_make_bandpass() {\n\n // >>> filter = make_bandpass(1000, 48000)\n\n // >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE\n\n // [1.0922959556412573, -1.9828897227476208, 0.9077040443587427, 0.06526309611002579,\n\n // 0, -0.06526309611002579]\n\n \n\n let frequency = 1_000.0; // Hz\n\n let sample_rate = 48_000; // Samples\n", "file_path": "src/butterworth_filter.rs", "rank": 60, "score": 2.1159807359636633 }, { "content": " \n\n let frequency = 1_000.0; // Hz\n\n let sample_rate = 48_000; // Samples\n\n let filter = make_allpass(frequency, sample_rate, None);\n\n \n\n let target_vec = vec![1.0922959556412573, -1.9828897227476208, 0.9077040443587427,\n\n 0.9077040443587427, -1.9828897227476208, 1.0922959556412573];\n\n \n\n let res_coeffs: Vec<&f64> = filter.a_coeffs.iter().chain(filter.b_coeffs.iter()).collect();\n\n print_values(& target_vec, & res_coeffs);\n\n for i in 0..target_vec.len() {\n\n assert_eq!(*(res_coeffs[i]), target_vec[i]);\n\n }\n\n\n\n // assert_eq!(true, false);\n\n }\n\n\n\n #[test]\n\n fn test_make_peak() {\n\n // >>> filter = make_peak(1000, 48000, 6)\n", "file_path": "src/butterworth_filter.rs", "rank": 61, "score": 2.0865271434709243 }, { "content": " b_coeffs[0] = 1.0;\n\n b_coeffs },\n\n // x[n-1] ... x[n-k]\n\n input_history: vec![0.0; order],\n\n // y[n-1] ... y[n-k]\n\n output_history: vec![0.0; order],\n\n }\n\n }\n\n\n\n /// Set the coefficients for the IIR filter. These should both be of size order + 1.\n\n /// a_0 may be left out, and it will use 1.0 as default value.\n\n ///\n\n /// This method works well with scipy's filter design functions\n\n /// >>> # Make a 2nd-order 1000Hz butterworth lowpass filter\n\n /// >>> import scipy.signal\n\n /// >>> b_coeffs, a_coeffs = scipy.signal.butter(2, 1000,\n\n /// ... btype='lowpass',\n\n /// ... fs=48000)\n\n /// >>> filt = IIRFilter(2)\n\n /// >>> filt.set_coefficients(a_coeffs, b_coeffs)\n", "file_path": "src/iir_filter.rs", "rank": 62, "score": 2.084560662282179 }, { "content": " let filter = make_bandpass(frequency, sample_rate, None);\n\n\n\n let target_vec = vec![1.0922959556412573, -1.9828897227476208, 0.9077040443587427,\n\n 0.06526309611002579, 0.0, -0.06526309611002579];\n\n\n\n let res_coeffs: Vec<&f64> = filter.a_coeffs.iter().chain(filter.b_coeffs.iter()).collect();\n\n print_values(& target_vec, & res_coeffs);\n\n for i in 0..target_vec.len() {\n\n assert_eq!(*(res_coeffs[i]), target_vec[i]);\n\n }\n\n\n\n // assert_eq!(true, false);\n\n }\n\n\n\n #[test]\n\n fn test_make_allpass() {\n\n // >>> filter = make_allpass(1000, 48000)\n\n // >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE\n\n // [1.0922959556412573, -1.9828897227476208, 0.9077040443587427, 0.9077040443587427,\n\n // -1.9828897227476208, 1.0922959556412573]\n", "file_path": "src/butterworth_filter.rs", "rank": 63, "score": 2.057882258193523 }, { "content": " let res = filter.process(0.0);\n\n assert!((res - 0.0).abs() < 0.00001);\n\n\n\n println!(\"filter res: {} , should be 0.0 .\", res);\n\n // assert_eq!(true, false);\n\n }\n\n\n\n #[test]\n\n fn test_iir_filter_001() {\n\n // 1º case.\n\n let a_coeffs = [0.0, 0.0];\n\n let b_coeffs = [0.0, 0.0, 0.0];\n\n let filter_order: usize = 2;\n\n let mut filter = IIRFilter::new(filter_order);\n\n let res_coef = filter.set_coefficients(& a_coeffs, & b_coeffs);\n\n assert!(res_coef.is_ok());\n\n let res = filter.process(0.0);\n\n assert!((res - 0.0).abs() < 0.00001);\n\n\n\n println!(\"filter res: {} , should be 0.0 .\", res);\n", "file_path": "src/iir_filter.rs", "rank": 64, "score": 2.041750097763309 }, { "content": " [http://ethanwiner.com/filters.html](http://ethanwiner.com/filters.html)\n\n\n\n9. **Audio filter - Wikipedia** <br>\n\n [https://en.wikipedia.org/wiki/Audio_filter](https://en.wikipedia.org/wiki/Audio_filter)\n\n\n\n11. **Electronic filter - Wikipedia** <br>\n\n [https://en.wikipedia.org/wiki/Electronic_filter](https://en.wikipedia.org/wiki/Electronic_filter)\n\n\n\n**10 Bands Equalizer** <br>\n\n\n\n12. **Making an EQ from cascading filters** <br>\n\n [https://dsp.stackexchange.com/questions/10309/making-an-eq-from-cascading-filters](https://dsp.stackexchange.com/questions/10309/making-an-eq-from-cascading-filters)\n\n \n\n13. **Peak / Notch filter design** <br>\n\n [https://www.dsprelated.com/showcode/169.php](https://www.dsprelated.com/showcode/169.php)\n\n\n\n14. **The Equivalence of Various Methods of Computing** <br>\n\n **Biquad Coefficients for Audio Parametric Equalizers** <br>\n\n [http://www.thesounddesign.com/MIO/EQ-Coefficients.pdf](http://www.thesounddesign.com/MIO/EQ-Coefficients.pdf)\n\n\n\n15. **How to learn modern Rust** <br>\n\n [https://github.com/joaocarvalhoopen/How_to_learn_modern_Rust](https://github.com/joaocarvalhoopen/How_to_learn_modern_Rust)\n\n\n\n\n", "file_path": "README.md", "rank": 65, "score": 2.0274232705183395 }, { "content": " // >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE\n\n // [1.0653405327119334, -1.9828897227476208, 0.9346594672880666, 1.1303715025601122,\n\n // -1.9828897227476208, 0.8696284974398878]\n\n\n\n let frequency = 1_000.0; // Hz\n\n let sample_rate = 48_000; // Samples\n\n let gain_db = 6.0; // dB\n\n let filter = make_peak(frequency, sample_rate, gain_db, None);\n\n \n\n let target_vec = vec![1.0653405327119334, -1.9828897227476208, 0.9346594672880666,\n\n 1.1303715025601122, -1.9828897227476208, 0.8696284974398878];\n\n \n\n let res_coeffs: Vec<&f64> = filter.a_coeffs.iter().chain(filter.b_coeffs.iter()).collect();\n\n print_values(& target_vec, & res_coeffs);\n\n for i in 0..target_vec.len() {\n\n assert_eq!(*(res_coeffs[i]), target_vec[i]);\n\n }\n\n\n\n // assert_eq!(true, false);\n\n }\n", "file_path": "src/butterworth_filter.rs", "rank": 66, "score": 2.0028889415397626 }, { "content": "\n\n #[test]\n\n fn test_make_lowshelf() {\n\n // >>> filter = make_lowshelf(1000, 48000, 6)\n\n // >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE\n\n // [3.0409336710888786, -5.608870992220748, 2.602157875636628, 3.139954022810743,\n\n // -5.591841778072785, 2.5201667380627257]\n\n\n\n let frequency = 1_000.0; // Hz\n\n let sample_rate = 48_000; // Samples\n\n let gain_db = 6.0; // dB\n\n let filter = make_lowshelf(frequency, sample_rate, gain_db, None);\n\n\n\n let target_vec = vec![3.0409336710888786, -5.608870992220748, 2.602157875636628,\n\n 3.139954022810743, -5.591841778072785, 2.5201667380627257];\n\n\n\n let res_coeffs: Vec<&f64> = filter.a_coeffs.iter().chain(filter.b_coeffs.iter()).collect();\n\n print_values(& target_vec, & res_coeffs);\n\n for i in 0..target_vec.len() {\n\n assert_eq!(*(res_coeffs[i]), target_vec[i]);\n", "file_path": "src/butterworth_filter.rs", "rank": 67, "score": 1.9764799506096642 }, { "content": " b1 = (2.0 * (_k_sqr - 1.0)) / (1.0 + ((1.0 / q) * k) + _k_sqr);\n\n b2 = (1.0 - ((v0 / q) * k) + _k_sqr) / (1.0 + ((1.0 / q) * k) + _k_sqr);\n\n a1 = b1;\n\n a2 = (1.0 - ((1.0 / q) * k) + _k_sqr) / (1.0 + ((1.0 / q) * k) + _k_sqr);\n\n \n\n //***********\n\n // CUT\n\n //***********\n\n } else { \n\n b0 = (1.0 + ((1.0 / q) * k) + _k_sqr) / (1.0 + ((v0 / q) * k) + _k_sqr);\n\n b1 = (2.0 * (_k_sqr - 1.0)) / (1.0 + ((v0 / q) * k) + _k_sqr);\n\n b2 = (1.0 - ((1.0 / q) * k) + _k_sqr) / (1.0 + ((v0 / q) * k) + _k_sqr);\n\n a1 = b1;\n\n a2 = (1.0 - ((v0 / q) * k) + _k_sqr) / (1.0 + ((v0 / q) * k) + _k_sqr);\n\n }\n\n\n\n let filter_order = 2;\n\n let mut filter = IIRFilter::new(filter_order);\n\n // Note: The BiQuad filter fill's in the a0 with i.0 automatically.\n\n let _ = filter.set_coefficients(& [a1, a2], & [b0, b1, b2]);\n\n \n\n filter\n\n}\n\n\n", "file_path": "src/butterworth_filter.rs", "rank": 68, "score": 1.9735473317844199 }, { "content": "/// -low-shelf\n\n/// -high-shelf \n\n/// -notch\n\n/// -10 band equalizer\n\n/// \n\n/// License: MIT Open Source License, like the original license from\n\n/// GitHub - TheAlgorithms / Python / audio_filters\n\n/// https://github.com/TheAlgorithms/Python/tree/master/audio_filters\n\n///\n\n/// How to run the code. \n\n/// \n\n/// To make a project for this files do:\n\n/// -Install Rust your computer (Linux, Win, Mac, Raspberry Pi).\n\n/// \n\n/// cargo new audio_filters_in_rust\n\n/// cd audio_filters_in_rust\n\n/// \n\n/// -Copy the repository files to this directory and overlap them.\n\n/// \n\n/// To compile do:\n", "file_path": "src/iir_filter.rs", "rank": 69, "score": 1.9502973675920736 }, { "content": "/// -low-shelf\n\n/// -high-shelf \n\n/// -notch\n\n/// -10 band equalizer\n\n/// \n\n/// License: MIT Open Source License, like the original license from\n\n/// GitHub - TheAlgorithms / Python / audio_filters\n\n/// https://github.com/TheAlgorithms/Python/tree/master/audio_filters\n\n///\n\n/// How to run the code. \n\n/// \n\n/// To make a project for this files do:\n\n/// -Install Rust your computer (Linux, Win, Mac, Raspberry Pi).\n\n/// \n\n/// cargo new audio_filters_in_rust\n\n/// cd audio_filters_in_rust\n\n/// \n\n/// -Copy the repository files to this directory and overlap them.\n\n/// \n\n/// To compile do:\n", "file_path": "src/butterworth_filter.rs", "rank": 70, "score": 1.9502973675920736 }, { "content": "/// -low-shelf\n\n/// -high-shelf \n\n/// -notch\n\n/// -10 band equalizer\n\n/// \n\n/// License: MIT Open Source License, like the original license from\n\n/// GitHub - TheAlgorithms / Python / audio_filters\n\n/// https://github.com/TheAlgorithms/Python/tree/master/audio_filters\n\n///\n\n/// How to run the code. \n\n/// \n\n/// To make a project for this files do:\n\n/// -Install Rust your computer (Linux, Win, Mac, Raspberry Pi).\n\n/// \n\n/// cargo new audio_filters_in_rust\n\n/// cd audio_filters_in_rust\n\n/// \n\n/// -Copy the repository files to this directory and overlap them.\n\n/// \n\n/// To compile do:\n", "file_path": "src/show_response.rs", "rank": 71, "score": 1.9502973675920736 }, { "content": "/// -low-shelf\n\n/// -high-shelf \n\n/// -notch\n\n/// -10 band equalizer\n\n/// \n\n/// License: MIT Open Source License, like the original license from\n\n/// GitHub - TheAlgorithms / Python / audio_filters\n\n/// https://github.com/TheAlgorithms/Python/tree/master/audio_filters\n\n///\n\n/// How to run the code. \n\n/// \n\n/// To make a project for this files do:\n\n/// -Install Rust your computer (Linux, Win, Mac, Raspberry Pi).\n\n/// \n\n/// cargo new audio_filters_in_rust\n\n/// cd audio_filters_in_rust\n\n/// \n\n/// -Copy the repository files to this directory and overlap them.\n\n/// \n\n/// To compile do:\n", "file_path": "src/main.rs", "rank": 72, "score": 1.9502973675920736 }, { "content": "/// cargo build --release\n\n/// \n\n/// To run do:\n\n/// cargo run --release\n\n/// \n\n/// to run the tests do:\n\n/// cargo test\n\n/// \n\n/// References:\n\n/// 1. GitHub - TheAlgorithms / Python / audio_filters\n\n/// https://github.com/TheAlgorithms/Python/tree/master/audio_filters\n\n///\n\n/// 2. WebAudio - Cookbook formulae for audio equalizer biquad filter coefficients\n\n/// https://webaudio.github.io/Audio-EQ-Cookbook/audio-eq-cookbook.html \n\n/// \n\n/// 3. Good resources on DSP – Digital Signal Programming\n\n/// https://github.com/joaocarvalhoopen/How_to_learn_modern_electronics#dsp--digital-signal-programming\n\n///\n\n/// 4. Biquads - EarLevel\n\n/// http://www.earlevel.com/main/2003/02/28/biquads/\n", "file_path": "src/butterworth_filter.rs", "rank": 73, "score": 1.7619565013046903 }, { "content": "/// cargo build --release\n\n/// \n\n/// To run do:\n\n/// cargo run --release\n\n/// \n\n/// to run the tests do:\n\n/// cargo test\n\n/// \n\n/// References:\n\n/// 1. GitHub - TheAlgorithms / Python / audio_filters\n\n/// https://github.com/TheAlgorithms/Python/tree/master/audio_filters\n\n///\n\n/// 2. WebAudio - Cookbook formulae for audio equalizer biquad filter coefficients\n\n/// https://webaudio.github.io/Audio-EQ-Cookbook/audio-eq-cookbook.html \n\n/// \n\n/// 3. Good resources on DSP – Digital Signal Programming\n\n/// https://github.com/joaocarvalhoopen/How_to_learn_modern_electronics#dsp--digital-signal-programming\n\n///\n\n/// 4. Biquads - EarLevel\n\n/// http://www.earlevel.com/main/2003/02/28/biquads/\n", "file_path": "src/main.rs", "rank": 74, "score": 1.7619565013046903 }, { "content": "/// cargo build --release\n\n/// \n\n/// To run do:\n\n/// cargo run --release\n\n/// \n\n/// to run the tests do:\n\n/// cargo test\n\n/// \n\n/// References:\n\n/// 1. GitHub - TheAlgorithms / Python / audio_filters\n\n/// https://github.com/TheAlgorithms/Python/tree/master/audio_filters\n\n///\n\n/// 2. WebAudio - Cookbook formulae for audio equalizer biquad filter coefficients\n\n/// https://webaudio.github.io/Audio-EQ-Cookbook/audio-eq-cookbook.html \n\n/// \n\n/// 3. Good resources on DSP – Digital Signal Programming\n\n/// https://github.com/joaocarvalhoopen/How_to_learn_modern_electronics#dsp--digital-signal-programming\n\n///\n\n/// 4. Biquads - EarLevel\n\n/// http://www.earlevel.com/main/2003/02/28/biquads/\n", "file_path": "src/iir_filter.rs", "rank": 75, "score": 1.7619565013046903 }, { "content": "/// cargo build --release\n\n/// \n\n/// To run do:\n\n/// cargo run --release\n\n/// \n\n/// to run the tests do:\n\n/// cargo test\n\n/// \n\n/// References:\n\n/// 1. GitHub - TheAlgorithms / Python / audio_filters\n\n/// https://github.com/TheAlgorithms/Python/tree/master/audio_filters\n\n///\n\n/// 2. WebAudio - Cookbook formulae for audio equalizer biquad filter coefficients\n\n/// https://webaudio.github.io/Audio-EQ-Cookbook/audio-eq-cookbook.html \n\n/// \n\n/// 3. Good resources on DSP – Digital Signal Programming\n\n/// https://github.com/joaocarvalhoopen/How_to_learn_modern_electronics#dsp--digital-signal-programming\n\n///\n\n/// 4. Biquads - EarLevel\n\n/// http://www.earlevel.com/main/2003/02/28/biquads/\n", "file_path": "src/show_response.rs", "rank": 76, "score": 1.7619565013046903 }, { "content": "## How to run the code \n\n```\n\nTo make a project for this files do:\n\n -Install Rust your computer (Linux, Win, Mac, Raspberry Pi).\n\n \n\n cargo new audio_filters_in_rust\n\n cd audio_filters_in_rust\n\n \n\n -Copy the repository files to this directory and overlap them.\n\n \n\nTo compile do:\n\n cargo build --release\n\n \n\nTo run do:\n\n cargo run --release\n\n \n\nto run the tests do:\n\n cargo test\n\n```\n\n\n\n\n\n## References:\n\n\n\n1. **GitHub - TheAlgorithms / Python / audio_filters** <br>\n\n [https://github.com/TheAlgorithms/Python/tree/master/audio_filters](https://github.com/TheAlgorithms/Python/tree/master/audio_filters)\n\n\n\n2. **WebAudio - Cookbook formulae for audio equalizer biquad filter coefficients** <br>\n\n [https://webaudio.github.io/Audio-EQ-Cookbook/audio-eq-cookbook.html](https://webaudio.github.io/Audio-EQ-Cookbook/audio-eq-cookbook.html)\n\n\n\n3. **Good resources on DSP** – Digital Signal Processing <br> \n\n [https://github.com/joaocarvalhoopen/How_to_learn_modern_electronics#dsp--digital-signal-programming](https://github.com/joaocarvalhoopen/How_to_learn_modern_electronics#dsp--digital-signal-programming)\n\n\n\n4. **Biquads - EarLevel** <br>\n\n [http://www.earlevel.com/main/2003/02/28/biquads/](http://www.earlevel.com/main/2003/02/28/biquads/)\n\n\n\n5. **Biquad C++ source code - EarLevel** <br>\n\n [https://www.earlevel.com/main/2012/11/26/biquad-c-source-code/](https://www.earlevel.com/main/2012/11/26/biquad-c-source-code/)\n\n\n\n6. **A biquad calculator v3 - EarLevel** <br>\n\n [https://www.earlevel.com/main/2021/09/02/biquad-calculator-v3/](https://www.earlevel.com/main/2021/09/02/biquad-calculator-v3/)\n\n\n\n7. **WebAudio API - Mozilla Docs** <br>\n\n [https://developer.mozilla.org/en-US/docs/Web/API/Web_Audio_API](https://developer.mozilla.org/pt-BR/docs/Web/API/Web_Audio_API)\n\n \n\n8. **Audio Filters - Theory and Practice** <br>\n\n by Ethan Winer <br>\n", "file_path": "README.md", "rank": 77, "score": 0.8908783042355926 } ]
Rust
sandbox/src/tilemap.rs
aengusmcmillin/Gouda
4aed65c94ad4147372d71c08622dacf01f5eac4e
use gouda::rendering::{drawable::{TextureDrawable, QuadDrawable}, Renderer, Scene, texture::RenderableTexture}; use gouda::ecs::{ECS, Entity, GenIndex}; use std::rc::Rc; use crate::camera::Camera; use gouda::bmp::Bitmap; use gouda::mouse_capture::{MouseCaptureArea, MouseCaptureLayer, ActiveCaptureLayer}; use gouda::types::{Bounds, Direction}; use gouda::images::Image; use gouda::images::png::PNG; use crate::hearth::Hearth; const GRASS_COLOR: [f32; 3] = [0.2, 0.4, 0.3]; const HEARTH_COLOR: [f32; 3] = [0.5, 0.2, 0.2]; const BORDER_COLOR: [f32; 3] = [0.5, 0.5, 0.5]; #[derive(Debug)] pub struct Tile { pub x: i32, pub y: i32, pub occupied: bool, neighbors: [Option<Entity>; 4], color_drawable: Option<QuadDrawable>, texture_drawable: Option<TextureDrawable>, } impl Tile { pub fn create_image_tile(image: &Image, ecs: &mut ECS, x: usize, y: usize) -> Entity { Self::create_texture_tile(ecs, image, x, y) } pub fn neighbor(&self, direction: Direction) -> Option<Entity> { self.neighbors[direction as usize] } fn create_texture_tile(ecs: &mut ECS, image: &Image, x: usize, y: usize) -> Entity { let renderer = ecs.read_res::<Rc<Renderer>>(); let drawable = TextureDrawable::new(false, renderer, RenderableTexture::new(renderer, image), [-5. + x as f32, -3. + y as f32, 0.], [0.52, 0.52, 1.], [0.; 3]); let tile = Tile { color_drawable: None, texture_drawable: Some(drawable), occupied: false, x: x as i32 - 5, y: y as i32 - 3, neighbors: [None; 4], }; ecs.build_entity().add(tile).add(MouseCaptureArea::new(Bounds{x: x as i32 * 80, y: y as i32 * 80 + 160, w: 80, h: 80})).entity() } fn create_tile(ecs: &mut ECS, color: [f32; 3], x: usize, y: usize) -> Entity { let renderer = ecs.read_res::<Rc<Renderer>>(); let quad = QuadDrawable::new(false, renderer, color, [-5. + x as f32 * 1., -3. + y as f32 * 1., 0.], [0.5, 0.5, 1.], [0.; 3]); let tile = Tile { color_drawable: Some(quad), texture_drawable: None, occupied: false, x: x as i32 - 5, y: y as i32 - 3, neighbors: [None; 4], }; ecs.build_entity().add(tile).add(MouseCaptureArea::new(Bounds{x: x as i32 * 80, y: y as i32 * 80 + 160, w: 80, h: 80})).entity() } pub fn draw(&self, scene: &Scene, camera: &Camera) { if let Some(drawable) = &self.color_drawable { drawable.draw_with_projection(&scene, &camera.projection_buffer); } if let Some(drawable) = &self.texture_drawable { drawable.draw_with_projection(&scene, &camera.projection_buffer); } } } pub struct Tilemap { tiles: Vec<Vec<Entity>>, borders: Vec<Entity>, } fn set_neighbors(tile: &mut Tile, x: usize, y: usize, tiles: &Vec<Vec<Entity>>) { tile.neighbors = [ if y > 0 { Some(tiles[x][y - 1]) } else { None }, if x < (tiles.len() - 1) { Some(tiles[x + 1][y]) } else { None }, if y < (tiles[x].len() - 1) { Some(tiles[x][y + 1]) } else { None }, if x > 0 { Some(tiles[x - 1][y]) } else { None }, ] } impl Tilemap { pub fn borders(&self) -> &Vec<Entity> { return &self.borders; } pub fn create(ecs: &mut ECS) { let mut tiles: Vec<Vec<Entity>> = vec![Vec::with_capacity(9); 11]; let mut center_tile = None; let grass = PNG::from_file("bitmap/grass.png").unwrap().image(); let border = Bitmap::new("bitmap/grass2.bmp").unwrap().image(); let mut borders = vec!(); for x in 0..11 { for y in 0..9 { let tile = if x == 0 || x == 10 || y == 0 || y == 8 { let e = Tile::create_image_tile(&border, ecs, x, y); borders.push(e); e } else { Tile::create_image_tile(&grass, ecs, x, y) }; if x == 5 && y == 4 { center_tile = Some(tile.clone()); } tiles[x].push(tile); } } let mut all_tiles = vec![]; for tiles in &tiles { for tile in tiles { all_tiles.push(tile.clone()); } } for x in 0..11 { for y in 0..9 { let t = ecs.write::<Tile>(&tiles[x][y]).unwrap(); set_neighbors(t, x, y, &tiles); } } let capture_area = MouseCaptureLayer { sort_index: 0, capture_areas: all_tiles, }; ecs.build_entity().add(capture_area).add(ActiveCaptureLayer {}); let res = Tilemap { tiles, borders }; ecs.add_res(res); Hearth::create(ecs, center_tile.unwrap()); ecs.write::<Tile>(&center_tile.unwrap()).unwrap().occupied = true; } pub fn tile_at_pos(&self, x: usize, y: usize) -> Entity { self.tiles[x][y].clone() } pub fn pos_of_tile(&self, tile: Entity) -> (f32, f32) { let mut x = 0.; for column in &self.tiles { let mut y = 0.; for t in column { if tile == *t { return (x - 5., y - 3.); } y += 1.; } x += 1.; } return (0., 0.); } }
use gouda::rendering::{drawable::{TextureDrawable, QuadDrawable}, Renderer, Scene, texture::RenderableTexture}; use gouda::ecs::{ECS, Entity, GenIndex}; use std::rc::Rc; use crate::camera::Camera; use gouda::bmp::Bitmap; use gouda::mouse_capture::{MouseCaptureArea, MouseCaptureLayer, ActiveCaptureLayer}; use gouda::types::{Bounds, Direction}; use gouda::images::Image; use gouda::images::png::PNG; use crate::hearth::Hearth; const GRASS_COLOR: [f32; 3] = [0.2, 0.4, 0.3]; const HEARTH_COLOR: [f32; 3] = [0.5, 0.2, 0.2]; const BORDER_COLOR: [f32; 3] = [0.5, 0.5, 0.5]; #[derive(Debug)] pub struct Tile { pub x: i32, pub y: i32, pub occupied: bool, neighbors: [Option<Entity>; 4], color_drawable: Option<QuadDrawable>, texture_drawable: Option<TextureDrawable>, } impl Tile { pub fn create_image_tile(image: &Image, ecs: &mut ECS, x: usize, y: usize) -> Entity { Self::create_texture_tile(ecs, image, x, y) } pub fn neighbor(&self, direction: Direction) -> Option<Entity> { self.neighbors[direction as usize] } fn create_texture_tile(ecs: &mut ECS, image: &Image, x: usize, y: usize) -> Entity { let renderer = ecs.read_res::<Rc<Renderer>>(); let drawable = TextureDrawable::new(false, renderer, RenderableTexture::new(renderer, image), [-5. + x as f32, -3. + y as f32, 0.], [0.52, 0.52, 1.], [0.; 3]); let tile = Tile { color_drawable: None, texture_drawable: Some(drawable),
fn create_tile(ecs: &mut ECS, color: [f32; 3], x: usize, y: usize) -> Entity { let renderer = ecs.read_res::<Rc<Renderer>>(); let quad = QuadDrawable::new(false, renderer, color, [-5. + x as f32 * 1., -3. + y as f32 * 1., 0.], [0.5, 0.5, 1.], [0.; 3]); let tile = Tile { color_drawable: Some(quad), texture_drawable: None, occupied: false, x: x as i32 - 5, y: y as i32 - 3, neighbors: [None; 4], }; ecs.build_entity().add(tile).add(MouseCaptureArea::new(Bounds{x: x as i32 * 80, y: y as i32 * 80 + 160, w: 80, h: 80})).entity() } pub fn draw(&self, scene: &Scene, camera: &Camera) { if let Some(drawable) = &self.color_drawable { drawable.draw_with_projection(&scene, &camera.projection_buffer); } if let Some(drawable) = &self.texture_drawable { drawable.draw_with_projection(&scene, &camera.projection_buffer); } } } pub struct Tilemap { tiles: Vec<Vec<Entity>>, borders: Vec<Entity>, } fn set_neighbors(tile: &mut Tile, x: usize, y: usize, tiles: &Vec<Vec<Entity>>) { tile.neighbors = [ if y > 0 { Some(tiles[x][y - 1]) } else { None }, if x < (tiles.len() - 1) { Some(tiles[x + 1][y]) } else { None }, if y < (tiles[x].len() - 1) { Some(tiles[x][y + 1]) } else { None }, if x > 0 { Some(tiles[x - 1][y]) } else { None }, ] } impl Tilemap { pub fn borders(&self) -> &Vec<Entity> { return &self.borders; } pub fn create(ecs: &mut ECS) { let mut tiles: Vec<Vec<Entity>> = vec![Vec::with_capacity(9); 11]; let mut center_tile = None; let grass = PNG::from_file("bitmap/grass.png").unwrap().image(); let border = Bitmap::new("bitmap/grass2.bmp").unwrap().image(); let mut borders = vec!(); for x in 0..11 { for y in 0..9 { let tile = if x == 0 || x == 10 || y == 0 || y == 8 { let e = Tile::create_image_tile(&border, ecs, x, y); borders.push(e); e } else { Tile::create_image_tile(&grass, ecs, x, y) }; if x == 5 && y == 4 { center_tile = Some(tile.clone()); } tiles[x].push(tile); } } let mut all_tiles = vec![]; for tiles in &tiles { for tile in tiles { all_tiles.push(tile.clone()); } } for x in 0..11 { for y in 0..9 { let t = ecs.write::<Tile>(&tiles[x][y]).unwrap(); set_neighbors(t, x, y, &tiles); } } let capture_area = MouseCaptureLayer { sort_index: 0, capture_areas: all_tiles, }; ecs.build_entity().add(capture_area).add(ActiveCaptureLayer {}); let res = Tilemap { tiles, borders }; ecs.add_res(res); Hearth::create(ecs, center_tile.unwrap()); ecs.write::<Tile>(&center_tile.unwrap()).unwrap().occupied = true; } pub fn tile_at_pos(&self, x: usize, y: usize) -> Entity { self.tiles[x][y].clone() } pub fn pos_of_tile(&self, tile: Entity) -> (f32, f32) { let mut x = 0.; for column in &self.tiles { let mut y = 0.; for t in column { if tile == *t { return (x - 5., y - 3.); } y += 1.; } x += 1.; } return (0., 0.); } }
occupied: false, x: x as i32 - 5, y: y as i32 - 3, neighbors: [None; 4], }; ecs.build_entity().add(tile).add(MouseCaptureArea::new(Bounds{x: x as i32 * 80, y: y as i32 * 80 + 160, w: 80, h: 80})).entity() }
function_block-function_prefix_line
[ { "content": "pub fn win32_process_keyboard(keyboard: &mut KeyboardInput, vkcode: i32, was_down: bool, is_down: bool) {\n\n if was_down != is_down {\n\n if vkcode == VK_UP {\n\n win32_process_keyboard_message(\n\n &mut keyboard.special_keys[SpecialKeys::UpArrow],\n\n is_down,\n\n );\n\n } else if vkcode == VK_LEFT {\n\n win32_process_keyboard_message(\n\n &mut keyboard.special_keys[SpecialKeys::LeftArrow],\n\n is_down,\n\n );\n\n } else if vkcode == VK_DOWN {\n\n win32_process_keyboard_message(\n\n &mut keyboard.special_keys[SpecialKeys::DownArrow],\n\n is_down,\n\n );\n\n } else if vkcode == VK_RIGHT {\n\n win32_process_keyboard_message(\n\n &mut keyboard.special_keys[SpecialKeys::RightArrow],\n", "file_path": "src/platform/win32/win32_input.rs", "rank": 1, "score": 222205.92440643694 }, { "content": "pub fn change_stone_text(ecs: &mut ECS) {\n\n let e = ecs.get2::<StoneText, GuiText>().first().unwrap().clone();\n\n let font = ecs.read_res::<Rc<Font>>().clone();\n\n let renderer = ecs.read_res::<Rc<Renderer>>().clone();\n\n let gold = ecs.read_res::<Supplies>().stone;\n\n ecs.write::<GuiText>(&e).unwrap().change_text(&renderer, format!(\"STONE: {}\", gold), font);\n\n}\n\n\n\npub struct UpdateResourceTextMutation {\n\n\n\n}\n\n\n\nimpl Mutation for UpdateResourceTextMutation {\n\n fn apply(&self, ecs: &mut ECS) {\n\n change_gold_text(ecs);\n\n change_wood_text(ecs);\n\n change_stone_text(ecs);\n\n }\n\n}\n\n\n", "file_path": "sandbox/src/gui.rs", "rank": 2, "score": 189950.36876607488 }, { "content": "pub fn change_gold_text(ecs: &mut ECS) {\n\n let e = ecs.get2::<GoldText, GuiText>().first().unwrap().clone();\n\n let font = ecs.read_res::<Rc<Font>>().clone();\n\n let renderer = ecs.read_res::<Rc<Renderer>>().clone();\n\n let gold = ecs.read_res::<Supplies>().gold;\n\n ecs.write::<GuiText>(&e).unwrap().change_text(&renderer, format!(\"GOLD: {}\", gold), font);\n\n}\n\n\n", "file_path": "sandbox/src/gui.rs", "rank": 3, "score": 189950.36876607488 }, { "content": "pub fn change_wood_text(ecs: &mut ECS) {\n\n let e = ecs.get2::<WoodText, GuiText>().first().unwrap().clone();\n\n let font = ecs.read_res::<Rc<Font>>().clone();\n\n let renderer = ecs.read_res::<Rc<Renderer>>().clone();\n\n let gold = ecs.read_res::<Supplies>().wood;\n\n ecs.write::<GuiText>(&e).unwrap().change_text(&renderer, format!(\"WOOD: {}\", gold), font);\n\n}\n\n\n", "file_path": "sandbox/src/gui.rs", "rank": 4, "score": 189950.36876607488 }, { "content": "pub fn i32_from_bytes(bytes: [u8; 4]) -> i32 {\n\n ((bytes[0] as i32) << 24)\n\n + ((bytes[1] as i32) << 16)\n\n + ((bytes[2] as i32) << 8)\n\n + ((bytes[3] as i32) << 0)\n\n}\n", "file_path": "src/utils.rs", "rank": 5, "score": 183126.7007148657 }, { "content": "pub fn create_transformation_matrix(translate: [f32; 3], rot: [f32; 3], scale: [f32; 3]) -> Mat4x4 {\n\n let transform_mat = Mat4x4 {\n\n data: [\n\n [1., 0., 0., translate[0]],\n\n [0., 1., 0., translate[1]],\n\n [0., 0., 1., translate[2]],\n\n [0., 0., 0., 1.],\n\n ]\n\n };\n\n\n\n let xrot = Mat4x4::x_rot_matrix(rot[0]);\n\n let yrot = Mat4x4::y_rot_matrix(rot[1]);\n\n let zrot = Mat4x4::z_rot_matrix(rot[2]);\n\n\n\n let scale_mat = Mat4x4 {\n\n data: [\n\n [scale[0], 0., 0., 0.],\n\n [0., scale[1], 0., 0.],\n\n [0., 0., scale[2], 0.],\n\n [0., 0., 0., 1.],\n\n ]\n\n };\n\n\n\n let transform_mat = zrot * yrot * xrot * transform_mat * scale_mat;\n\n return transform_mat;\n\n}\n", "file_path": "src/math.rs", "rank": 6, "score": 180871.82287601335 }, { "content": "pub fn change_stage_text(ecs: &mut ECS, text: &str) {\n\n let e = ecs.get2::<StageText, GuiText>().first().unwrap().clone();\n\n let font = ecs.read_res::<Rc<Font>>().clone();\n\n let renderer = ecs.read_res::<Rc<Renderer>>().clone();\n\n ecs.write::<GuiText>(&e).unwrap().change_text(&renderer, String::from(text), font);\n\n}\n\n\n", "file_path": "sandbox/src/gui.rs", "rank": 7, "score": 177631.2114996994 }, { "content": "pub fn osx_process_keyboard_message(new_state: &mut GameButtonState, is_down: bool) {\n\n if new_state.ended_down != is_down {\n\n new_state.ended_down = is_down;\n\n new_state.half_transition_count += 1;\n\n }\n\n}\n\n\n", "file_path": "src/platform/osx/osx_input.rs", "rank": 8, "score": 174956.6671528549 }, { "content": "pub fn win32_process_keyboard_message(new_state: &mut GameButtonState, is_down: bool) {\n\n if new_state.ended_down != is_down {\n\n new_state.ended_down = is_down;\n\n new_state.half_transition_count += 1;\n\n }\n\n}\n\n\n", "file_path": "src/platform/win32/win32_input.rs", "rank": 9, "score": 174956.66715285493 }, { "content": "pub fn create_view_matrix(pitch: f32, yaw: f32, camera_pos: [f32; 3]) -> Mat4x4 {\n\n let pitch_rotate = Mat4x4::x_rot_matrix(pitch);\n\n let yaw_rotate = Mat4x4::y_rot_matrix(yaw);\n\n let mut rotated = Mat4x4::identity() * pitch_rotate * yaw_rotate;\n\n rotated.data[0][3] = -camera_pos[0];\n\n rotated.data[1][3] = -camera_pos[1];\n\n rotated.data[2][3] = -camera_pos[2];\n\n return rotated;\n\n}\n\n\n", "file_path": "src/math.rs", "rank": 10, "score": 172278.1465613702 }, { "content": "fn create_top_bar(ecs: &mut ECS, mouse_layer: Entity) -> Entity {\n\n const TOP_BAR_PERCENT_HEIGHT: f32 = 0.03;\n\n let top_bar = GuiComponent::create_hoverable(\n\n ecs,\n\n None,\n\n None,\n\n GuiConstraints::new(\n\n Constraint::CenterConstraint,\n\n Constraint::RelativeConstraint {size: 1.0 - TOP_BAR_PERCENT_HEIGHT},\n\n RelativeConstraint {size: 1.0},\n\n RelativeConstraint {size: TOP_BAR_PERCENT_HEIGHT},\n\n ),\n\n 0.0,\n\n Color::from_u8(0x22, 0x22, 0x22, 0xFF),\n\n Color::from_u8(0x55, 0x55, 0x55, 0xFF));\n\n\n\n let bounds = ecs.read::<GuiComponent>(&top_bar).unwrap().calculated_bounds;\n\n let font = ecs.read_res::<Rc<Font>>().clone();\n\n let gold_text = GuiText::create(\n\n ecs,\n", "file_path": "sandbox/src/gui.rs", "rank": 11, "score": 167318.9050717581 }, { "content": "fn create_bottom_panel(ecs: &mut ECS, mouse_layer: Entity) -> Entity {\n\n let mut bottom_panel_entity = GuiComponent::create(\n\n ecs,\n\n None,\n\n None,\n\n GuiConstraints::new(\n\n Constraint::CenterConstraint,\n\n Constraint::RelativeConstraint { size: 0.0 },\n\n Constraint::RelativeConstraint {size: 1.},\n\n Constraint::PixelConstraint {size: 160}\n\n ),\n\n 0.,\n\n Color::from_u8(0x22, 0x22, 0x22, 0xFF));\n\n let bottom_panel = ecs.read::<GuiComponent>(&bottom_panel_entity).unwrap();\n\n let mut buttons_box_entity = GuiComponent::create(\n\n ecs,\n\n None,\n\n Some(bottom_panel.calculated_bounds),\n\n GuiConstraints::new(\n\n Constraint::CenterConstraint,\n", "file_path": "sandbox/src/gui.rs", "rank": 12, "score": 167318.9050717581 }, { "content": "fn add_menu_button(button_id: StartMenuButtonId, text: &str, menu_layer: Entity, bounds: Bounds, y: f32, ecs: &mut ECS, menu_screen_entity: Entity) {\n\n let button = GuiComponent::create_hoverable(\n\n ecs,\n\n Some(menu_layer),\n\n Some(bounds),\n\n GuiConstraints::new(\n\n Constraint::CenterConstraint,\n\n RelativeConstraint {size: y},\n\n RelativeConstraint {size: 0.6},\n\n RelativeConstraint {size: 0.12},\n\n ),\n\n 10.,\n\n Color::from_u8(0x33, 0x33, 0x33, 0xAA),\n\n Color::from_u8(0x88, 0x33, 0x33, 0xAA),\n\n );\n\n ecs.add_component(&button, button_id);\n\n\n\n let comp = ecs.read::<GuiComponent>(&button).unwrap();\n\n let font = ecs.read_res::<Rc<Font>>();\n\n let text = GuiText::create(\n", "file_path": "sandbox/src/start_menu.rs", "rank": 13, "score": 165355.56336017128 }, { "content": "fn add_menu_button(button_id: MainMenuButtonId, text: &str, menu_layer: Entity, bounds: Bounds, y: f32, ecs: &mut ECS, menu_screen_entity: Entity) {\n\n let button = GuiComponent::create_hoverable(\n\n ecs,\n\n Some(menu_layer),\n\n Some(bounds),\n\n GuiConstraints::new(\n\n Constraint::CenterConstraint,\n\n RelativeConstraint {size: y},\n\n RelativeConstraint {size: 0.6},\n\n RelativeConstraint {size: 0.12},\n\n ),\n\n 10.,\n\n Color::from_u8(0x33, 0x33, 0x33, 0xAA),\n\n Color::from_u8(0x88, 0x33, 0x33, 0xAA),\n\n );\n\n ecs.add_component(&button, button_id);\n\n\n\n let comp = ecs.read::<GuiComponent>(&button).unwrap();\n\n let font = ecs.read_res::<Rc<Font>>();\n\n let text = GuiText::create(\n", "file_path": "sandbox/src/main_menu.rs", "rank": 14, "score": 165355.56336017128 }, { "content": "fn draw_everything(ecs: &ECS, scene: &Scene) {\n\n let camera = ecs.read_res::<Camera>();\n\n for (tile, _) in ecs.read1::<Tile>() {\n\n tile.draw(&scene, &camera);\n\n }\n\n\n\n ecs.read_res::<Cursor>().draw(&scene, &camera);\n\n\n\n for (monster, _) in ecs.read1::<Monster>() {\n\n monster.draw(&scene, &camera);\n\n }\n\n\n\n for (player, _) in ecs.read1::<Player>() {\n\n player.draw(&scene, &camera);\n\n }\n\n\n\n for (turret, _) in ecs.read1::<Turret>() {\n\n turret.draw(&scene, &camera);\n\n }\n\n\n", "file_path": "sandbox/src/main.rs", "rank": 15, "score": 160127.0346150935 }, { "content": "pub fn create_projection_matrix(aspect: f32, fov: f32, zfar: f32, znear: f32) -> Mat4x4 {\n\n let fov = fov * f32::consts::PI/180.0;\n\n let zm = zfar - znear;\n\n let zp = zfar + znear;\n\n let xscale = (1./(fov / 2.).tan()) / aspect;\n\n let yscale = 1./(fov / 2.).tan();\n\n let zscale = -zp / zm;\n\n let ztranslate = -(2. * zfar * znear)/zm;\n\n\n\n return Mat4x4 {\n\n data: [\n\n [xscale, 0., 0., 0.],\n\n [0., yscale, 0., 0.],\n\n [0., 0., zscale, ztranslate],\n\n [0., 0., -1., 0.],\n\n ]\n\n };\n\n}\n\n\n", "file_path": "src/math.rs", "rank": 16, "score": 157698.6439224983 }, { "content": "fn paeth(a: i32, b: i32, c: i32) -> u8 {\n\n let p = a + b - c;\n\n let pa = (p - a).abs();\n\n let pb = (p - b).abs();\n\n let pc = (p - c).abs();\n\n\n\n if pa <= pb && pa <= pc {\n\n return a as u8;\n\n } else if pb <= pc {\n\n return b as u8;\n\n } else {\n\n return c as u8;\n\n }\n\n}\n\n\n\nimpl PNG {\n\n pub fn image(&self) -> Image {\n\n let w = self.header_chunk.width as usize;\n\n let h = self.header_chunk.height as usize;\n\n\n", "file_path": "src/images/png.rs", "rank": 17, "score": 156364.89109294701 }, { "content": "fn next_day(ecs: &mut ECS) {\n\n change_stage_text(ecs, \"Day\");\n\n\n\n let game_day = ecs.write_res::<Vec<GameDay>>().remove(0);\n\n ecs.add_res(game_day);\n\n ecs.write_res::<StateTimer>().countdown_s = ecs.read_res::<GameDay>().day_length;\n\n\n\n\n\n let tilemap = ecs.read_res::<Tilemap>();\n\n let borders: Vec<(f32, f32)> = tilemap.borders().iter().map(|border| {\n\n tilemap.pos_of_tile(*border)\n\n }).collect();\n\n\n\n let waves = &ecs.read_res::<GameDay>().waves.clone();\n\n for wave in waves {\n\n let border_index = thread_rng().gen_range(0, borders.len());\n\n let border = borders.get(border_index).unwrap();\n\n WaveSpawner::create(ecs, wave.wave.clone(), border.0, border.1, 1.);\n\n }\n\n\n\n let tilemap = ecs.read_res::<Tilemap>();\n\n Tree::create(ecs, tilemap.tile_at_pos(2, 4));\n\n let tilemap = ecs.read_res::<Tilemap>();\n\n Tree::create(ecs, tilemap.tile_at_pos(8, 2));\n\n}\n\n\n", "file_path": "sandbox/src/main.rs", "rank": 18, "score": 146637.94369736 }, { "content": "fn next_night(ecs: &mut ECS) {\n\n change_stage_text(ecs, \"Night\");\n\n\n\n ecs.write_res::<StateTimer>().countdown_s = ecs.read_res::<GameDay>().night_length;\n\n}\n\n\n\npub const DAY_GAME_STATE: GameStateId = 10;\n\n\n\npub struct DayGameState {\n\n}\n\n\n\nimpl GameState for DayGameState {\n\n fn on_state_start(&self, ecs: &mut ECS) {\n\n register_core_systems(ecs);\n\n ecs.add_system(Box::new(mouse_click_system));\n\n ecs.add_system(Box::new(mouse_cursor_system));\n\n ecs.add_system(Box::new(day_state_countdown));\n\n\n\n if ecs.read_res::<StateTimer>().countdown_s <= 0. {\n\n next_day(ecs);\n", "file_path": "sandbox/src/main.rs", "rank": 19, "score": 146637.94369736 }, { "content": "fn register_core_systems(ecs: &mut ECS) {\n\n ecs.add_system(Box::new(player_move_system));\n\n ecs.add_system(Box::new(mouse_capture_system));\n\n ecs.add_system(Box::new(game_gui_system));\n\n}\n\n\n", "file_path": "sandbox/src/main.rs", "rank": 20, "score": 144126.83922696934 }, { "content": "pub fn player_move_system(ecs: &ECS) -> Mutations {\n\n let input = ecs.read_res::<GameInput>();\n\n let mut mutations: Mutations = Vec::new();\n\n for (p, ent) in ecs.read1::<Player>() {\n\n let tile = ecs.read::<Tile>(&p.current_tile);\n\n if input.keyboard.letter_pressed(LetterKeys::A) {\n\n mutations.push(Box::new(MoveMutation {entity: ent, dx: -1, dy: 0}))\n\n } else if input.keyboard.letter_pressed(LetterKeys::D) {\n\n mutations.push(Box::new(MoveMutation {entity: ent, dx: 1, dy: 0}))\n\n }\n\n if input.keyboard.letter_pressed(LetterKeys::W) {\n\n mutations.push(Box::new(MoveMutation {entity: ent, dx: 0, dy: 1}))\n\n } else if input.keyboard.letter_pressed(LetterKeys::S) {\n\n mutations.push(Box::new(MoveMutation {entity: ent, dx: 0, dy: -1}))\n\n }\n\n }\n\n return mutations;\n\n}\n", "file_path": "sandbox/src/player.rs", "rank": 21, "score": 138116.1915894177 }, { "content": "pub fn game_gui_system(ecs: &ECS) -> Mutations {\n\n let mut mutations: Mutations = vec![];\n\n mutations.push(Box::new(UpdateResourceTextMutation {}));\n\n for (capture, gui, entity) in ecs.read2::<MouseCaptureArea, GuiComponent>() {\n\n mutations.push(Box::new(GuiHoveredMutation{entity, hovered: capture.is_hovered}));\n\n\n\n if capture.clicked_buttons[0] {\n\n println!(\"Clicked a button\");\n\n }\n\n }\n\n return mutations;\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct StageText {}\n\n\n\n#[derive(Debug)]\n\npub struct GoldText {}\n\n\n\n#[derive(Debug)]\n", "file_path": "sandbox/src/gui.rs", "rank": 22, "score": 138116.1915894177 }, { "content": "pub fn arrow_move_system(ecs: &ECS) -> Mutations {\n\n let mut mutations: Mutations = vec![];\n\n let dt = ecs.read_res::<GameInput>().seconds_to_advance_over_update;\n\n for (arrow, entity) in ecs.read1::<Arrow>() {\n\n let target = ecs.read::<Monster>(&arrow.target);\n\n if let Some(monster) = target {\n\n let v = (monster.x - arrow.x, monster.y - arrow.y);\n\n let dist = (v.0 * v.0 + v.1 * v.1).sqrt();\n\n if dist < 0.5 {\n\n mutations.push(Box::new(ArrowCollisionMutation {\n\n arrow: entity,\n\n }));\n\n } else {\n\n mutations.push(Box::new(MoveArrowTowardsMutation {\n\n arrow: entity,\n\n dx: v.0 * dt / dist * arrow.speed,\n\n dy: v.1 * dt / dist * arrow.speed,\n\n }))\n\n }\n\n } else {\n", "file_path": "sandbox/src/building.rs", "rank": 23, "score": 138116.1915894177 }, { "content": "pub fn wave_spawner_system(ecs: &ECS) -> Mutations {\n\n let mut mutations: Mutations = Vec::new();\n\n let input = ecs.read_res::<GameInput>();\n\n\n\n for (spawner, entity) in ecs.read1::<WaveSpawner>() {\n\n mutations.push(Box::new(ProcessSpawnerMutation {entity, dt: input.seconds_to_advance_over_update, x: spawner.x, y: spawner.y}));\n\n }\n\n\n\n return mutations;\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct WaveSpawner {\n\n wave_spec: WaveSpec,\n\n num_monsters: usize,\n\n current_monster_index: usize,\n\n spawn_max_cd: f32,\n\n spawn_current_cd: f32,\n\n x: f32,\n\n y: f32,\n", "file_path": "sandbox/src/spawners.rs", "rank": 24, "score": 138116.1915894177 }, { "content": "pub fn mouse_capture_system(ecs: &ECS) -> Mutations {\n\n let mut layers = ecs.read2::<MouseCaptureLayer, ActiveCaptureLayer>();\n\n layers.sort_by(|a, b| b.0.sort_index.cmp(&a.0.sort_index));\n\n\n\n let input = ecs.read_res::<GameInput>();\n\n let mouse_x = input.mouse.x;\n\n let mouse_y = 900 - input.mouse.y;\n\n let down_buttons = [\n\n input.mouse.buttons[0].ended_down,\n\n input.mouse.buttons[1].ended_down,\n\n input.mouse.buttons[2].ended_down,\n\n input.mouse.buttons[3].ended_down,\n\n input.mouse.buttons[4].ended_down,\n\n ];\n\n let clicked_buttons = [\n\n input.mouse.buttons[0].ended_down && input.mouse.buttons[0].half_transition_count > 0,\n\n input.mouse.buttons[1].ended_down && input.mouse.buttons[1].half_transition_count > 0,\n\n input.mouse.buttons[2].ended_down && input.mouse.buttons[2].half_transition_count > 0,\n\n input.mouse.buttons[3].ended_down && input.mouse.buttons[3].half_transition_count > 0,\n\n input.mouse.buttons[4].ended_down && input.mouse.buttons[4].half_transition_count > 0,\n", "file_path": "src/mouse_capture.rs", "rank": 25, "score": 138116.1915894177 }, { "content": "pub fn monster_damage_system(ecs: &ECS) -> Mutations {\n\n let mut mutations: Mutations = vec![];\n\n for (monster, damage, entity) in ecs.read2::<Monster, DamageDealt>() {\n\n mutations.push(Box::new(MonsterDamageMutation {monster: entity, damage: damage.damage}));\n\n }\n\n return mutations;\n\n}", "file_path": "sandbox/src/monster.rs", "rank": 26, "score": 138116.1915894177 }, { "content": "pub fn turret_attack_system(ecs: &ECS) -> Mutations {\n\n let mut mutations: Mutations = vec![];\n\n let dt = ecs.read_res::<GameInput>().seconds_to_advance_over_update;\n\n\n\n let mut monster_positions: Vec<(Entity, f32, f32)> = vec![];\n\n for (monster, entity) in ecs.read1::<Monster>() {\n\n monster_positions.push((entity, monster.x, monster.y));\n\n }\n\n\n\n let input = ecs.read_res::<GameInput>();\n\n for (turret, e) in ecs.read1::<Turret>() {\n\n let mut closest: Option<(Entity, f32)> = None;\n\n for (monster, x, y) in &monster_positions {\n\n let (x, y) = (turret.x - x, turret.y - y);\n\n let dist = (x * x + y * y).sqrt();\n\n\n\n if let Some((_, closest_dist)) = closest {\n\n if dist < closest_dist {\n\n closest = Some((monster.clone(), dist));\n\n }\n", "file_path": "sandbox/src/building.rs", "rank": 27, "score": 138116.1915894177 }, { "content": "pub fn monster_move_system(ecs: &ECS) -> Mutations {\n\n let mut mutations: Mutations = Vec::new();\n\n for monster in ecs.get1::<Monster>() {\n\n mutations.push(Box::new(MonsterMoveMutation {monster}));\n\n }\n\n return mutations;\n\n}\n\n\n", "file_path": "sandbox/src/monster.rs", "rank": 28, "score": 138116.1915894177 }, { "content": "pub fn spawner_blink_system(ecs: &ECS) -> Mutations {\n\n let mut mutations = vec![];\n\n return mutations;\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct WaveSpec {\n\n pub monsters: Vec<MonsterSpec>,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub struct MonsterSpec {\n\n pub monster_type: MonsterType,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub enum MonsterType {\n\n Wolf,\n\n}\n\n\n", "file_path": "sandbox/src/spawners.rs", "rank": 29, "score": 138116.1915894177 }, { "content": "pub fn menu_mouse_system(ecs: &ECS) -> Mutations {\n\n let menu = ecs.read_res::<MenuScreen>();\n\n let mut mutations: Mutations = vec![];\n\n for (capture_area, button, entity) in ecs.read2::<MouseCaptureArea, MainMenuButtonId>() {\n\n if capture_area.clicked_buttons[0] {\n\n mutations.push(Box::new(MenuClickMutation {\n\n buttonid: *button,\n\n }));\n\n }\n\n }\n\n return mutations;\n\n}\n\n\n", "file_path": "sandbox/src/main_menu.rs", "rank": 30, "score": 135842.440884383 }, { "content": "pub fn start_menu_mouse_system(ecs: &ECS) -> Mutations {\n\n let mut mutations: Mutations = vec![];\n\n for (capture_area, button, entity) in ecs.read2::<MouseCaptureArea, StartMenuButtonId>() {\n\n if capture_area.clicked_buttons[0] {\n\n mutations.push(Box::new(MenuClickMutation {\n\n buttonid: *button,\n\n }));\n\n }\n\n }\n\n return mutations;\n\n}\n\n\n\n\n", "file_path": "sandbox/src/start_menu.rs", "rank": 31, "score": 133672.41213409457 }, { "content": "pub fn u32_from_bytes(bytes: [u8; 4]) -> u32 {\n\n ((bytes[0] as u32) << 24)\n\n + ((bytes[1] as u32) << 16)\n\n + ((bytes[2] as u32) << 8)\n\n + ((bytes[3] as u32) << 0)\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 32, "score": 120190.87168979071 }, { "content": "fn win32_process_xinput_stick_values(value: f32, dead_zone_threshold: f32) -> f32 {\n\n if value < -dead_zone_threshold {\n\n return (value + dead_zone_threshold) / (32768f32 - dead_zone_threshold);\n\n } else if value > dead_zone_threshold {\n\n return (value - dead_zone_threshold) / (32767f32 - dead_zone_threshold);\n\n } else {\n\n return 0f32;\n\n }\n\n}\n", "file_path": "src/platform/win32/win32_input.rs", "rank": 33, "score": 115425.66916529271 }, { "content": "fn parse_chunk(c: &Vec<u8>, i: usize) -> Chunk {\n\n let length = u32_from_bytes([c[i], c[i + 1], c[i + 2], c[i + 3]]);\n\n let mut chunk_type = String::from(\"\");\n\n chunk_type.push(c[i + 4].into());\n\n chunk_type.push(c[i + 5].into());\n\n chunk_type.push(c[i + 6].into());\n\n chunk_type.push(c[i + 7].into());\n\n\n\n let mut bytes = vec![];\n\n for j in 0..length {\n\n let byte = c[i + 8 + j as usize];\n\n bytes.push(byte);\n\n }\n\n\n\n return Chunk {\n\n length,\n\n chunk_type,\n\n chunk_data: bytes,\n\n crc: 0,\n\n }\n\n}", "file_path": "src/images/png.rs", "rank": 34, "score": 110122.09033924865 }, { "content": "pub fn osx_process_key(\n\n keyboard: &mut KeyboardInput,\n\n u16_char: u16,\n\n key_down: bool,\n\n _alt_down: bool,\n\n _ctrl_down: bool,\n\n cmd_down: bool,\n\n) {\n\n match u16_char {\n\n 0xF700 => {\n\n osx_process_keyboard_message(&mut keyboard.special_keys[SpecialKeys::UpArrow], key_down)\n\n }\n\n 0xF701 => osx_process_keyboard_message(\n\n &mut keyboard.special_keys[SpecialKeys::DownArrow],\n\n key_down,\n\n ),\n\n 0xF702 => osx_process_keyboard_message(\n\n &mut keyboard.special_keys[SpecialKeys::LeftArrow],\n\n key_down,\n\n ),\n", "file_path": "src/platform/osx/osx_input.rs", "rank": 35, "score": 108179.3197929227 }, { "content": "type XInputGetStateFunc = unsafe extern \"system\" fn(DWORD, *mut XINPUT_STATE) -> DWORD;\n", "file_path": "src/platform/win32/window.rs", "rank": 36, "score": 106729.24889623598 }, { "content": "type XInputSetStateFunc = unsafe extern \"system\" fn(DWORD, *mut XINPUT_VIBRATION) -> DWORD;\n\nstatic mut OPT_XINPUT_GET_STATE_FUNC: Option<XInputGetStateFunc> = None;\n\nstatic mut OPT_XINPUT_SET_STATE_FUNC: Option<XInputSetStateFunc> = None;\n\n\n", "file_path": "src/platform/win32/window.rs", "rank": 37, "score": 106729.24889623598 }, { "content": "pub fn generate_days() -> Vec<GameDay> {\n\n return vec![\n\n GameDay {day_length: 5., night_length: 20., waves: vec![SpawnerSpec {wave: WaveSpec {monsters: vec![MonsterSpec {monster_type: Wolf}; 15]}}]},\n\n GameDay {day_length: 5., night_length: 30., waves: vec![SpawnerSpec {wave: WaveSpec {monsters: vec![MonsterSpec {monster_type: Wolf}; 20]}}]},\n\n GameDay {day_length: 5., night_length: 40., waves: vec![SpawnerSpec {wave: WaveSpec {monsters: vec![MonsterSpec {monster_type: Wolf}; 25]}}]},\n\n GameDay {day_length: 5., night_length: 50., waves: vec![SpawnerSpec {wave: WaveSpec {monsters: vec![MonsterSpec {monster_type: Wolf}; 30]}}]},\n\n GameDay {day_length: 5., night_length: 60., waves: vec![SpawnerSpec {wave: WaveSpec {monsters: vec![MonsterSpec {monster_type: Wolf}; 35]}}]},\n\n GameDay {day_length: 5., night_length: 70., waves: vec![SpawnerSpec {wave: WaveSpec {monsters: vec![MonsterSpec {monster_type: Wolf}; 40]}}]},\n\n ]\n\n}\n", "file_path": "sandbox/src/spawners.rs", "rank": 38, "score": 102282.5814553553 }, { "content": "pub fn u16_from_bytes(bytes: [u8; 2]) -> u16 {\n\n ((bytes[0] as u16) << 8) + ((bytes[1] as u16) << 0)\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 39, "score": 99504.79260700247 }, { "content": "pub fn i16_from_bytes(bytes: [u8; 2]) -> i16 {\n\n ((bytes[0] as i16) << 8) + ((bytes[1] as i16) << 0)\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 40, "score": 99504.79260700247 }, { "content": "struct MissingComponentError;\n\n\n\nimpl From<&str> for MissingComponentError {\n\n fn from(_: &str) -> MissingComponentError {\n\n return MissingComponentError;\n\n }\n\n}\n\n\n\nimpl ECS {\n\n pub fn add_system(&mut self, system: Box<System>) {\n\n self.systems.push(system);\n\n }\n\n\n\n pub fn clear_systems(&mut self) {\n\n self.systems.clear();\n\n }\n\n\n\n pub fn register_component_type<T: 'static + Debug>(&mut self) {\n\n let e: EntityMap<T> = EntityMap::new();\n\n self.components.insert(e);\n", "file_path": "src/ecs.rs", "rank": 41, "score": 98813.50985845883 }, { "content": "pub trait Mutation {\n\n fn apply(&self, ecs: &mut ECS);\n\n}\n\n\n\npub type Mutations = Vec<Box<dyn Mutation>>;\n\npub type System = dyn Fn(&ECS) -> Mutations;\n\npub type GameStateId = u32;\n\n\n\n\n\npub struct ECS {\n\n entity_allocator: GenIndexAllocator,\n\n components: AnyMap,\n\n resources: AnyMap,\n\n systems: Vec<Box<System>>,\n\n queued_events: AnyMap,\n\n processing_events: AnyMap,\n\n}\n\n\n\n#[macro_use]\n\nmacro_rules! impl_read {\n", "file_path": "src/ecs.rs", "rank": 42, "score": 97194.20469626153 }, { "content": "#[derive(Debug)]\n\nstruct ArrayEntry<T> {\n\n value: T,\n\n generation: u64,\n\n}\n\n\n\npub struct GenIndexArray<T>(Vec<Option<ArrayEntry<T>>>);\n\n\n\nimpl<T> GenIndexArray<T> {\n\n pub fn new() -> GenIndexArray<T> {\n\n GenIndexArray(Vec::new())\n\n }\n\n\n\n pub fn set(&mut self, index: GenIndex, value: T) {\n\n while index.index >= self.0.len() {\n\n self.0.push(None);\n\n }\n\n let entry = self.0.get_mut(index.index).unwrap();\n\n *entry = Some(ArrayEntry {value, generation: index.generation})\n\n }\n\n\n", "file_path": "src/ecs.rs", "rank": 43, "score": 96727.8481813819 }, { "content": "#[derive(Eq, PartialEq, Clone)]\n\nstruct GenIndexAllocationEntry {\n\n is_free: bool,\n\n generation: u64,\n\n}\n\n\n\n#[derive(Eq, PartialEq, Clone)]\n\npub struct GenIndexAllocator {\n\n entries: Vec<GenIndexAllocationEntry>,\n\n free: Vec<usize>,\n\n}\n\n\n\nimpl GenIndexAllocator {\n\n pub fn new() -> GenIndexAllocator {\n\n GenIndexAllocator {\n\n entries: Vec::new(),\n\n free: Vec::new(),\n\n }\n\n }\n\n\n\n pub fn allocate(&mut self) -> GenIndex {\n", "file_path": "src/ecs.rs", "rank": 44, "score": 96697.53296420666 }, { "content": "pub trait GameWindowImpl {\n\n fn capture_events(&mut self) -> Vec<WindowEvent>;\n\n fn capture_input(&mut self) -> GameInput;\n\n fn get_width(&self) -> usize;\n\n fn get_height(&self) -> usize;\n\n}\n\n\n\npub struct WindowProps {\n\n pub width: f64,\n\n pub height: f64,\n\n pub title: String,\n\n pub target_ms_per_frame: f32,\n\n}\n\n\n\npub enum WindowEvent {\n\n ResizeEvent {width: f32, height: f32},\n\n CloseEvent,\n\n}", "file_path": "src/window.rs", "rank": 45, "score": 93648.94248241308 }, { "content": "fn mouse_cursor_system(ecs: &ECS) -> Mutations {\n\n let mut mutations: Mutations = vec![];\n\n let mut any_hovered = false;\n\n for (tile, mouse_capture, e) in ecs.read2::<Tile, MouseCaptureArea>() {\n\n if mouse_capture.is_hovered {\n\n mutations.push(Box::new(CursorSetPositionMutation {tile: e}));\n\n any_hovered = true;\n\n }\n\n }\n\n mutations.push(Box::new(CursorVisibilityMutation {visible: any_hovered}));\n\n return mutations;\n\n}\n\n\n", "file_path": "sandbox/src/main.rs", "rank": 46, "score": 90088.44878930994 }, { "content": "fn mouse_click_system(ecs: &ECS) -> Mutations {\n\n let mut mutations: Mutations = Vec::new();\n\n for (tile, mouse_capture, tile_e) in ecs.read2::<Tile, MouseCaptureArea>() {\n\n if mouse_capture.clicked_buttons[0] {\n\n mutations.push(Box::new(TurretDeselectMutation{}));\n\n if !tile.occupied {\n\n println!(\"Unoccupied\");\n\n mutations.push(Box::new(CreateTurretMutation{tile_e}));\n\n } else {\n\n println!(\"Occupied\");\n\n for (turret, e) in ecs.read1::<Turret>() {\n\n if turret.x == tile.x as f32 && turret.y == tile.y as f32 && !turret.selected {\n\n println!(\"Mutating\");\n\n mutations.push(Box::new(TurretSelectMutation{turret_e: e}));\n\n }\n\n }\n\n\n\n for (tree, e) in ecs.read1::<Tree>() {\n\n if tree.x == tile.x as f32 && tree.y == tile.y as f32 {\n\n mutations.push(Box::new(TreeHarvestMutation {tree: e}))\n\n }\n\n }\n\n }\n\n }\n\n }\n\n return mutations;\n\n}\n\n\n", "file_path": "sandbox/src/main.rs", "rank": 47, "score": 90088.44878930994 }, { "content": "fn day_state_countdown(ecs: &ECS) -> Mutations {\n\n let input = ecs.read_res::<GameInput>();\n\n return vec![Box::new(StateCountdownMutation {dt: input.seconds_to_advance_over_update})];\n\n}\n\n\n", "file_path": "sandbox/src/main.rs", "rank": 48, "score": 90088.44878930994 }, { "content": "fn update_buffer<T>(buffer: &Buffer, mut data: Vec<T>) {\n\n unsafe {\n\n std::ptr::copy(data.as_mut_ptr(), mem::transmute(buffer.contents()), data.len());\n\n };\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct IndexBuffer {\n\n pub data: Buffer,\n\n}\n\n\n\nimpl IndexBuffer {\n\n pub fn new(renderer: &Renderer, indices: Vec<i16>) -> IndexBuffer {\n\n return IndexBuffer {\n\n data: create_buffer(renderer, indices),\n\n };\n\n }\n\n\n\n pub fn bind(&self, scene: &Scene) {\n\n\n", "file_path": "src/platform/metal/buffers.rs", "rank": 49, "score": 88918.50287650057 }, { "content": "fn create_buffer<T>(renderer: &Renderer, data: Vec<T>) -> Buffer {\n\n let buffer = renderer.device.new_buffer_with_data(\n\n unsafe { mem::transmute(data.as_ptr()) },\n\n (data.len() * mem::size_of::<T>()) as u64,\n\n MTLResourceOptions::CPUCacheModeDefaultCache,\n\n );\n\n return buffer;\n\n}\n\n\n", "file_path": "src/platform/metal/buffers.rs", "rank": 50, "score": 81195.97706171847 }, { "content": "fn prepare_render_pass_descriptor(descriptor: &RenderPassDescriptorRef, texture: &TextureRef) {\n\n let color_attachment = descriptor.color_attachments().object_at(0).unwrap();\n\n\n\n color_attachment.set_texture(Some(texture));\n\n color_attachment.set_load_action(MTLLoadAction::Clear);\n\n color_attachment.set_clear_color(MTLClearColor::new(0.2, 0.2, 0.2, 1.0));\n\n color_attachment.set_store_action(MTLStoreAction::Store);\n\n}\n\n\n\npub struct Renderer {\n\n device: Device,\n\n layer: CoreAnimationLayer,\n\n command_queue: CommandQueue,\n\n width: usize,\n\n height: usize,\n\n drawables: Vec<Box<Drawable>>,\n\n}\n\n\n\nimpl Renderer {\n\n pub fn new(window: &mut OsxWindow) -> Self {\n", "file_path": "src/platform/metal/mod.rs", "rank": 51, "score": 80081.13271193411 }, { "content": "use crate::rendering::shader::Shader;\n\nuse crate::rendering::buffers::{VertexBuffer, IndexBuffer, VertexConstantBuffer, FragmentConstantBuffer};\n\nuse crate::rendering::{Scene, Renderer};\n\nuse crate::input::{GameInput, LetterKeys};\n\nuse crate::math::{create_transformation_matrix, create_projection_matrix, create_view_matrix, Mat4x4};\n\nuse crate::rendering::texture::RenderableTexture;\n\nuse crate::ecs::Entity;\n\nuse crate::{RenderLayer, RenderOrder};\n\n\n\n#[derive(Debug)]\n\npub struct TextureDrawable {\n\n pub vertex_buffer: VertexBuffer<[f32; 6]>,\n\n pub index_buffer: IndexBuffer,\n\n pub transform_buffer: VertexConstantBuffer<f32>,\n\n pub shader: Shader,\n\n pub identity_buffer: VertexConstantBuffer<f32>,\n\n pub texture: RenderableTexture,\n\n pub position: [f32; 3],\n\n pub scale: [f32; 3],\n\n pub rotation: [f32; 3],\n", "file_path": "src/rendering/drawable.rs", "rank": 52, "score": 76129.55903535528 }, { "content": "\n\n scene.draw_indexed(6, &self.index_buffer);\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct QuadDrawable {\n\n pub vertex_buffer: VertexBuffer<[f32; 4]>,\n\n pub index_buffer: IndexBuffer,\n\n pub transform_buffer: VertexConstantBuffer<f32>,\n\n pub shader: Shader,\n\n pub color_buffer: FragmentConstantBuffer<f32>,\n\n pub identity_buffer: VertexConstantBuffer<f32>,\n\n pub position: [f32; 3],\n\n pub scale: [f32; 3],\n\n pub rotation: [f32; 3],\n\n}\n\n\n\nimpl QuadDrawable {\n\n pub fn new(is_gui: bool, renderer: &Renderer, color: [f32; 3], position: [f32; 3], scale: [f32; 3], rotation: [f32; 3]) -> Self {\n", "file_path": "src/rendering/drawable.rs", "rank": 53, "score": 76127.50177101564 }, { "content": " self.update_transform(renderer);\n\n }\n\n\n\n pub fn set_rotation(&mut self, renderer: &Renderer, rotation: [f32; 3]) {\n\n self.rotation = rotation;\n\n self.update_transform(renderer);\n\n }\n\n\n\n fn update_transform(&self, renderer: &Renderer) {\n\n let transform_mat = create_transformation_matrix(self.position, self.rotation, self.scale);\n\n self.transform_buffer.update_data(renderer, transform_mat.to_vec());\n\n }\n\n\n\n pub fn draw_with_projection(&self, scene: &Scene, camera_projection: &VertexConstantBuffer<f32>) {\n\n camera_projection.bind_to_offset(scene, 1);\n\n self.draw_impl(scene);\n\n }\n\n\n\n pub fn draw(&self, scene: &Scene) {\n\n self.identity_buffer.bind(scene);\n", "file_path": "src/rendering/drawable.rs", "rank": 54, "score": 76125.58058595573 }, { "content": "}\n\n\n\nimpl TextureDrawable {\n\n pub fn new(is_gui: bool, renderer: &Renderer, texture: RenderableTexture, position: [f32; 3], scale: [f32; 3], rotation: [f32; 3]) -> Self {\n\n let vb = VertexBuffer::new(\n\n renderer,\n\n 0,\n\n vec![\n\n [-1., -1., 0., 1., 0., 1.], // bottom left\n\n [1., -1., 0., 1., 1., 1.], // bottom right\n\n [1., 1., 0., 1., 1., 0.], // top right\n\n [-1., 1., 0., 1., 0., 0.], // top left\n\n ]);\n\n\n\n let ib = IndexBuffer::new(\n\n renderer,\n\n vec![\n\n 0, 3, 2,\n\n 0, 1, 2,\n\n ]);\n", "file_path": "src/rendering/drawable.rs", "rank": 55, "score": 76125.12513241221 }, { "content": " rotation,\n\n }\n\n }\n\n\n\n pub fn set_position(&mut self, renderer: &Renderer, position: [f32; 3]) {\n\n self.position = position;\n\n self.update_transform(renderer);\n\n }\n\n\n\n pub fn set_scale(&mut self, renderer: &Renderer, scale: [f32; 3]) {\n\n self.scale = scale;\n\n self.update_transform(renderer);\n\n }\n\n\n\n pub fn set_rotation(&mut self, renderer: &Renderer, rotation: [f32; 3]) {\n\n self.rotation = rotation;\n\n self.update_transform(renderer);\n\n }\n\n\n\n fn update_transform(&self, renderer: &Renderer) {\n", "file_path": "src/rendering/drawable.rs", "rank": 56, "score": 76121.64258647189 }, { "content": " let transform_mat = create_transformation_matrix(self.position, self.rotation, self.scale);\n\n self.transform_buffer.update_data(renderer, transform_mat.to_vec());\n\n }\n\n\n\n pub fn draw_with_projection(&self, scene: &Scene, camera_projection: &VertexConstantBuffer<f32>) {\n\n camera_projection.bind_to_offset(scene, 1);\n\n self.draw_impl(scene);\n\n }\n\n\n\n pub fn draw(&self, scene: &Scene) {\n\n self.identity_buffer.bind(scene);\n\n self.draw_impl(scene);\n\n }\n\n\n\n fn draw_impl(&self, scene: &Scene) {\n\n self.shader.bind(scene);\n\n self.vertex_buffer.bind(scene);\n\n self.transform_buffer.bind(scene);\n\n self.texture.bind(scene);\n\n self.index_buffer.bind(scene);\n", "file_path": "src/rendering/drawable.rs", "rank": 57, "score": 76121.47548278286 }, { "content": " shader,\n\n identity_buffer,\n\n position,\n\n scale,\n\n rotation\n\n }\n\n }\n\n\n\n pub fn translate(&self, renderer: &Renderer, position: [f32; 3], scale: [f32; 3]) {\n\n let transform_mat = create_transformation_matrix(position, [0., 0., 0.], scale);\n\n self.transform_buffer.update_data(renderer, transform_mat.to_vec());\n\n }\n\n\n\n pub fn set_position(&mut self, renderer: &Renderer, position: [f32; 3]) {\n\n self.position = position;\n\n self.update_transform(renderer);\n\n }\n\n\n\n pub fn set_scale(&mut self, renderer: &Renderer, scale: [f32; 3]) {\n\n self.scale = scale;\n", "file_path": "src/rendering/drawable.rs", "rank": 58, "score": 76120.93923760831 }, { "content": " self.draw_impl(scene);\n\n }\n\n\n\n fn draw_impl(&self, scene: &Scene) {\n\n self.shader.bind(scene);\n\n self.vertex_buffer.bind(scene);\n\n self.transform_buffer.bind(scene);\n\n self.index_buffer.bind(scene);\n\n self.color_buffer.bind(&scene);\n\n\n\n scene.draw_indexed(6, &self.index_buffer);\n\n }\n\n}\n", "file_path": "src/rendering/drawable.rs", "rank": 59, "score": 76114.35947302663 }, { "content": " let vb = VertexBuffer::new(\n\n renderer,\n\n 0,\n\n vec![\n\n [-1., -1., 0., 1.], // bottom left\n\n [1., -1., 0., 1.], // bottom right\n\n [1., 1., 0., 1.], // top right\n\n [-1., 1., 0., 1.], // top left\n\n ]);\n\n\n\n let ib = IndexBuffer::new(\n\n renderer,\n\n vec![\n\n 0, 3, 2,\n\n 0, 1, 2,\n\n ]);\n\n\n\n let shader = Shader::new(\n\n renderer,\n\n false,\n", "file_path": "src/rendering/drawable.rs", "rank": 60, "score": 76109.65894615203 }, { "content": "\n\n let shader = Shader::new(\n\n renderer,\n\n true,\n\n \"shaders/textureVertexShader.txt\",\n\n \"shaders/textureFragmentShader.txt\");\n\n\n\n let transform_mat = create_transformation_matrix(position, rotation, scale);\n\n let transform_buffer = VertexConstantBuffer::new(renderer, 0, transform_mat.raw_data().to_vec());\n\n let identity_buffer = VertexConstantBuffer::new(renderer, 1, Mat4x4::identity().to_vec());\n\n\n\n return Self {\n\n vertex_buffer: vb,\n\n index_buffer: ib,\n\n transform_buffer,\n\n shader,\n\n identity_buffer,\n\n texture,\n\n position,\n\n scale,\n", "file_path": "src/rendering/drawable.rs", "rank": 61, "score": 76109.10742727946 }, { "content": " \"shaders/quadVertexShader.txt\",\n\n \"shaders/quadFragmentShader.txt\");\n\n\n\n let transform_mat = create_transformation_matrix(position, rotation, scale);\n\n let transform_buffer = VertexConstantBuffer::new(renderer,0, transform_mat.raw_data().to_vec());\n\n\n\n let alpha = if is_gui {\n\n 0.5\n\n } else {\n\n 1.0\n\n };\n\n let data = [color[0], color[1], color[2], alpha];\n\n let color_buffer = FragmentConstantBuffer::new(renderer, 0, data.to_vec());\n\n\n\n let identity_buffer = VertexConstantBuffer::new(renderer, 1, Mat4x4::identity().to_vec());\n\n return Self {\n\n vertex_buffer: vb,\n\n index_buffer: ib,\n\n transform_buffer,\n\n color_buffer,\n", "file_path": "src/rendering/drawable.rs", "rank": 62, "score": 76109.01415652134 }, { "content": "struct Game {\n\n}\n\n\n\nimpl Game {\n\n pub fn new() -> Self { Game {} }\n\n}\n\n\n\nimpl GameLogic for Game {\n\n fn window_props(&self) -> WindowProps {\n\n WindowProps {\n\n width: 900.0,\n\n height: 900.0,\n\n title: \"Hearth of Hestia\".to_string(),\n\n target_ms_per_frame: 30.0,\n\n }\n\n }\n\n\n\n fn register_components(&self, ecs: &mut ECS) {\n\n ecs.register_component_type::<Tile>();\n\n ecs.register_component_type::<Hearth>();\n", "file_path": "sandbox/src/main.rs", "rank": 63, "score": 64874.04954698867 }, { "content": "#[derive(Debug)]\n\nstruct Pos {\n\n pub x: f32,\n\n pub y: f32,\n\n}\n\n\n\npub struct CreateTurretMutation {\n\n tile_e: Entity,\n\n}\n\n\n\nimpl Mutation for CreateTurretMutation {\n\n fn apply(&self, ecs: &mut ECS) {\n\n if ecs.write_res::<Supplies>().spend_supplies(0, 5, 0) {\n\n Turret::create(ecs, self.tile_e);\n\n\n\n ecs.write::<Tile>(&self.tile_e).unwrap().occupied = true;\n\n } else {\n\n\n\n }\n\n }\n\n}\n", "file_path": "sandbox/src/main.rs", "rank": 64, "score": 64874.04954698867 }, { "content": "fn create_window(class_name: &str, title: &str, width: u32, height: u32) -> Option<HWND> {\n\n let class_name = win32_string(class_name);\n\n let title = win32_string(title);\n\n\n\n let handle_instance = unsafe { GetModuleHandleW(null_mut()) };\n\n\n\n let window_class = WNDCLASSW {\n\n style: CS_OWNDC | CS_HREDRAW | CS_VREDRAW,\n\n lpfnWndProc: Some(win32_handle_proc),\n\n cbClsExtra: 0,\n\n cbWndExtra: 0,\n\n hInstance: handle_instance,\n\n lpszClassName: class_name.as_ptr(),\n\n hIcon: null_mut(),\n\n hCursor: null_mut(),\n\n hbrBackground: null_mut(),\n\n lpszMenuName: null_mut(),\n\n };\n\n unsafe {\n\n match RegisterClassW(&window_class) {\n", "file_path": "src/platform/win32/window.rs", "rank": 65, "score": 64346.41586174465 }, { "content": "struct MoveMutation {\n\n entity: Entity,\n\n dx: i32,\n\n dy: i32,\n\n}\n\n\n\nimpl Mutation for MoveMutation {\n\n fn apply(&self, ecs: &mut ECS) {\n\n let renderer = ecs.read_res::<Rc<Renderer>>().clone();\n\n let player = ecs.read::<Player>(&self.entity).unwrap();\n\n let tilemap = ecs.read_res::<Tilemap>();\n\n let tile = tilemap.tile_at_pos((player.x + self.dx + 5) as usize, (player.y + self.dy + 3) as usize);\n\n\n\n let player = ecs.write::<Player>(&self.entity).unwrap();\n\n player.move_pos(tile, &renderer, self.dx, self.dy);\n\n }\n\n}\n\n\n", "file_path": "sandbox/src/player.rs", "rank": 66, "score": 63738.968434994604 }, { "content": "type EntityMap<T> = GenIndexArray<T>;\n\n\n", "file_path": "src/ecs.rs", "rank": 67, "score": 63113.22642760371 }, { "content": "struct ArrowCollisionMutation {\n\n arrow: Entity,\n\n}\n\n\n\nimpl Mutation for ArrowCollisionMutation {\n\n fn apply(&self, ecs: &mut ECS) {\n\n let arrow = ecs.read::<Arrow>(&self.arrow).unwrap();\n\n let target = arrow.target.clone();\n\n let damage = arrow.damage;\n\n ecs.delete_entity(&self.arrow);\n\n\n\n ecs.add_component(&target, DamageDealt {damage});\n\n }\n\n}\n\n\n", "file_path": "sandbox/src/building.rs", "rank": 68, "score": 62671.768276851566 }, { "content": "struct CursorVisibilityMutation {\n\n visible: bool,\n\n}\n\n\n\nimpl Mutation for CursorVisibilityMutation {\n\n fn apply(&self, ecs: &mut ECS) {\n\n ecs.write_res::<Cursor>().set_visible(self.visible);\n\n }\n\n}\n\n\n", "file_path": "sandbox/src/main.rs", "rank": 69, "score": 62671.768276851566 }, { "content": "struct ArrowDestroyMutation {\n\n arrow: Entity,\n\n}\n\n\n\nimpl Mutation for ArrowDestroyMutation {\n\n fn apply(&self, ecs: &mut ECS) {\n\n ecs.delete_entity(&self.arrow);\n\n }\n\n}\n\n\n", "file_path": "sandbox/src/building.rs", "rank": 70, "score": 62671.768276851566 }, { "content": "struct MonsterDamageMutation {\n\n monster: Entity,\n\n damage: u32,\n\n}\n\n\n\nimpl Mutation for MonsterDamageMutation {\n\n fn apply(&self, ecs: &mut ECS) {\n\n ecs.remove_component::<DamageDealt>(&self.monster);\n\n let monster = ecs.write::<Monster>(&self.monster).unwrap();\n\n monster.take_damage(self.damage);\n\n if monster.is_dead() {\n\n ecs.delete_entity(&self.monster);\n\n }\n\n }\n\n}\n\n\n", "file_path": "sandbox/src/monster.rs", "rank": 71, "score": 62671.768276851566 }, { "content": "struct MoveArrowTowardsMutation {\n\n arrow: Entity,\n\n dx: f32,\n\n dy: f32,\n\n}\n\n\n\nimpl Mutation for MoveArrowTowardsMutation {\n\n fn apply(&self, ecs: &mut ECS) {\n\n let renderer = ecs.read_res::<Rc<Renderer>>().clone();\n\n let arrow = ecs.write::<Arrow>(&self.arrow).unwrap();\n\n arrow.change_pos(&renderer, self.dx, self.dy);\n\n }\n\n}\n\n\n", "file_path": "sandbox/src/building.rs", "rank": 72, "score": 61666.536661780236 }, { "content": "struct CursorSetPositionMutation {\n\n tile: Entity,\n\n}\n\n\n\nimpl Mutation for CursorSetPositionMutation {\n\n fn apply(&self, ecs: &mut ECS) {\n\n let tile = ecs.read::<Tile>(&self.tile).unwrap();\n\n let (x, y) = (tile.x, tile.y);\n\n\n\n let renderer = ecs.read_res::<Rc<Renderer>>().clone();\n\n let cursor = ecs.write_res::<Cursor>();\n\n cursor.set_pos(&renderer, [x as f32, y as f32, 0.]);\n\n }\n\n}\n\n\n", "file_path": "sandbox/src/main.rs", "rank": 73, "score": 61666.536661780236 }, { "content": "struct CocoaEventQueue {\n\n\n\n}\n\n\n\nimpl CocoaEventQueue {\n\n\n\n pub fn new() -> CocoaEventQueue {\n\n CocoaEventQueue {}\n\n }\n\n\n\n pub fn queue_resize_event(&mut self, width: f32, height: f32) {\n\n\n\n }\n\n\n\n pub fn queue_close_event(&mut self) {\n\n\n\n }\n\n}\n\n\n\npub struct OsxWindow {\n", "file_path": "src/platform/osx/osx_window.rs", "rank": 74, "score": 60718.0284321004 }, { "content": "pub trait GameLogic {\n\n fn window_props(&self) -> WindowProps;\n\n fn register_components(&self, ecs: &mut ECS);\n\n fn cleanup_components(&self, ecs: &mut ECS);\n\n fn register_events(&self, ecs: &mut ECS);\n\n fn migrate_events(&self, ecs: &mut ECS);\n\n fn game_states(&self) -> HashMap<GameStateId, Box<dyn GameState>>;\n\n fn initial_game_state(&self) -> GameStateId;\n\n fn setup(&mut self, ecs: &mut ECS);\n\n}\n\n\n\npub struct Gouda<T: GameLogic> {\n\n game_logic: T,\n\n ecs: ECS,\n\n game_states: HashMap<GameStateId, Box<dyn GameState>>,\n\n active_state: Option<GameStateId>,\n\n}\n\n\n\nimpl<T: GameLogic> Gouda<T> {\n\n pub fn new(game_logic: T) -> Self {\n", "file_path": "src/lib.rs", "rank": 75, "score": 59878.10462002457 }, { "content": "pub trait GameState {\n\n fn on_state_start(&self, ecs: &mut ECS);\n\n fn on_state_stop(&self, ecs: &mut ECS);\n\n fn render_state(&self, ecs: &ECS, scene: &Scene);\n\n fn next_state(&self, ecs: &ECS) -> Option<GameStateId>;\n\n fn active_layers(&self) -> Vec<RenderLayer>;\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 76, "score": 59878.10462002457 }, { "content": "#[derive(Debug)]\n\nstruct ConstantBuffer<T> {\n\n buffer: *mut ID3D11Buffer,\n\n phantom: PhantomData<T>,\n\n}\n\n\n\nimpl <T> ConstantBuffer<T> {\n\n pub fn new(renderer: &Renderer, data: Vec<T>) -> ConstantBuffer<T> {\n\n println!(\"Creating constant buffer\");\n\n unsafe {\n\n let constant_buffer_desc = D3D11_BUFFER_DESC {\n\n ByteWidth: (size_of::<T>() * data.len()) as u32,\n\n Usage: D3D11_USAGE_DYNAMIC,\n\n BindFlags: D3D11_BIND_CONSTANT_BUFFER,\n\n CPUAccessFlags: D3D11_CPU_ACCESS_WRITE,\n\n MiscFlags: 0,\n\n StructureByteStride: size_of::<T>() as u32,\n\n };\n\n let subresource_data = D3D11_SUBRESOURCE_DATA {\n\n pSysMem: mem::transmute(data.as_ptr()),\n\n SysMemPitch: 0,\n", "file_path": "src/platform/d3d/buffers.rs", "rank": 77, "score": 59580.8749847033 }, { "content": "fn main() {\n\n let mut gouda = Gouda::new(Game::new());\n\n gouda.run();\n\n}\n", "file_path": "sandbox/src/main.rs", "rank": 78, "score": 58827.85348068943 }, { "content": "fn win32_load_xinput() {\n\n unsafe {\n\n let dll_14 = win32_string(\"xinput1_4.dll\");\n\n let mut xinput_lib = LoadLibraryW(dll_14.as_ptr());\n\n\n\n if xinput_lib.is_null() {\n\n let dll_910 = win32_string(\"xinput9_1_0.dll\");\n\n xinput_lib = LoadLibraryW(dll_910.as_ptr());\n\n }\n\n\n\n if xinput_lib.is_null() {\n\n let dll_13 = win32_string(\"xinput1_3.dll\");\n\n xinput_lib = LoadLibraryW(dll_13.as_ptr());\n\n }\n\n\n\n if !xinput_lib.is_null() {\n\n let get_state_name_c = CString::new(\"XInputGetState\").unwrap();\n\n let get_state_ptr = GetProcAddress(xinput_lib, get_state_name_c.as_ptr());\n\n OPT_XINPUT_GET_STATE_FUNC = Some(\n\n transmute::<*mut __some_function, XInputGetStateFunc>(get_state_ptr),\n", "file_path": "src/platform/win32/window.rs", "rank": 79, "score": 55687.16309675046 }, { "content": "fn win32_process_xinput_digital_button(\n\n xinput_button_state: u16,\n\n old_state: &GameButtonState,\n\n new_state: &mut GameButtonState,\n\n button_bit: u16,\n\n) {\n\n new_state.ended_down = (xinput_button_state & button_bit) == button_bit;\n\n new_state.half_transition_count = if old_state.ended_down != new_state.ended_down {\n\n 1\n\n } else {\n\n 0\n\n };\n\n}\n\n\n", "file_path": "src/platform/win32/win32_input.rs", "rank": 80, "score": 53049.744294844175 }, { "content": "fn create_menu_bar(title: &String) {\n\n unsafe {\n\n let app_name = NSString::alloc(nil).init_str(title);\n\n let menu_bar = NSMenu::new(nil).autorelease();\n\n let app_menu_item = NSMenuItem::new(nil).autorelease();\n\n menu_bar.addItem_(app_menu_item);\n\n\n\n NSApp().setMainMenu_(menu_bar);\n\n\n\n let app_menu = NSMenu::new(nil).autorelease();\n\n let quit_prefix = NSString::alloc(nil).init_str(\"Quit \");\n\n let quit_title = quit_prefix.stringByAppendingString_(app_name);\n\n let quit_action = selector(\"terminate:\");\n\n let quit_key = NSString::alloc(nil).init_str(\"q\");\n\n let quit_menu_item = NSMenuItem::alloc(nil)\n\n .initWithTitle_action_keyEquivalent_(quit_title, quit_action, quit_key)\n\n .autorelease();\n\n app_menu.addItem_(quit_menu_item);\n\n app_menu_item.setSubmenu_(app_menu);\n\n }\n\n}\n\n\n", "file_path": "src/platform/osx/osx_window.rs", "rank": 81, "score": 49897.882000651065 }, { "content": "fn win32_string(value: &str) -> Vec<u16> {\n\n OsStr::new(value).encode_wide().chain(once(0)).collect()\n\n}\n\n\n", "file_path": "src/platform/d3d/shader.rs", "rank": 82, "score": 47512.51627879045 }, { "content": "fn win32_string(value: &str) -> Vec<u16> {\n\n OsStr::new(value).encode_wide().chain(once(0)).collect()\n\n}\n\n\n", "file_path": "src/platform/win32/window.rs", "rank": 83, "score": 47512.51627879045 }, { "content": "fn win32_string_short(value: &str) -> Vec<i8> {\n\n let wstr = win32_string(value);\n\n let mut result = vec![];\n\n for c in wstr {\n\n result.push(c as i8);\n\n }\n\n return result;\n\n}\n\n\n\nimpl Shader {\n\n pub fn new(renderer: &Renderer, has_textures: bool, vertex_file: &str, fragment_file: &str) -> Shader {\n\n return Shader {\n\n vertex_shader: VertexShader::new(renderer, has_textures, vertex_file),\n\n fragment_shader: FragmentShader::new(renderer, fragment_file),\n\n };\n\n }\n\n\n\n pub fn bind(&self, scene: &Scene) {\n\n self.vertex_shader.bind(scene);\n\n self.fragment_shader.bind(scene);\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for Shader {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {\n\n return Ok(());\n\n }\n\n}\n", "file_path": "src/platform/d3d/shader.rs", "rank": 84, "score": 46724.85060438703 }, { "content": "fn collect_elements(line: &String) -> HashMap<String, String> {\n\n let mut result = HashMap::new();\n\n let lines: Vec<&str> = line.split_whitespace().collect();\n\n for line in lines {\n\n let split: Vec<&str> = line.split(\"=\").collect();\n\n if split.len() == 2 {\n\n result.insert(String::from(split[0]), String::from(split[1]));\n\n }\n\n }\n\n return result;\n\n}\n\n\n\nimpl Font {\n\n\n\n pub fn new(renderer: &Renderer, font_file_path: &str, font_png_path: &str) -> Font {\n\n let font_file = File::open(font_file_path).unwrap();\n\n let mut font_file_reader = BufReader::new(font_file);\n\n\n\n let mut font_entries = Vec::new();\n\n let mut line_iter = font_file_reader.lines().filter_map(|result| result.ok());\n", "file_path": "src/font.rs", "rank": 85, "score": 46524.9009443627 }, { "content": " return None;\n\n }\n\n\n\n pub fn write<T: 'static>(&mut self, entity: &Entity) -> Option<&mut T>{\n\n if let Some(map) = self.components.get_mut::<EntityMap<T>>() {\n\n if let Some(Some(val)) = map.0.get_mut(entity.index) {\n\n return Some(&mut val.value);\n\n }\n\n }\n\n return None;\n\n }\n\n\n\n impl_read!(read1, [t1]);\n\n impl_read!(read2, [t1, t2]);\n\n impl_read!(read3, [t1, t2, t3]);\n\n impl_read!(read4, [t1, t2, t3, t4]);\n\n impl_read!(read5, [t1, t2, t3, t4, t5]);\n\n\n\n impl_get!(get1, [t1]);\n\n impl_get!(get2, [t1, t2]);\n", "file_path": "src/ecs.rs", "rank": 86, "score": 38668.49255128113 }, { "content": "}\n\n\n\npub struct EntityBuilder<'a> {\n\n ecs: &'a mut ECS,\n\n entity: Entity,\n\n}\n\n\n\nimpl <'a> EntityBuilder<'a> {\n\n pub fn add<T: 'static + Debug>(mut self, c: T) -> EntityBuilder<'a> {\n\n self.ecs.add_component(&self.entity, c);\n\n self\n\n }\n\n\n\n pub fn entity(&mut self) -> Entity {\n\n self.entity.clone()\n\n }\n\n}\n", "file_path": "src/ecs.rs", "rank": 87, "score": 38668.381093147815 }, { "content": " self.resources.get::<T>().unwrap()\n\n }\n\n\n\n pub fn write_res<T: 'static>(&mut self) -> &mut T {\n\n self.resources.get_mut::<T>().unwrap()\n\n }\n\n\n\n pub fn read<T: 'static>(&self, entity: &Entity) -> Option<&T>{\n\n let i = self.components.get::<EntityMap<T>>().unwrap().0.get(entity.index);\n\n match i {\n\n Some(i) => {\n\n match i {\n\n Some(i) => {\n\n return Some(&i.value);\n\n },\n\n None => {}\n\n }\n\n }\n\n None => {}\n\n }\n", "file_path": "src/ecs.rs", "rank": 88, "score": 38666.60238222991 }, { "content": "}\n\n\n\n#[macro_use]\n\nmacro_rules! impl_get {\n\n ( $fn_name:ident, [$($r:ident),*] ) => {\n\n pub fn $fn_name<$($r: 'static),*>(&self) -> Vec<Entity> {\n\n\n\n let mut minlen = 1000;\n\n $(\n\n let $r = self.components.get::<EntityMap<$r>>().unwrap();\n\n minlen = std::cmp::min(minlen, $r.0.len());\n\n )*\n\n\n\n let mut res = Vec::new();\n\n let num_iter = minlen;\n\n for i in 0..num_iter {\n\n $(\n\n let $r = $r.0.get(i);\n\n )*\n\n match ($($r),*) {\n", "file_path": "src/ecs.rs", "rank": 89, "score": 38663.72194583364 }, { "content": " }\n\n\n\n pub fn run_systems(&mut self) {\n\n let num_systems = self.systems.len();\n\n for i in 0..num_systems {\n\n let mutations = self.systems[i](&self);\n\n for mutation in mutations {\n\n mutation.apply(self);\n\n }\n\n }\n\n }\n\n\n\n pub fn new_entity(&mut self) -> Entity {\n\n self.entity_allocator.allocate()\n\n }\n\n\n\n pub fn build_entity(&mut self) -> EntityBuilder {\n\n let e = self.entity_allocator.allocate();\n\n EntityBuilder {\n\n ecs: self,\n", "file_path": "src/ecs.rs", "rank": 90, "score": 38663.590876928705 }, { "content": " entity: e,\n\n }\n\n }\n\n\n\n pub fn delete_entity(&mut self, entity: &Entity) {\n\n self.entity_allocator.deallocate(entity.clone());\n\n }\n\n\n\n pub fn add_component<T: 'static + Debug>(&mut self, entity: &Entity, component: T) {\n\n let comps = self.components.get_mut::<EntityMap<T>>();\n\n match comps {\n\n Some(comps) => {\n\n comps.set(*entity, component);\n\n },\n\n None => {}\n\n }\n\n }\n\n\n\n pub fn remove_component<T: 'static + Debug>(&mut self, entity: &Entity) {\n\n let comps = self.components.get_mut::<EntityMap<T>>();\n", "file_path": "src/ecs.rs", "rank": 91, "score": 38663.41043359218 }, { "content": " },\n\n None => { None }\n\n }\n\n },\n\n None => { None }\n\n }\n\n }\n\n\n\n pub fn get_mut(&mut self, index: GenIndex) -> Option<&mut T> {\n\n let entry = self.0.get_mut(index.index);\n\n match entry {\n\n Some(entry) => {\n\n match entry {\n\n Some(entry) => {\n\n if entry.generation == index.generation {\n\n Some(&mut entry.value)\n\n } else {\n\n None\n\n }\n\n\n", "file_path": "src/ecs.rs", "rank": 92, "score": 38663.10145276559 }, { "content": " pub fn clear(&mut self, index: GenIndex) {\n\n if index.index >= self.0.len() {\n\n return;\n\n }\n\n let entry = self.0.get_mut(index.index).unwrap();\n\n *entry = None;\n\n }\n\n\n\n pub fn get(&self, index: GenIndex) -> Option<&T> {\n\n let entry = self.0.get(index.index);\n\n match entry {\n\n Some(entry) => {\n\n match entry {\n\n Some(entry) => {\n\n if entry.generation == index.generation {\n\n Some(&entry.value)\n\n } else {\n\n None\n\n }\n\n\n", "file_path": "src/ecs.rs", "rank": 93, "score": 38661.609976275155 }, { "content": " impl_get!(get3, [t1, t2, t3]);\n\n impl_get!(get4, [t1, t2, t3, t4]);\n\n impl_get!(get5, [t1, t2, t3, t4, t5]);\n\n\n\n pub fn new() -> ECS {\n\n ECS {\n\n entity_allocator: GenIndexAllocator::new(),\n\n components: AnyMap::new(),\n\n resources: AnyMap::new(),\n\n systems: Vec::new(),\n\n queued_events: AnyMap::new(),\n\n processing_events: AnyMap::new(),\n\n }\n\n }\n\n}\n\n\n\nimpl Default for ECS {\n\n fn default() -> Self {\n\n ECS::new()\n\n }\n", "file_path": "src/ecs.rs", "rank": 94, "score": 38661.33268381838 }, { "content": " match comps {\n\n Some(comps) => {\n\n comps.clear(*entity);\n\n },\n\n None => {}\n\n }\n\n }\n\n\n\n pub fn cleanup_components<T: 'static + Debug>(&mut self) {\n\n let mut comps_to_remove = vec![];\n\n if let Some(comps) = self.components.get::<EntityMap<T>>() {\n\n let l = comps.0.len();\n\n for i in 0..l {\n\n if let Some(e) = &comps.0[i] {\n\n let e = GenIndex {index: i, generation: e.generation};\n\n if !self.entity_allocator.is_live(e) {\n\n println!(\"Not live\");\n\n comps_to_remove.push(e);\n\n }\n\n }\n", "file_path": "src/ecs.rs", "rank": 95, "score": 38661.21089083702 }, { "content": " },\n\n None => { None }\n\n }\n\n },\n\n None => { None }\n\n }\n\n }\n\n}\n\n\n\npub type Entity = GenIndex;\n", "file_path": "src/ecs.rs", "rank": 96, "score": 38660.613232368334 }, { "content": " ( $fn_name:ident, [$($r:ident),*] ) => {\n\n pub fn $fn_name<$($r: 'static),*>(&self) -> Vec<($(&$r),*, Entity)> {\n\n\n\n let mut minlen = 1000;\n\n $(\n\n let $r = self.components.get::<EntityMap<$r>>().unwrap();\n\n minlen = std::cmp::min(minlen, $r.0.len());\n\n )*\n\n\n\n let mut res = Vec::new();\n\n let num_iter = minlen;\n\n for i in 0..num_iter {\n\n $(\n\n let $r = $r.0.get(i);\n\n )*\n\n match ($($r),*) {\n\n ($(Some($r)),*) => {\n\n match ($($r),*) {\n\n ($(Some($r)),*) => {\n\n let mut generation = 0;\n", "file_path": "src/ecs.rs", "rank": 97, "score": 38658.56738670879 }, { "content": "#![allow(non_camel_case_types)]\n\n#![allow(unused_parens)]\n\n\n\nuse anymap::AnyMap;\n\nuse std::fmt::Debug;\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Debug, Eq, PartialEq, Copy, Clone)]\n\npub struct GenIndex {\n\n index: usize,\n\n generation: u64,\n\n}\n\n\n\n#[derive(Eq, PartialEq, Clone)]\n", "file_path": "src/ecs.rs", "rank": 98, "score": 38658.012238196716 }, { "content": " entry.is_free = true;\n\n self.free.push(index.index);\n\n return true;\n\n },\n\n None => {\n\n return false;\n\n }\n\n }\n\n }\n\n\n\n pub fn is_live(&self, index: GenIndex) -> bool {\n\n let e = self.entries.get(index.index).unwrap();\n\n return e.generation == index.generation && !e.is_free;\n\n }\n\n}\n\n\n", "file_path": "src/ecs.rs", "rank": 99, "score": 38657.765319220496 } ]
Rust
src/lib.rs
DownToZero-Cloud/dtz-identity-auth
a93d6ca8472680fd8d1d7f6dc4082e5726aa5438
#![deny(missing_docs)] #![feature(adt_const_params)] use serde::{Serialize,Deserialize}; use axum::{ async_trait, extract::{FromRequest, RequestParts}, http::header::HeaderValue, http::{StatusCode}, }; use uuid::Uuid; use jwt::PKeyWithDigest; use openssl::hash::MessageDigest; use openssl::pkey::PKey; use jwt::algorithm::VerifyingAlgorithm; use jwt::claims::Claims; use jwt::FromBase64; use cookie::Cookie; use hyper::{Body, Request,Client,Method}; use hyper::body; use lru_time_cache::LruCache; use std::sync::Mutex; use once_cell::sync::Lazy; const PUBLIC_KEY: &str = r#"-----BEGIN PUBLIC KEY----- MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0gVBfnAa7748XyjOYXQ5 Yf39yKJ/t3b2wF5F1yPUuyLanwZYTGBV804Vs0YWiiKJ1H/csI3jWX5CWkV5TzMx CIP4kCugsFH6wP8rCt8Vei+rdJFB/LrlYz8Ks8Td60c5t/Hq9yQEz6kIpa5TmZw2 DSDPvOKXW2SJRPCqj3JEk6fHsJ6nZ2BIoFvs6NMRNqgSEHr1x7lUUt9teWM2wOtF ze24D+luvXWhRUjMMvMKkPuxdS6mPbXqoyde3U9tcsC+t2tThqVaREPkj6ew1IcU RnoXLi+43p4j4cQqxRjG3DzzjqAlivFjlGR/vqfLvUrGP9opjI+zs3l4G8IYWsqM KQIDAQAB -----END PUBLIC KEY-----"#; #[derive(Serialize, Deserialize, Debug, Default, Clone)] pub struct DtzProfile { pub identity_id: Uuid, pub context_id: Uuid, #[serde(skip_serializing_if = "Vec::is_empty")] pub roles: Vec<String>, } pub struct DtzRequiredRole<const N: &'static str>(pub DtzProfile); #[async_trait] impl<B, const N: &'static str> FromRequest<B> for DtzRequiredRole<N> where B: Send, { type Rejection = (StatusCode, &'static str); async fn from_request(req: &mut RequestParts<B>) -> Result<Self, Self::Rejection> { let result = get_profile_from_request(req).await; match result { Ok(profile) => { let scope = replace_placeholder(N, &profile); if !profile.roles.contains(&scope) { return Err((StatusCode::FORBIDDEN, "no permission")); } Ok(DtzRequiredRole(profile)) }, Err(e) => Err((StatusCode::UNAUTHORIZED, &e)), } } } pub struct DtzRequiredUser(pub DtzProfile); #[async_trait] impl<B> FromRequest<B> for DtzRequiredUser where B: Send, { type Rejection = (StatusCode, &'static str); async fn from_request(req: &mut RequestParts<B>) -> Result<Self, Self::Rejection> { let result = get_profile_from_request(req).await; match result { Ok(profile) => Ok(DtzRequiredUser(profile)), Err(e) => Err((StatusCode::UNAUTHORIZED, e)), } } } pub struct DtzOptionalUser(pub Option<DtzProfile>); #[async_trait] impl<B> FromRequest<B> for DtzOptionalUser where B: Send, { type Rejection = (StatusCode, &'static str); async fn from_request(req: &mut RequestParts<B>) -> Result<Self, Self::Rejection> { let result = get_profile_from_request(req).await; match result { Ok(profile) => Ok(DtzOptionalUser(Some(profile))), Err(_e) => Ok(DtzOptionalUser(None)), } } } async fn get_profile_from_request<B>(req: &mut RequestParts<B>) -> Result<DtzProfile,&'static str> { let headers = req.headers().clone(); let cookie: Option<&HeaderValue> = headers.get("cookie"); let authorization: Option<&HeaderValue> = headers.get("authorization"); let header_api_key: Option<&HeaderValue> = headers.get("x-api-key"); let header_context_id: Option<&HeaderValue> = headers.get("x-dtz-context"); let profile: DtzProfile; if let Some(cookie) = cookie { match verify_token_from_cookie(cookie.clone()) { Ok(p) => { profile = p; }, Err(_) => { return Err("no valid token found in cookie"); } } }else if let Some(authorization) = authorization { match verify_token_from_bearer(authorization.clone()) { Ok(p) => { profile = p; }, Err(_) => { return Err("not authorized"); } } }else if let Some(header_api_key) = header_api_key { if header_context_id.is_some() { profile = verifiy_api_key(header_api_key.to_str().unwrap(), Some(header_context_id.unwrap().to_str().unwrap())).await.unwrap(); }else{ profile = verifiy_api_key(header_api_key.to_str().unwrap(), None).await.unwrap(); } }else { let query = req.uri().query().unwrap_or_default(); let value: GetAuthParams = serde_urlencoded::from_str(query).unwrap(); if value.api_key.is_some() { if value.context_id.is_some() { profile = verifiy_api_key(&value.api_key.unwrap(), Some(&value.context_id.unwrap())).await.unwrap(); }else{ profile = verifiy_api_key(&value.api_key.unwrap(), None).await.unwrap(); } }else{ return Err("no authorization header"); } } Ok(profile) } fn verify_token_from_cookie(cookie: HeaderValue) -> Result<DtzProfile,String> { let cookie_str = cookie.to_str().unwrap(); match Cookie::parse(cookie_str){ Ok(cookie) => { let token = cookie.value().to_string(); verify_token(token) }, Err(_) => Err("no valid token found in cookie".to_string()) } } fn verify_token_from_bearer(bearer: HeaderValue) -> Result<DtzProfile,String> { let bearer_str = bearer.to_str().unwrap(); let jwt = bearer_str.replace("Bearer ",""); verify_token(jwt) } fn verify_token(token: String) -> Result<DtzProfile,String> { if token.as_str().contains('.') { let jwt_parts: Vec<&str> = token.split('.').collect(); let jwt_alg = jwt_parts.get(0).unwrap(); let jwt_payload = jwt_parts.get(1).unwrap(); let jwt_sig = jwt_parts.get(2).unwrap(); let algorithm = PKeyWithDigest { digest: MessageDigest::sha256(), key: PKey::public_key_from_pem(PUBLIC_KEY.as_bytes()).unwrap(), }; match algorithm.verify(jwt_alg, jwt_payload, jwt_sig) { Ok(_) => { let claims = Claims::from_base64(jwt_payload).unwrap(); let roles_claim = claims.private.get("roles").unwrap(); let mut roles: Vec<String> = Vec::new(); let arr = roles_claim.as_array().unwrap(); for role in arr { roles.push(role.as_str().unwrap().to_string()); } let scope_str = claims.private.get("scope").unwrap().as_str().unwrap(); let result = DtzProfile{ identity_id: Uuid::parse_str(&claims.registered.subject.unwrap()).unwrap(), context_id: Uuid::parse_str(scope_str).unwrap(), roles, }; Ok(result) }, Err(_) => { return Err("invalid token".to_string()); } } }else{ Err("not authorized".to_string()) } } #[derive(Serialize, Deserialize, Debug)] struct TokenResponse { access_token: String, scope: Option<String>, token_type: String, expires_in: u32, } #[derive(Serialize, Deserialize, Debug)] #[serde(rename_all = "camelCase")] struct GetAuthParams { api_key: Option<String>, context_id: Option<String>, } static KNOWN_IDENTITIES: Lazy<Mutex<LruCache::<String, DtzProfile>>> = Lazy::new(|| { let time_to_live = std::time::Duration::from_secs(3600); let m = LruCache::<String, DtzProfile>::with_expiry_duration_and_capacity(time_to_live,100); Mutex::new(m) }); async fn verifiy_api_key(api_key: &str, context_id: Option<&str>) -> Result<DtzProfile,String> { let req_data = if context_id.is_some() { format!("{{\"apiKey\":\"{}\",\"contextId\":\"{}\"}}",api_key,context_id.unwrap()) } else { format!("{{\"apiKey\":\"{}\"}}",api_key) }; { let mut x = KNOWN_IDENTITIES.lock().unwrap(); if x.contains_key(&req_data){ let profile = x.get(&req_data).unwrap().clone(); return Ok(profile); } } let hostname = std::env::var("HOSTNAME").unwrap_or("localhost".to_string()); let req = Request::builder() .method(Method::POST) .uri("https://identity.dtz.rocks/api/2021-02-21/auth/apikey") .header("content-type", "application/json") .header("X-DTZ-SOURCE", hostname) .body(Body::from(req_data.clone())).unwrap(); let https = hyper_rustls::HttpsConnectorBuilder::new() .with_native_roots() .https_only() .enable_http1() .build(); let http_client = Client::builder().build(https); let resp = http_client.request(req).await.unwrap(); if resp.status().is_success() { let bytes = body::to_bytes(resp.into_body()).await.unwrap(); let resp_str = String::from_utf8(bytes.to_vec()).expect("response was not valid utf-8"); let token_response: TokenResponse = serde_json::from_str(&resp_str).unwrap(); let jwt = token_response.access_token; let result = verify_token(jwt); { if result.is_ok() { let mut x = KNOWN_IDENTITIES.lock().unwrap(); x.insert(req_data,result.clone().unwrap()); } } result }else{ Err("not authorized".to_string()) } } fn replace_placeholder(template: &str, profile: &DtzProfile) -> String { let mut result = template.to_string(); result = result.replace("{identity_id}", &profile.identity_id.to_string()); result = result.replace("{context_id}", &profile.context_id.to_string()); result = result.replace("{roles}", &profile.roles.join(",")); result } pub fn verify_role(profile: &DtzProfile, role: &str) -> bool { profile.roles.contains(&role.to_string()) } pub fn verfify_context_role(profile: &DtzProfile, role: &str) -> bool { let replaced_role = replace_placeholder(role, profile); profile.roles.contains(&replaced_role.to_string()) } #[cfg(test)] mod tests { use uuid::Uuid; use super::*; #[test] fn test_replacement_identity() { let identity = DtzProfile{ identity_id: Uuid::parse_str("00000000-0000-0000-0000-000000000000").unwrap(), context_id: Uuid::parse_str("00000000-0000-0000-0000-000000000000").unwrap(), roles: vec!["admin".to_string()], }; let result = super::replace_placeholder("https://dtz.rocks/context/admin/{identity_id}", &identity); assert_eq!(result, "https://dtz.rocks/context/admin/00000000-0000-0000-0000-000000000000"); } #[test] fn test_replacement_context() { let identity = DtzProfile{ identity_id: Uuid::parse_str("00000000-0000-0000-0000-000000000000").unwrap(), context_id: Uuid::parse_str("00000000-0000-0000-0000-000000000000").unwrap(), roles: vec!["admin".to_string()], }; let result = super::replace_placeholder("https://dtz.rocks/context/admin/{context_id}", &identity); assert_eq!(result, "https://dtz.rocks/context/admin/00000000-0000-0000-0000-000000000000"); } #[test] fn test_replacement_nothing() { let identity = DtzProfile{ identity_id: Uuid::parse_str("00000000-0000-0000-0000-000000000000").unwrap(), context_id: Uuid::parse_str("00000000-0000-0000-0000-000000000000").unwrap(), roles: vec!["admin".to_string()], }; let result = super::replace_placeholder("https://dtz.rocks/context/admin", &identity); assert_eq!(result, "https://dtz.rocks/context/admin"); } }
#![deny(missing_docs)] #![feature(adt_const_params)] use serde::{Serialize,Deserialize}; use axum::{ async_trait, extract::{FromRequest, RequestParts}, http::header::HeaderValue, http::{StatusCode}, }; use uuid::Uuid; use jwt::PKeyWithDigest; use openssl::hash::MessageDigest; use openssl::pkey::PKey; use jwt::algorithm::VerifyingAlgorithm; use jwt::claims::Claims; use jwt::FromBase64; use cookie::Cookie; use hyper::{Body, Request,Client,Method}; use hyper::body; use lru_time_cache::LruCache; use std::sync::Mutex; use once_cell::sync::Lazy; const PUBLIC_KEY: &str = r#"-----BEGIN PUBLIC KEY----- MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0gVBfnAa7748XyjOYXQ5 Yf39yKJ/t3b2wF5F1yPUuyLanwZYTGBV804Vs0YWiiKJ1H/csI3jWX5CWkV5TzMx CIP4kCugsFH6wP8rCt8Vei+rdJFB/LrlYz8Ks8Td60c5t/Hq9yQEz6kIpa5TmZw2 DSDPvOKXW2SJRPCqj3JEk6fHsJ6nZ2BIoFvs6NMRNqgSEHr1x7lUUt9teWM2wOtF ze24D+luvXWhRUjMMvMKkPuxdS6mPbXqoyde3U9tcsC+t2tThqVaREPkj6ew1IcU RnoXLi+43p4j4cQqxRjG3DzzjqAlivFjlGR/vqfLvUrGP9opjI+zs3l4G8IYWsqM KQIDAQAB -----END PUBLIC KEY-----"#; #[derive(Serialize, Deserialize, Debug, Default, Clone)] pub struct DtzProfile { pub identity_id: Uuid, pub context_id: Uuid, #[serde(skip_serializing_if = "Vec::is_empty")] pub roles: Vec<String>, } pub struct DtzRequiredRole<const N: &'static str>(pub DtzProfile); #[async_trait] impl<B, const N: &'static str> FromRequest<B> for DtzRequiredRole<N> where B: Send, { type Rejection = (StatusCode, &'static str); async fn from_request(req: &mut RequestParts<B>) -> Result<Self, Self::Rejection> { let result = get_profile_from_request(req).await; match result { Ok(profile) => { let scope = replace_placeholder(N, &profile); if !profile.roles.contains(&scope) { return Err((StatusCode::FORBIDDEN, "no permission")); } Ok(DtzRequiredRole(profile)) }, Err(e) => Err((StatusCode::UNAUTHORIZED, &e)), } } } pub struct DtzRequiredUser(pub DtzProfile); #[async_trait] impl<B> FromRequest<B> for DtzRequiredUser where B: Send, { type Rejection = (StatusCode, &'static str); async fn from_request(req: &mut RequestParts<B>) -> Result<Self, Self::Rejection> { let result = get_profile_from_request(req).await; match result { Ok(profile) => Ok(DtzRequiredUser(profile)), Err(e) => Err((StatusCode::UNAUTHORIZED, e)), } } } pub struct DtzOptionalUser(pub Option<DtzProfile>); #[async_trait] impl<B> FromRequest<B> for DtzOptionalUser where B: Send, { type Rejection = (StatusCode, &'static str); async fn from_request(req: &mut RequestParts<B>) -> Result<Self, Self::Rejection> { let result = get_profile_from_request(req).await; match result { Ok(profile) => Ok(DtzOptionalUser(Some(profile))), Err(_e) => Ok(DtzOptionalUser(None)), } } } async fn get_profile_from_request<B>(req: &mut RequestParts<B>) -> Result<DtzProfile,&'static str> { let headers = req.headers().clone(); let cookie: Option<&HeaderValue> = headers.get("cookie"); let authorization: Option<&HeaderValue> = headers.get("authorization"); let header_api_key: Option<&HeaderValue> = headers.get("x-api-key"); let header_context_id: Option<&HeaderValue> = headers.get("x-dtz-context"); let profile: DtzProfile; if let Some(cookie) = cookie { match verify_token_from_cookie(cookie.clone()) { Ok(p) => { profile = p; }, Err(_) => { return Err("no valid token found in cookie"); } } }else if let Some(authorization) = authorization { match verify_token_from_bearer(authorization.clone()) { Ok(p) => { profile = p; }, Err(_) => { return Err("not authorized"); } } }else if let Some(header_api_key) = header_api_key {
}else { let query = req.uri().query().unwrap_or_default(); let value: GetAuthParams = serde_urlencoded::from_str(query).unwrap(); if value.api_key.is_some() { if value.context_id.is_some() { profile = verifiy_api_key(&value.api_key.unwrap(), Some(&value.context_id.unwrap())).await.unwrap(); }else{ profile = verifiy_api_key(&value.api_key.unwrap(), None).await.unwrap(); } }else{ return Err("no authorization header"); } } Ok(profile) } fn verify_token_from_cookie(cookie: HeaderValue) -> Result<DtzProfile,String> { let cookie_str = cookie.to_str().unwrap(); match Cookie::parse(cookie_str){ Ok(cookie) => { let token = cookie.value().to_string(); verify_token(token) }, Err(_) => Err("no valid token found in cookie".to_string()) } } fn verify_token_from_bearer(bearer: HeaderValue) -> Result<DtzProfile,String> { let bearer_str = bearer.to_str().unwrap(); let jwt = bearer_str.replace("Bearer ",""); verify_token(jwt) } fn verify_token(token: String) -> Result<DtzProfile,String> { if token.as_str().contains('.') { let jwt_parts: Vec<&str> = token.split('.').collect(); let jwt_alg = jwt_parts.get(0).unwrap(); let jwt_payload = jwt_parts.get(1).unwrap(); let jwt_sig = jwt_parts.get(2).unwrap(); let algorithm = PKeyWithDigest { digest: MessageDigest::sha256(), key: PKey::public_key_from_pem(PUBLIC_KEY.as_bytes()).unwrap(), }; match algorithm.verify(jwt_alg, jwt_payload, jwt_sig) { Ok(_) => { let claims = Claims::from_base64(jwt_payload).unwrap(); let roles_claim = claims.private.get("roles").unwrap(); let mut roles: Vec<String> = Vec::new(); let arr = roles_claim.as_array().unwrap(); for role in arr { roles.push(role.as_str().unwrap().to_string()); } let scope_str = claims.private.get("scope").unwrap().as_str().unwrap(); let result = DtzProfile{ identity_id: Uuid::parse_str(&claims.registered.subject.unwrap()).unwrap(), context_id: Uuid::parse_str(scope_str).unwrap(), roles, }; Ok(result) }, Err(_) => { return Err("invalid token".to_string()); } } }else{ Err("not authorized".to_string()) } } #[derive(Serialize, Deserialize, Debug)] struct TokenResponse { access_token: String, scope: Option<String>, token_type: String, expires_in: u32, } #[derive(Serialize, Deserialize, Debug)] #[serde(rename_all = "camelCase")] struct GetAuthParams { api_key: Option<String>, context_id: Option<String>, } static KNOWN_IDENTITIES: Lazy<Mutex<LruCache::<String, DtzProfile>>> = Lazy::new(|| { let time_to_live = std::time::Duration::from_secs(3600); let m = LruCache::<String, DtzProfile>::with_expiry_duration_and_capacity(time_to_live,100); Mutex::new(m) }); async fn verifiy_api_key(api_key: &str, context_id: Option<&str>) -> Result<DtzProfile,String> { let req_data = if context_id.is_some() { format!("{{\"apiKey\":\"{}\",\"contextId\":\"{}\"}}",api_key,context_id.unwrap()) } else { format!("{{\"apiKey\":\"{}\"}}",api_key) }; { let mut x = KNOWN_IDENTITIES.lock().unwrap(); if x.contains_key(&req_data){ let profile = x.get(&req_data).unwrap().clone(); return Ok(profile); } } let hostname = std::env::var("HOSTNAME").unwrap_or("localhost".to_string()); let req = Request::builder() .method(Method::POST) .uri("https://identity.dtz.rocks/api/2021-02-21/auth/apikey") .header("content-type", "application/json") .header("X-DTZ-SOURCE", hostname) .body(Body::from(req_data.clone())).unwrap(); let https = hyper_rustls::HttpsConnectorBuilder::new() .with_native_roots() .https_only() .enable_http1() .build(); let http_client = Client::builder().build(https); let resp = http_client.request(req).await.unwrap(); if resp.status().is_success() { let bytes = body::to_bytes(resp.into_body()).await.unwrap(); let resp_str = String::from_utf8(bytes.to_vec()).expect("response was not valid utf-8"); let token_response: TokenResponse = serde_json::from_str(&resp_str).unwrap(); let jwt = token_response.access_token; let result = verify_token(jwt); { if result.is_ok() { let mut x = KNOWN_IDENTITIES.lock().unwrap(); x.insert(req_data,result.clone().unwrap()); } } result }else{ Err("not authorized".to_string()) } } fn replace_placeholder(template: &str, profile: &DtzProfile) -> String { let mut result = template.to_string(); result = result.replace("{identity_id}", &profile.identity_id.to_string()); result = result.replace("{context_id}", &profile.context_id.to_string()); result = result.replace("{roles}", &profile.roles.join(",")); result } pub fn verify_role(profile: &DtzProfile, role: &str) -> bool { profile.roles.contains(&role.to_string()) } pub fn verfify_context_role(profile: &DtzProfile, role: &str) -> bool { let replaced_role = replace_placeholder(role, profile); profile.roles.contains(&replaced_role.to_string()) } #[cfg(test)] mod tests { use uuid::Uuid; use super::*; #[test] fn test_replacement_identity() { let identity = DtzProfile{ identity_id: Uuid::parse_str("00000000-0000-0000-0000-000000000000").unwrap(), context_id: Uuid::parse_str("00000000-0000-0000-0000-000000000000").unwrap(), roles: vec!["admin".to_string()], }; let result = super::replace_placeholder("https://dtz.rocks/context/admin/{identity_id}", &identity); assert_eq!(result, "https://dtz.rocks/context/admin/00000000-0000-0000-0000-000000000000"); } #[test] fn test_replacement_context() { let identity = DtzProfile{ identity_id: Uuid::parse_str("00000000-0000-0000-0000-000000000000").unwrap(), context_id: Uuid::parse_str("00000000-0000-0000-0000-000000000000").unwrap(), roles: vec!["admin".to_string()], }; let result = super::replace_placeholder("https://dtz.rocks/context/admin/{context_id}", &identity); assert_eq!(result, "https://dtz.rocks/context/admin/00000000-0000-0000-0000-000000000000"); } #[test] fn test_replacement_nothing() { let identity = DtzProfile{ identity_id: Uuid::parse_str("00000000-0000-0000-0000-000000000000").unwrap(), context_id: Uuid::parse_str("00000000-0000-0000-0000-000000000000").unwrap(), roles: vec!["admin".to_string()], }; let result = super::replace_placeholder("https://dtz.rocks/context/admin", &identity); assert_eq!(result, "https://dtz.rocks/context/admin"); } }
if header_context_id.is_some() { profile = verifiy_api_key(header_api_key.to_str().unwrap(), Some(header_context_id.unwrap().to_str().unwrap())).await.unwrap(); }else{ profile = verifiy_api_key(header_api_key.to_str().unwrap(), None).await.unwrap(); }
if_condition
[ { "content": "# 0.4.10 2022-04-30\n\n\n\n* fix unparsable cookie\n\n\n\n# 0.4.9 2022-04-20\n\n\n\n* fail on invalid jwts\n\n\n\n# 0.4.8 2022-04-17\n\n\n\n* add verify_role functions\n\n\n\n# 0.4.7 2022-03-11\n\n\n\n* support foreign cookies\n\n\n\n# 0.4.6 2022-03-11\n\n\n\n* update deps\n\n\n\n# 0.4.4 2022-03-11\n\n\n\n* cache api keys\n\n* add x-dtz-source header\n\n\n\n# 0.4.1 2022-03-07\n\n\n\n* unify auth methods\n\n\n\n# 0.4.0 2022-03-04\n\n\n", "file_path": "CHANGELOG.md", "rank": 19, "score": 5.831813479340455 } ]
Rust
examples/capture-test.rs
petrosagg/differential-dataflow
2e38abbb62aedf02cb9b6b5debb73c29b6e97dbb
extern crate rand; extern crate timely; extern crate differential_dataflow; extern crate serde; extern crate rdkafka; use rand::{Rng, SeedableRng, StdRng}; use timely::dataflow::*; use timely::dataflow::operators::probe::Handle; use differential_dataflow::input::Input; use differential_dataflow::Collection; use differential_dataflow::operators::*; use differential_dataflow::lattice::Lattice; type Node = u32; type Edge = (Node, Node); fn main() { let nodes: u32 = std::env::args().nth(1).unwrap().parse().unwrap(); let edges: u32 = std::env::args().nth(2).unwrap().parse().unwrap(); let batch: u32 = std::env::args().nth(3).unwrap().parse().unwrap(); let topic = std::env::args().nth(4).unwrap(); let write = std::env::args().any(|x| x == "write"); let read = std::env::args().any(|x| x == "read"); timely::execute_from_args(std::env::args(), move |worker| { let timer = ::std::time::Instant::now(); let mut probe = Handle::new(); let (mut roots, mut graph, _write_token, _read_token) = worker.dataflow(|scope| { let (root_input, roots) = scope.new_collection(); let (edge_input, graph) = scope.new_collection(); let result = bfs(&graph, &roots); let result = result.map(|(_,l)| l) .consolidate() .probe_with(&mut probe); let write_token = if write { Some(kafka::create_sink(&result.inner, "localhost:9092", &topic)) } else { None }; let read_token = if read { let (read_token, stream) = kafka::create_source(result.scope(), "localhost:9092", &topic, "group"); use differential_dataflow::AsCollection; stream .as_collection() .negate() .concat(&result) .consolidate() .inspect(|x| println!("In error: {:?}", x)) .probe_with(&mut probe) .assert_empty() ; Some(read_token) } else { None }; (root_input, edge_input, write_token, read_token) }); let seed: &[_] = &[1, 2, 3, 4]; let mut rng1: StdRng = SeedableRng::from_seed(seed); let mut rng2: StdRng = SeedableRng::from_seed(seed); roots.insert(0); roots.close(); println!("performing BFS on {} nodes, {} edges:", nodes, edges); if worker.index() == 0 { for _ in 0 .. edges { graph.insert((rng1.gen_range(0, nodes), rng1.gen_range(0, nodes))); } } println!("{:?}\tloaded", timer.elapsed()); graph.advance_to(1); graph.flush(); worker.step_while(|| probe.less_than(graph.time())); println!("{:?}\tstable", timer.elapsed()); for round in 0 .. { if write { std::thread::sleep(std::time::Duration::from_millis(100)); } for element in 0 .. batch { if worker.index() == 0 { graph.insert((rng1.gen_range(0, nodes), rng1.gen_range(0, nodes))); graph.remove((rng2.gen_range(0, nodes), rng2.gen_range(0, nodes))); } graph.advance_to(2 + round * batch + element); } graph.flush(); let timer2 = ::std::time::Instant::now(); worker.step_while(|| probe.less_than(&graph.time())); if worker.index() == 0 { let elapsed = timer2.elapsed(); println!("{:?}\t{:?}:\t{}", timer.elapsed(), round, elapsed.as_secs() * 1000000000 + (elapsed.subsec_nanos() as u64)); } } println!("finished; elapsed: {:?}", timer.elapsed()); }).unwrap(); } fn bfs<G: Scope>(edges: &Collection<G, Edge>, roots: &Collection<G, Node>) -> Collection<G, (Node, u32)> where G::Timestamp: Lattice+Ord { let nodes = roots.map(|x| (x, 0)); nodes.iterate(|inner| { let edges = edges.enter(&inner.scope()); let nodes = nodes.enter(&inner.scope()); inner.join_map(&edges, |_k,l,d| (*d, l+1)) .concat(&nodes) .reduce(|_, s, t| t.push((*s[0].0, 1))) }) } pub mod kafka { use serde::{Serialize, Deserialize}; use timely::scheduling::SyncActivator; use rdkafka::{ClientContext, config::ClientConfig}; use rdkafka::consumer::{BaseConsumer, ConsumerContext}; use rdkafka::error::{KafkaError, RDKafkaError}; use differential_dataflow::capture::Writer; use std::hash::Hash; use timely::progress::Timestamp; use timely::dataflow::{Scope, Stream}; use differential_dataflow::ExchangeData; use differential_dataflow::lattice::Lattice; pub fn create_source<G, D, T, R>(scope: G, addr: &str, topic: &str, group: &str) -> (Box<dyn std::any::Any>, Stream<G, (D, T, R)>) where G: Scope<Timestamp = T>, D: ExchangeData + Hash + for<'a> serde::Deserialize<'a>, T: ExchangeData + Hash + for<'a> serde::Deserialize<'a> + Timestamp + Lattice, R: ExchangeData + Hash + for<'a> serde::Deserialize<'a>, { differential_dataflow::capture::source::build(scope, |activator| { let source = KafkaSource::new(addr, topic, group, activator); differential_dataflow::capture::YieldingIter::new_from(Iter::<D,T,R>::new_from(source), std::time::Duration::from_millis(10)) }) } pub fn create_sink<G, D, T, R>(stream: &Stream<G, (D, T, R)>, addr: &str, topic: &str) -> Box<dyn std::any::Any> where G: Scope<Timestamp = T>, D: ExchangeData + Hash + Serialize + for<'a> Deserialize<'a>, T: ExchangeData + Hash + Serialize + for<'a> Deserialize<'a> + Timestamp + Lattice, R: ExchangeData + Hash + Serialize + for<'a> Deserialize<'a>, { use std::rc::Rc; use std::cell::RefCell; use differential_dataflow::hashable::Hashable; let sink = KafkaSink::new(addr, topic); let result = Rc::new(RefCell::new(sink)); let sink_hash = (addr.to_string(), topic.to_string()).hashed(); differential_dataflow::capture::sink::build( &stream, sink_hash, Rc::downgrade(&result), Rc::downgrade(&result), ); Box::new(result) } pub struct KafkaSource { consumer: BaseConsumer<ActivationConsumerContext>, } impl KafkaSource { pub fn new(addr: &str, topic: &str, group: &str, activator: SyncActivator) -> Self { let mut kafka_config = ClientConfig::new(); kafka_config.set("bootstrap.servers", &addr.to_string()); kafka_config .set("enable.auto.commit", "false") .set("auto.offset.reset", "earliest"); kafka_config.set("topic.metadata.refresh.interval.ms", "30000"); kafka_config.set("fetch.message.max.bytes", "134217728"); kafka_config.set("group.id", group); kafka_config.set("isolation.level", "read_committed"); let activator = ActivationConsumerContext(activator); let consumer = kafka_config.create_with_context::<_, BaseConsumer<_>>(activator).unwrap(); use rdkafka::consumer::Consumer; consumer.subscribe(&[topic]).unwrap(); Self { consumer, } } } pub struct Iter<D, T, R> { pub source: KafkaSource, phantom: std::marker::PhantomData<(D, T, R)>, } impl<D, T, R> Iter<D, T, R> { pub fn new_from(source: KafkaSource) -> Self { Self { source, phantom: std::marker::PhantomData, } } } impl<D, T, R> Iterator for Iter<D, T, R> where D: for<'a>Deserialize<'a>, T: for<'a>Deserialize<'a>, R: for<'a>Deserialize<'a>, { type Item = differential_dataflow::capture::Message<D, T, R>; fn next(&mut self) -> Option<Self::Item> { use rdkafka::message::Message; self.source .consumer .poll(std::time::Duration::from_millis(0)) .and_then(|result| result.ok()) .and_then(|message| { message.payload().and_then(|message| bincode::deserialize::<differential_dataflow::capture::Message<D, T, R>>(message).ok()) }) } } struct ActivationConsumerContext(SyncActivator); impl ClientContext for ActivationConsumerContext { } impl ActivationConsumerContext { fn activate(&self) { self.0.activate().unwrap(); } } impl ConsumerContext for ActivationConsumerContext { fn message_queue_nonempty_callback(&self) { self.activate(); } } use std::time::Duration; use rdkafka::producer::DefaultProducerContext; use rdkafka::producer::{BaseRecord, ThreadedProducer}; pub struct KafkaSink { topic: String, producer: ThreadedProducer<DefaultProducerContext>, buffer: Vec<u8>, } impl KafkaSink { pub fn new(addr: &str, topic: &str) -> Self { let mut config = ClientConfig::new(); config.set("bootstrap.servers", &addr); config.set("queue.buffering.max.kbytes", &format!("{}", 16 << 20)); config.set("queue.buffering.max.messages", &format!("{}", 10_000_000)); config.set("queue.buffering.max.ms", &format!("{}", 10)); let producer = config .create_with_context::<_, ThreadedProducer<_>>(DefaultProducerContext) .expect("creating kafka producer for kafka sinks failed"); Self { producer, topic: topic.to_string(), buffer: Vec::new(), } } } impl<T: Serialize> Writer<T> for KafkaSink { fn poll(&mut self, item: &T) -> Option<Duration> { self.buffer.clear(); bincode::serialize_into(&mut self.buffer, item).expect("Writing to a `Vec<u8>` cannot fail"); let record = BaseRecord::<[u8], _>::to(&self.topic).payload(&self.buffer); self.producer.send(record).err().map(|(e, _)| { if let KafkaError::MessageProduction(RDKafkaError::QueueFull) = e { Duration::from_secs(1) } else { Duration::from_secs(1) } }) } fn done(&self) -> bool { self.producer.in_flight_count() == 0 } } }
extern crate rand; extern crate timely; extern crate differential_dataflow; extern crate serde; extern crate rdkafka; use rand::{Rng, SeedableRng, StdRng}; use timely::dataflow::*; use timely::dataflow::operators::probe::Handle; use differential_dataflow::input::Input; use differential_dataflow::Collection; use differential_dataflow::operators::*; use differential_dataflow::lattice::Lattice; type Node = u32; type Edge = (Node, Node); fn main() { let nodes: u32 = std::env::args().nth(1).unwrap().parse().unwrap(); let edges: u32 = std::env::args().nth(2).unwrap().parse().unwrap(); let batch: u32 = std::env::args().nth(3).unwrap().parse().unwrap(); let topic = std::env::args().nth(4).unwrap(); let write = std::env::args().any(|x| x == "write"); let read = std::env::args().any(|x| x == "read"); timely::execute_from_args(std::env::args(), move |worker| { let timer = ::std::time::Instant::now(); let mut probe = Handle::new(); let (mut roots, mut graph, _write_token, _read_token) = worker.dataflow(|scope| { let (root_input, roots) = scope.new_collection(); let (edge_input, graph) = scope.new_collection(); let result = bfs(&graph, &roots); let result = result.map(|(_,l)| l) .consolidate() .probe_with(&mut probe); let write_token = if write { Some(kafka::create_sink(&result.inner, "localhost:9092", &topic)) } else { None }; let read_token = if read { let (read_token, stream) = kafka::create_source(result.scope(), "localhost:9092", &topic, "group"); use differential_dataflow::AsCollection; stream .as_collection() .negate() .concat(&result) .consolidate() .inspect(|x| println!("In error: {:?}", x)) .probe_with(&mut probe) .assert_empty() ; Some(read_token) } else { None }; (root_input, edge_input, write_token, read_token) }); let seed: &[_] = &[1, 2, 3, 4]; let mut rng1: StdRng = SeedableRng::from_seed(seed); let mut rng2: StdRng = SeedableRng::from_seed(seed); roots.insert(0); roots.close(); println!("performing BFS on {} nodes, {} edges:", nodes, edges); if worker.index() == 0 { for _ in 0 .. edges { graph.insert((rng1.gen_range(0, nodes), rng1.gen_range(0, nodes))); } } println!("{:?}\tloaded", timer.elapsed()); graph.advance_to(1); graph.flush(); worker.step_while(|| probe.less_than(graph.time())); println!("{:?}\tstable", timer.elapsed()); for round in 0 .. { if write { std::thread::sleep(std::time::Duration::from_millis(100)); } for element in 0 .. batch { if worker.index() == 0 { graph.insert((rng1.gen_range(0, nodes), rng1.gen_range(0, nodes))); graph.remove((rng2.gen_range(0, nodes), rng2.gen_range(0, nodes))); } graph.advance_to(2 + round * batch + element); } graph.flush(); let timer2 = ::std::time::Instant::now(); worker.step_while(|| probe.less_than(&graph.time())); if worker.index() == 0 { let elapsed = timer2.elapsed(); println!("{:?}\t{:?}:\t{}", timer.elapsed(), round, elapsed.as_secs() * 1000000000 + (elapsed.subsec_nanos() as u64)); } } println!("finished; elapsed: {:?}", timer.elapsed()); }).unwrap(); } fn bfs<G: Scope>(edges: &Collection<G, Edge>, roots: &Collection<G, Nod
pub mod kafka { use serde::{Serialize, Deserialize}; use timely::scheduling::SyncActivator; use rdkafka::{ClientContext, config::ClientConfig}; use rdkafka::consumer::{BaseConsumer, ConsumerContext}; use rdkafka::error::{KafkaError, RDKafkaError}; use differential_dataflow::capture::Writer; use std::hash::Hash; use timely::progress::Timestamp; use timely::dataflow::{Scope, Stream}; use differential_dataflow::ExchangeData; use differential_dataflow::lattice::Lattice; pub fn create_source<G, D, T, R>(scope: G, addr: &str, topic: &str, group: &str) -> (Box<dyn std::any::Any>, Stream<G, (D, T, R)>) where G: Scope<Timestamp = T>, D: ExchangeData + Hash + for<'a> serde::Deserialize<'a>, T: ExchangeData + Hash + for<'a> serde::Deserialize<'a> + Timestamp + Lattice, R: ExchangeData + Hash + for<'a> serde::Deserialize<'a>, { differential_dataflow::capture::source::build(scope, |activator| { let source = KafkaSource::new(addr, topic, group, activator); differential_dataflow::capture::YieldingIter::new_from(Iter::<D,T,R>::new_from(source), std::time::Duration::from_millis(10)) }) } pub fn create_sink<G, D, T, R>(stream: &Stream<G, (D, T, R)>, addr: &str, topic: &str) -> Box<dyn std::any::Any> where G: Scope<Timestamp = T>, D: ExchangeData + Hash + Serialize + for<'a> Deserialize<'a>, T: ExchangeData + Hash + Serialize + for<'a> Deserialize<'a> + Timestamp + Lattice, R: ExchangeData + Hash + Serialize + for<'a> Deserialize<'a>, { use std::rc::Rc; use std::cell::RefCell; use differential_dataflow::hashable::Hashable; let sink = KafkaSink::new(addr, topic); let result = Rc::new(RefCell::new(sink)); let sink_hash = (addr.to_string(), topic.to_string()).hashed(); differential_dataflow::capture::sink::build( &stream, sink_hash, Rc::downgrade(&result), Rc::downgrade(&result), ); Box::new(result) } pub struct KafkaSource { consumer: BaseConsumer<ActivationConsumerContext>, } impl KafkaSource { pub fn new(addr: &str, topic: &str, group: &str, activator: SyncActivator) -> Self { let mut kafka_config = ClientConfig::new(); kafka_config.set("bootstrap.servers", &addr.to_string()); kafka_config .set("enable.auto.commit", "false") .set("auto.offset.reset", "earliest"); kafka_config.set("topic.metadata.refresh.interval.ms", "30000"); kafka_config.set("fetch.message.max.bytes", "134217728"); kafka_config.set("group.id", group); kafka_config.set("isolation.level", "read_committed"); let activator = ActivationConsumerContext(activator); let consumer = kafka_config.create_with_context::<_, BaseConsumer<_>>(activator).unwrap(); use rdkafka::consumer::Consumer; consumer.subscribe(&[topic]).unwrap(); Self { consumer, } } } pub struct Iter<D, T, R> { pub source: KafkaSource, phantom: std::marker::PhantomData<(D, T, R)>, } impl<D, T, R> Iter<D, T, R> { pub fn new_from(source: KafkaSource) -> Self { Self { source, phantom: std::marker::PhantomData, } } } impl<D, T, R> Iterator for Iter<D, T, R> where D: for<'a>Deserialize<'a>, T: for<'a>Deserialize<'a>, R: for<'a>Deserialize<'a>, { type Item = differential_dataflow::capture::Message<D, T, R>; fn next(&mut self) -> Option<Self::Item> { use rdkafka::message::Message; self.source .consumer .poll(std::time::Duration::from_millis(0)) .and_then(|result| result.ok()) .and_then(|message| { message.payload().and_then(|message| bincode::deserialize::<differential_dataflow::capture::Message<D, T, R>>(message).ok()) }) } } struct ActivationConsumerContext(SyncActivator); impl ClientContext for ActivationConsumerContext { } impl ActivationConsumerContext { fn activate(&self) { self.0.activate().unwrap(); } } impl ConsumerContext for ActivationConsumerContext { fn message_queue_nonempty_callback(&self) { self.activate(); } } use std::time::Duration; use rdkafka::producer::DefaultProducerContext; use rdkafka::producer::{BaseRecord, ThreadedProducer}; pub struct KafkaSink { topic: String, producer: ThreadedProducer<DefaultProducerContext>, buffer: Vec<u8>, } impl KafkaSink { pub fn new(addr: &str, topic: &str) -> Self { let mut config = ClientConfig::new(); config.set("bootstrap.servers", &addr); config.set("queue.buffering.max.kbytes", &format!("{}", 16 << 20)); config.set("queue.buffering.max.messages", &format!("{}", 10_000_000)); config.set("queue.buffering.max.ms", &format!("{}", 10)); let producer = config .create_with_context::<_, ThreadedProducer<_>>(DefaultProducerContext) .expect("creating kafka producer for kafka sinks failed"); Self { producer, topic: topic.to_string(), buffer: Vec::new(), } } } impl<T: Serialize> Writer<T> for KafkaSink { fn poll(&mut self, item: &T) -> Option<Duration> { self.buffer.clear(); bincode::serialize_into(&mut self.buffer, item).expect("Writing to a `Vec<u8>` cannot fail"); let record = BaseRecord::<[u8], _>::to(&self.topic).payload(&self.buffer); self.producer.send(record).err().map(|(e, _)| { if let KafkaError::MessageProduction(RDKafkaError::QueueFull) = e { Duration::from_secs(1) } else { Duration::from_secs(1) } }) } fn done(&self) -> bool { self.producer.in_flight_count() == 0 } } }
e>) -> Collection<G, (Node, u32)> where G::Timestamp: Lattice+Ord { let nodes = roots.map(|x| (x, 0)); nodes.iterate(|inner| { let edges = edges.enter(&inner.scope()); let nodes = nodes.enter(&inner.scope()); inner.join_map(&edges, |_k,l,d| (*d, l+1)) .concat(&nodes) .reduce(|_, s, t| t.push((*s[0].0, 1))) }) }
function_block-function_prefixed
[ { "content": "// Type aliases for differential execution.\n\ntype Time = u32;\n", "file_path": "doop/src/main.rs", "rank": 0, "score": 294510.99856603285 }, { "content": "type Node = u32;\n\n\n", "file_path": "src/trace/implementations/graph.rs", "rank": 1, "score": 289060.6563150857 }, { "content": "#[no_mangle]\n\npub fn build((dataflow, handles, probe, timer, args): Environment) -> Result<(), String> {\n\n\n\n // This call either starts the production of random graph edges.\n\n //\n\n // The arguments should be\n\n //\n\n // <graph_name> <nodes> <edges> <rate>\n\n //\n\n // where <rate> is the target number of edge changes per second. The source\n\n // will play out changes to keep up with this, and timestamp them as if they\n\n // were emitted at the correct time. The timestamps use the system `timer`,\n\n // but only start whenever the method is called. This means that the data are\n\n // not deterministic, but if you subtract the elapsed time between system start\n\n // up and method call, they should be deterministic.\n\n //\n\n // The method also registers a capability with name `<graph_name>-capability`,\n\n // and will continue to execute until this capability is dropped from `handles`.\n\n // To terminate the operator it is sufficient to drop the capability, as the\n\n // operator holds only a weak reference to it.\n\n //\n", "file_path": "server/dataflows/random_graph/src/lib.rs", "rank": 2, "score": 283334.70969676506 }, { "content": "// returns pairs (n, s) indicating node n can be reached from a root in s steps.\n\nfn bfs<G: Scope>(edges: &Collection<G, Edge>, roots: &Collection<G, Node>) -> Collection<G, (Node, u32)>\n\nwhere G::Timestamp: Lattice+Ord {\n\n\n\n // initialize roots as reaching themselves at distance 0\n\n let nodes = roots.map(|x| (x, 0));\n\n\n\n // repeatedly update minimal distances each node can be reached from each root\n\n nodes.iterate(|inner| {\n\n\n\n let edges = edges.enter(&inner.scope());\n\n let nodes = nodes.enter(&inner.scope());\n\n\n\n inner.join_map(&edges, |_k,l,d| (*d, l+1))\n\n .concat(&nodes)\n\n .reduce(|_, s, t| t.push((*s[0].0, 1)))\n\n })\n\n}", "file_path": "examples/bfs.rs", "rank": 3, "score": 273521.8120165493 }, { "content": "// returns pairs (n, s) indicating node n can be reached from a root in s steps.\n\nfn bfs<G: Scope>(edges: &Collection<G, Edge>, roots: &Collection<G, Node>) -> Collection<G, (Node, u32)>\n\nwhere G::Timestamp: Lattice+Ord {\n\n\n\n // initialize roots as reaching themselves at distance 0\n\n let nodes = roots.map(|x| (x, 0));\n\n\n\n // repeatedly update minimal distances each node can be reached from each root\n\n nodes.iterate(|inner| {\n\n\n\n let edges = edges.enter(&inner.scope());\n\n let nodes = nodes.enter(&inner.scope());\n\n\n\n inner.join_map(&edges, |_k,l,d| (*d, l+1))\n\n .concat(&nodes)\n\n .reduce(|_, s, t| t.push((*s[0].0, 1)))\n\n })\n\n}", "file_path": "examples/stackoverflow.rs", "rank": 4, "score": 273521.8120165493 }, { "content": "fn dump_cursor<Tr>(round: u32, index: usize, trace: &mut Tr)\n\nwhere\n\n Tr: TraceReader,\n\n Tr::Key: Debug + Clone,\n\n Tr::Val: Debug + Clone,\n\n Tr::Time: Debug + Clone,\n\n Tr::R: Debug + Clone,\n\n{\n\n let (mut cursor, storage) = trace.cursor();\n\n for ((k, v), diffs) in cursor.to_vec(&storage).iter() {\n\n println!(\"round {}, w{} {:?}:{:?}: {:?}\", round, index, *k, *v, diffs);\n\n }\n\n}\n", "file_path": "examples/cursors.rs", "rank": 6, "score": 265138.44606916915 }, { "content": "fn read_u25(string: &str) -> [u8;25] { let mut buff = [0;25]; copy_from_to(string.as_bytes(), &mut buff); buff }\n\n\n\nunsafe_abomonate!(AbomonationWrapper<ArrayString<[u8; 25]>>);\n\nunsafe_abomonate!(AbomonationWrapper<ArrayString<[u8; 40]>>);\n\nunsafe_abomonate!(AbomonationWrapper<ArrayString<[u8; 128]>>);\n\n\n\n#[derive(Ord,PartialOrd,Eq,PartialEq,Clone,Copy,Debug,Hash,Default)]\n\npub struct AbomonationWrapper<T> {\n\n pub element: T,\n\n}\n\n\n\nuse ::std::ops::Deref;\n\nimpl<T> Deref for AbomonationWrapper<T> {\n\n type Target = T;\n\n fn deref(&self) -> &Self::Target {\n\n &self.element\n\n }\n\n}\n\n\n\nunsafe_abomonate!(Part);\n", "file_path": "tpchlike/src/types.rs", "rank": 7, "score": 265036.73541693133 }, { "content": "fn read_u10(string: &str) -> [u8;10] { let mut buff = [0;10]; copy_from_to(string.as_bytes(), &mut buff); buff }\n", "file_path": "tpchlike/src/types.rs", "rank": 8, "score": 265036.73541693133 }, { "content": "fn read_u15(string: &str) -> [u8;15] { let mut buff = [0;15]; copy_from_to(string.as_bytes(), &mut buff); buff }\n", "file_path": "tpchlike/src/types.rs", "rank": 9, "score": 265036.73541693133 }, { "content": "fn read_u01(string: &str) -> [u8;1] { let mut buff = [0;1]; copy_from_to(string.as_bytes(), &mut buff); buff }\n", "file_path": "tpchlike/src/types.rs", "rank": 10, "score": 265036.73541693133 }, { "content": "type Time = u32;\n", "file_path": "examples/cursors.rs", "rank": 11, "score": 253688.27089614252 }, { "content": "type Time = u32;\n", "file_path": "examples/pagerank.rs", "rank": 12, "score": 253688.27089614252 }, { "content": "type Node = u32;\n", "file_path": "examples/pagerank.rs", "rank": 13, "score": 253093.0110743976 }, { "content": "type Node = u32;\n", "file_path": "examples/stackoverflow.rs", "rank": 14, "score": 253093.0110743976 }, { "content": "type Node = u32;\n", "file_path": "examples/cursors.rs", "rank": 15, "score": 253093.0110743976 }, { "content": "type Node = u32;\n", "file_path": "examples/interpreted.rs", "rank": 16, "score": 253093.0110743976 }, { "content": "type Node = u32;\n", "file_path": "examples/bfs.rs", "rank": 17, "score": 253093.0110743976 }, { "content": "type Node = u32;\n", "file_path": "dogsdogsdogs/examples/ngo.rs", "rank": 19, "score": 249070.47743759854 }, { "content": "type Node = u32;\n", "file_path": "examples/monoid-bfs.rs", "rank": 20, "score": 249070.4774375985 }, { "content": "/// Enables logging of differential dataflow events.\n\npub fn enable<A, W>(worker: &mut timely::worker::Worker<A>, writer: W) -> Option<Box<dyn std::any::Any+'static>>\n\nwhere\n\n A: timely::communication::Allocate,\n\n W: std::io::Write+'static,\n\n{\n\n let writer = ::timely::dataflow::operators::capture::EventWriter::new(writer);\n\n let mut logger = ::timely::logging::BatchLogger::new(writer);\n\n worker\n\n .log_register()\n\n .insert::<DifferentialEvent,_>(\"differential/arrange\", move |time, data| logger.publish_batch(time, data))\n\n}\n\n\n\n/// Possible different differential events.\n\n#[derive(Debug, Clone, Abomonation, Ord, PartialOrd, Eq, PartialEq)]\n\npub enum DifferentialEvent {\n\n /// Batch creation.\n\n Batch(BatchEvent),\n\n /// Merge start and stop events.\n\n Merge(MergeEvent),\n\n /// Batch dropped when trace dropped.\n", "file_path": "src/logging.rs", "rank": 21, "score": 233140.2729517994 }, { "content": "type Edge = (Node, Node);\n\n\n\n#[test] fn bfs_10_20_1000() { test_sizes(10, 20, 1000, Config::process(3)); }\n\n#[test] fn bfs_100_200_10() { test_sizes(100, 200, 10, Config::process(3)); }\n\n#[test] fn bfs_100_2000_1() { test_sizes(100, 2000, 1, Config::process(3)); }\n\n\n", "file_path": "tests/bfs.rs", "rank": 22, "score": 226348.37484132207 }, { "content": "type Edge = (Node, Node);\n\n\n", "file_path": "examples/interpreted.rs", "rank": 23, "score": 226348.37484132207 }, { "content": "type Edge = (Node, Node);\n", "file_path": "examples/cursors.rs", "rank": 24, "score": 226348.37484132207 }, { "content": "type Edge = (Node, Node);\n", "file_path": "examples/pagerank.rs", "rank": 25, "score": 226348.37484132207 }, { "content": "type Edge = (Node, Node);\n\n\n\n#[test] fn scc_10_20_1000() { test_sizes(10, 20, 1000, Config::process(3)); }\n\n#[test] fn scc_100_200_10() { test_sizes(100, 200, 10, Config::process(3)); }\n\n#[test] fn scc_100_2000_1() { test_sizes(100, 2000, 1, Config::process(3)); }\n\n\n", "file_path": "tests/scc.rs", "rank": 26, "score": 226348.37484132207 }, { "content": "type Edge = (Node, Node);\n\n\n", "file_path": "examples/bfs.rs", "rank": 27, "score": 226348.37484132207 }, { "content": "type Edge = (Node, Node);\n", "file_path": "examples/graspan.rs", "rank": 28, "score": 226348.37484132207 }, { "content": "type Edge = (Node, Node);\n\n\n", "file_path": "examples/stackoverflow.rs", "rank": 29, "score": 226348.37484132207 }, { "content": "fn assign(node: usize, root: usize, reverse: &HashMap<usize, Vec<usize>>, component: &mut HashMap<usize, usize>) {\n\n if !component.contains_key(&node) {\n\n component.insert(node, root);\n\n if let Some(edges) = reverse.get(&node) {\n\n for &edge in edges.iter() {\n\n assign(edge, root, reverse, component);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/scc.rs", "rank": 30, "score": 224895.80643971608 }, { "content": "#[no_mangle]\n\npub fn build((dataflow, handles, probe, _timer, args): Environment) -> Result<(), String> {\n\n\n\n if args.len() != 2 { return Err(format!(\"expected two arguments; instead: {:?}\", args)); }\n\n\n\n let edges = handles\n\n .get_mut::<Rc<RefCell<Option<TraceHandle>>>>(&args[0])?\n\n .borrow_mut().as_mut().unwrap().import(dataflow);\n\n\n\n let source = args[1].parse::<usize>().map_err(|_| format!(\"parse error, source: {:?}\", args[1]))?; \n\n let (_input, query) = dataflow.new_collection_from(Some(source));\n\n\n\n let timer = ::std::time::Instant::now();\n\n\n\n query\n\n .map(|x| (x, x))\n\n .join_core(&edges, |_n, &q, &d| Some((d, q))) // one hop\n\n .join_core(&edges, |_n, &q, &d| Some((d, q))) // two hops\n\n .join_core(&edges, |_n, &q, &d| Some((d, q))) // three hops\n\n .map(|x| x.1)\n\n .consolidate()\n\n .inspect(move |x| println!(\"{:?}:\\t{:?}\", timer.elapsed(), x))\n\n .probe_with(probe);\n\n\n\n Ok(())\n\n}", "file_path": "server/dataflows/neighborhood/src/lib.rs", "rank": 31, "score": 222903.30899012528 }, { "content": "#[no_mangle]\n\npub fn build((dataflow, handles, probe, _timer, args): Environment) -> Result<(), String> {\n\n\n\n if args.len() != 2 { return Err(format!(\"expected two arguments; instead: {:?}\", args)); }\n\n\n\n let edges = handles\n\n .get_mut::<Rc<RefCell<Option<TraceHandle>>>>(&args[0])?\n\n .borrow_mut().as_mut().unwrap().import(dataflow);\n\n\n\n let source = args[1].parse::<usize>().map_err(|_| format!(\"parse error, source: {:?}\", args[1]))?; \n\n let (_input, roots) = dataflow.new_collection_from(Some(source));\n\n\n\n // repeatedly update minimal distances each node can be reached from each root\n\n roots.iterate(|dists| {\n\n let edges = edges.enter(&dists.scope());\n\n let roots = roots.enter(&dists.scope());\n\n dists.arrange_by_self()\n\n .join_core(&edges, |_src, _, &dst| Some(dst))\n\n .concat(&roots)\n\n .distinct()\n\n })\n\n .probe_with(probe);\n\n\n\n Ok(())\n\n}", "file_path": "server/dataflows/reachability/src/lib.rs", "rank": 32, "score": 222903.30899012528 }, { "content": "type Edge = (Node, Node);\n\n\n", "file_path": "dogsdogsdogs/examples/ngo.rs", "rank": 34, "score": 222835.71298371206 }, { "content": "type Edge = (Node, Node);\n\n\n\n#[derive(Abomonation, Copy, Ord, PartialOrd, Eq, PartialEq, Debug, Clone, Serialize, Deserialize, Hash)]\n\npub struct MinSum {\n\n value: u32,\n\n}\n\n\n\nuse std::ops::{AddAssign, Mul};\n\nuse differential_dataflow::difference::Semigroup;\n\n\n\nimpl<'a> AddAssign<&'a Self> for MinSum {\n\n fn add_assign(&mut self, rhs: &'a Self) {\n\n self.value = std::cmp::min(self.value, rhs.value);\n\n }\n\n}\n\n\n\nimpl Mul<Self> for MinSum {\n\n type Output = Self;\n\n fn mul(self, rhs: Self) -> Self {\n\n MinSum { value: self.value + rhs.value }\n\n }\n\n}\n\n\n\nimpl Semigroup for MinSum {\n\n fn is_zero(&self) -> bool { false }\n\n}\n\n\n", "file_path": "examples/monoid-bfs.rs", "rank": 35, "score": 222835.71298371206 }, { "content": "#[no_mangle]\n\npub fn build((dataflow, handles, probe, _timer, args): Environment) -> Result<(), String> {\n\n\n\n if args.len() != 1 { return Err(format!(\"expected one argument, instead: {:?}\", args)); }\n\n\n\n handles\n\n .get_mut::<Rc<RefCell<Option<TraceHandle>>>>(&args[0])?\n\n .borrow_mut().as_mut().unwrap()\n\n .import(dataflow)\n\n .as_collection(|&src,_dst| src)\n\n .count_total()\n\n .map(|(_deg, cnt)| cnt as usize)\n\n .count_total()\n\n .probe_with(probe);\n\n\n\n Ok(())\n\n}", "file_path": "server/dataflows/degr_dist/src/lib.rs", "rank": 36, "score": 219838.94394000177 }, { "content": "// returns pairs (n, s) indicating node n can be reached from a root in s steps.\n\nfn bfs<G: Scope>(edges: &Collection<G, Edge>, roots: &Collection<G, Node>) -> Collection<G, (Node, usize)>\n\nwhere G::Timestamp: Lattice+Ord {\n\n\n\n // initialize roots as reaching themselves at distance 0\n\n let nodes = roots.map(|x| (x, 0));\n\n\n\n // repeatedly update minimal distances each node can be reached from each root\n\n nodes.iterate(|inner| {\n\n\n\n let edges = edges.enter(&inner.scope());\n\n let nodes = nodes.enter(&inner.scope());\n\n\n\n inner.join_map(&edges, |_k,l,d| (*d, l+1))\n\n .concat(&nodes)\n\n .reduce(|_, s, t| t.push((*s[0].0, 1)))\n\n })\n\n}\n", "file_path": "tests/bfs.rs", "rank": 37, "score": 214965.4824300761 }, { "content": "/// Propagates labels forward, retaining the minimum label.\n\n///\n\n/// This algorithm naively propagates all labels at once, much like standard label propagation.\n\n/// To more carefully control the label propagation, consider `propagate_core` which supports a\n\n/// method to limit the introduction of labels.\n\npub fn propagate<G, N, L, R>(edges: &Collection<G, (N,N), R>, nodes: &Collection<G,(N,L),R>) -> Collection<G,(N,L),R>\n\nwhere\n\n G: Scope,\n\n G::Timestamp: Lattice+Ord,\n\n N: ExchangeData+Hash,\n\n R: ExchangeData+Abelian,\n\n R: Mul<R, Output=R>,\n\n R: From<i8>,\n\n L: ExchangeData,\n\n{\n\n propagate_core(&edges.arrange_by_key(), nodes, |_label| 0)\n\n}\n\n\n", "file_path": "src/algorithms/graphs/propagate.rs", "rank": 38, "score": 211521.45495163626 }, { "content": "type Iter = u32;\n", "file_path": "doop/src/main.rs", "rank": 39, "score": 210647.21909271507 }, { "content": "type Number = u32;\n", "file_path": "doop/src/main.rs", "rank": 40, "score": 210647.21909271507 }, { "content": "type Symbol = u32;\n", "file_path": "doop/src/main.rs", "rank": 41, "score": 210647.21909271507 }, { "content": "/// Introduces differential options to a timely configuration.\n\npub fn configure(config: &mut timely::WorkerConfig, options: &Config) {\n\n if let Some(effort) = options.idle_merge_effort {\n\n config.set(\"differential/idle_merge_effort\".to_string(), effort);\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 42, "score": 210616.34569801207 }, { "content": "type Result = std::sync::mpsc::Receiver<timely::dataflow::operators::capture::Event<usize, ((u64, i64), usize, i64)>>;\n\n\n", "file_path": "tests/import.rs", "rank": 43, "score": 209117.31130603747 }, { "content": "pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)\n\nwhere G::Timestamp: Lattice+TotalOrder+Ord {\n\n\n\n // revenue by supplier\n\n let revenue =\n\n collections\n\n .lineitems()\n\n .explode(|item|\n\n if create_date(1996, 1, 1) <= item.ship_date && item.ship_date < create_date(1996,4,1) {\n\n Some((item.supp_key, (item.extended_price * (100 - item.discount) / 100) as isize))\n\n }\n\n else { None }\n\n );\n\n\n\n // suppliers with maximum revenue\n\n let top_suppliers =\n\n revenue\n\n // do a hierarchical min, to improve update perf.\n\n .map(|key| ((key % 1000) as u16, key))\n\n .reduce(|_k, s, t| {\n", "file_path": "tpchlike/src/queries/query15.rs", "rank": 44, "score": 208213.49988246243 }, { "content": "pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)\n\nwhere G::Timestamp: Lattice+TotalOrder+Ord {\n\n\n\n let regions = collections.regions().filter(|r| starts_with(&r.name, b\"AMERICA\")).map(|r| r.region_key);\n\n let nations1 = collections.nations().map(|n| (n.region_key, n.nation_key)).semijoin(&regions).map(|x| x.1);\n\n let customers = collections.customers().map(|c| (c.nation_key, c.cust_key)).semijoin(&nations1).map(|x| x.1);\n\n let orders =\n\n collections\n\n .orders()\n\n .flat_map(|o|\n\n if create_date(1995,1,1) <= o.order_date && o.order_date <= create_date(1996, 12, 31) {\n\n Some((o.cust_key, (o.order_key, o.order_date >> 16)))\n\n }\n\n else { None }\n\n )\n\n .semijoin(&customers)\n\n .map(|x| x.1);\n\n\n\n let nations2 = collections.nations.map(|n| (n.nation_key, starts_with(&n.name, b\"BRAZIL\")));\n\n let suppliers =\n", "file_path": "tpchlike/src/queries/query08.rs", "rank": 45, "score": 208213.49988246243 }, { "content": "pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)\n\nwhere G::Timestamp: Lattice+TotalOrder+Ord {\n\n\n\n let customers =\n\n collections\n\n .customers()\n\n .flat_map(|c| if starts_with(&c.mktsegment[..], b\"BUILDING\") { Some(c.cust_key) } else { None });\n\n\n\n let lineitems =\n\n collections\n\n .lineitems()\n\n .explode(|l|\n\n if l.ship_date > create_date(1995, 3, 15) {\n\n Some((l.order_key, (l.extended_price * (100 - l.discount) / 100) as isize))\n\n }\n\n else { None }\n\n );\n\n\n\n let orders =\n\n collections\n", "file_path": "tpchlike/src/queries/query03.rs", "rank": 46, "score": 208213.49988246243 }, { "content": "pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)\n\nwhere G::Timestamp: Lattice+TotalOrder+Ord {\n\n\n\n let regex = Regex::new(\"special.*requests\").expect(\"Regex construction failed\");\n\n\n\n let orders =\n\n collections\n\n .orders()\n\n .flat_map(move |o| if !regex.is_match(&o.comment) { Some(o.cust_key) } else { None } );\n\n\n\n collections\n\n .customers()\n\n .map(|c| c.cust_key)\n\n .concat(&orders)\n\n .count_total()\n\n .map(|(_cust_key, count)| (count-1) as usize)\n\n .count_total()\n\n // .inspect(|x| println!(\"{:?}\", x))\n\n .probe_with(probe);\n\n}\n\n\n", "file_path": "tpchlike/src/queries/query13.rs", "rank": 47, "score": 208213.49988246243 }, { "content": "pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)\n\nwhere G::Timestamp: Lattice+TotalOrder+Ord {\n\n\n\n println!(\"TODO: Q20 uses a `reduce_abelian` to get an arrangement, but could use `count_total`\");\n\n\n\n let partkeys = collections.parts.filter(|p| p.name.as_bytes() == b\"forest\").map(|p| p.part_key);\n\n\n\n let available =\n\n collections\n\n .lineitems()\n\n .flat_map(|l|\n\n if l.ship_date >= create_date(1994, 1, 1) && l.ship_date < create_date(1995, 1, 1) {\n\n Some((l.part_key, (l.supp_key, l.quantity)))\n\n }\n\n else { None }\n\n )\n\n .semijoin(&partkeys)\n\n .explode(|l| Some(((((l.0 as u64) << 32) + (l.1).0 as u64, ()), (l.1).1 as isize)))\n\n .reduce_abelian::<_,DefaultValTrace<_,_,_,_>>(\"Reduce\", |_k,s,t| t.push((s[0].1, 1)));\n\n\n", "file_path": "tpchlike/src/queries/query20.rs", "rank": 48, "score": 208213.49988246243 }, { "content": "pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)\n\nwhere G::Timestamp: Lattice+TotalOrder+Ord {\n\n\n\n let regex = Regex::new(\"Customer.*Complaints\").expect(\"Regex construction failed\");\n\n\n\n let suppliers =\n\n collections\n\n .suppliers()\n\n .flat_map(move |s| if regex.is_match(&s.comment) { Some(s.supp_key) } else { None } );\n\n\n\n let parts = collections\n\n .partsupps()\n\n .map(|ps| (ps.supp_key, ps.part_key))\n\n .antijoin(&suppliers)\n\n .map(|(_supp_key, part_key)| part_key);\n\n\n\n collections\n\n .parts()\n\n .flat_map(|p|\n\n if !starts_with(&p.brand, b\"Brand#45\") && !starts_with(&p.typ.as_bytes(), b\"MEDIUM POLISHED\") && [49, 14, 23, 45, 19, 3, 36, 9].contains(&p.size) {\n", "file_path": "tpchlike/src/queries/query16.rs", "rank": 49, "score": 208213.49988246243 }, { "content": "pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)\n\nwhere G::Timestamp: Lattice+TotalOrder+Ord {\n\n\n\n println!(\"TODO: Q22 uses a `group` for counting to get an arrangement; could use `count_total`\");\n\n\n\n let customers =\n\n collections\n\n .customers()\n\n .flat_map(|c| {\n\n if c.acctbal > 0 {\n\n match &[c.phone[0], c.phone[1]] {\n\n b\"13\" | b\"31\" | b\"23\" | b\"29\" | b\"30\" | b\"18\" | b\"17\" => {\n\n Some((((c.phone[1] as u16) << 8) + c.phone[0] as u16, c.acctbal, c.cust_key))\n\n },\n\n _ => None,\n\n }\n\n }\n\n else { None }\n\n });\n\n\n", "file_path": "tpchlike/src/queries/query22.rs", "rank": 50, "score": 208213.49988246243 }, { "content": "pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)\n\nwhere G::Timestamp: Lattice+TotalOrder+Ord {\n\n\n\n let nations =\n\n collections\n\n .nations()\n\n .filter(|n| starts_with(&n.name, b\"GERMANY\"))\n\n .map(|n| n.nation_key);\n\n\n\n let suppliers =\n\n collections\n\n .suppliers()\n\n .map(|s| (s.nation_key, s.supp_key))\n\n .semijoin(&nations)\n\n .map(|s| s.1);\n\n\n\n collections\n\n .partsupps()\n\n .explode(|x| Some(((x.supp_key, x.part_key), (x.supplycost as isize) * (x.availqty as isize))))\n\n .semijoin(&suppliers)\n\n .map(|(_, part_key)| ((), part_key))\n\n .reduce(|_part_key, s, t| {\n\n let threshold: isize = s.iter().map(|x| x.1 as isize).sum::<isize>() / 10000;\n\n t.extend(s.iter().filter(|x| x.1 > threshold).map(|&(&a,b)| (a, b)));\n\n })\n\n .map(|(_, part_key)| part_key)\n\n .count_total()\n\n .probe_with(probe);\n\n}\n\n\n", "file_path": "tpchlike/src/queries/query11.rs", "rank": 51, "score": 208213.49988246243 }, { "content": "pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)\n\nwhere G::Timestamp: Lattice+TotalOrder+Ord {\n\n\n\n let orders =\n\n collections\n\n .orders()\n\n .map(|o| (o.order_key, (o.cust_key, o.order_date, o.total_price)));\n\n\n\n collections\n\n .lineitems()\n\n .explode(|l| Some((l.order_key, l.quantity as isize)))\n\n .count_total()\n\n .filter(|&(_key, cnt)| cnt > 300)\n\n .join_map(&orders, |&o_key, &quant, &(cust_key, date, price)| (cust_key, (o_key, date, price, quant)))\n\n .join(&collections.customers().map(|c| (c.cust_key, c.name)))\n\n .probe_with(probe);\n\n}\n\n\n", "file_path": "tpchlike/src/queries/query18.rs", "rank": 52, "score": 208213.49988246243 }, { "content": "pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)\n\nwhere G::Timestamp: Lattice+TotalOrder+Ord {\n\n\n\n println!(\"TODO: Q07 could use `join_core` to fuse map and filter\");\n\n\n\n let nations =\n\n collections\n\n .nations()\n\n .filter(|n| starts_with(&n.name, b\"FRANCE\") || starts_with(&n.name, b\"GERMANY\"))\n\n .map(|n| (n.nation_key, n.name));\n\n\n\n let customers =\n\n collections\n\n .customers()\n\n .map(|c| (c.nation_key, c.cust_key))\n\n .join_map(&nations, |_, &cust_key, &name| (cust_key, name));\n\n\n\n let orders =\n\n collections\n\n .orders()\n", "file_path": "tpchlike/src/queries/query07.rs", "rank": 53, "score": 208213.49988246243 }, { "content": "pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)\n\nwhere G::Timestamp: Lattice+TotalOrder+Ord {\n\n\n\n let regions =\n\n collections\n\n .regions()\n\n .filter(|x| starts_with(&x.name[..], b\"ASIA\"))\n\n .map(|x| x.region_key);\n\n\n\n let nations =\n\n collections\n\n .nations()\n\n .map(|x| (x.region_key, x.nation_key))\n\n .semijoin(&regions)\n\n .map(|(_region_key, nation_key)| nation_key);\n\n\n\n let suppliers =\n\n collections\n\n .suppliers()\n\n .map(|x| (x.nation_key, x.supp_key))\n", "file_path": "tpchlike/src/queries/query05.rs", "rank": 54, "score": 208213.49988246243 }, { "content": "pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)\n\nwhere G::Timestamp: Lattice+TotalOrder+Ord {\n\n\n\n let lineitems =\n\n collections\n\n .lineitems()\n\n .explode(|x|\n\n if (starts_with(&x.ship_mode, b\"AIR\") || starts_with(&x.ship_mode, b\"AIR REG\")) && starts_with(&x.ship_instruct, b\"DELIVER IN PERSON\") {\n\n Some(((x.part_key, x.quantity), (x.extended_price * (100 - x.discount) / 100) as isize))\n\n }\n\n else { None }\n\n );\n\n\n\n let lines1 = lineitems.filter(|&(_, quant)| quant >= 1 && quant <= 11).map(|x| x.0).arrange_by_self();\n\n let lines2 = lineitems.filter(|&(_, quant)| quant >= 10 && quant <= 20).map(|x| x.0).arrange_by_self();\n\n let lines3 = lineitems.filter(|&(_, quant)| quant >= 20 && quant <= 30).map(|x| x.0).arrange_by_self();\n\n\n\n let parts = collections.parts().map(|p| (p.part_key, (p.brand, p.container, p.size)));\n\n\n\n let parts1 = parts.filter(|&(_key, (brand, container, size))| starts_with(&brand, b\"Brand#12\") && 1 <= size && size <= 5 && (starts_with(&container, b\"SM CASE\") || starts_with(&container, b\"SM BOX\") || starts_with(&container, b\"SM PACK\") || starts_with(&container, b\"MED PKG\"))).map(|x| x.0).arrange_by_self();\n", "file_path": "tpchlike/src/queries/query19.rs", "rank": 55, "score": 208213.49988246243 }, { "content": "pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)\n\nwhere G::Timestamp: Lattice+TotalOrder+Ord {\n\n\n\n let regions =\n\n collections\n\n .regions()\n\n .flat_map(|x| if starts_with(&x.name[..], b\"EUROPE\") { Some(x.region_key) } else { None });\n\n\n\n let nations =\n\n collections\n\n .nations()\n\n .map(|x| (x.region_key, (x.nation_key, x.name)))\n\n .semijoin(&regions)\n\n .map(|(_region_key, (nation_key, name))| (nation_key, name));\n\n\n\n let suppliers =\n\n collections\n\n .suppliers()\n\n .map(|x| (x.nation_key, (x.acctbal, x.name, x.address, x.phone, x.comment, x.supp_key)))\n\n .semijoin(&nations.map(|x| x.0))\n", "file_path": "tpchlike/src/queries/query02.rs", "rank": 56, "score": 208213.49988246243 }, { "content": "pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)\n\nwhere G::Timestamp: Lattice+TotalOrder+Ord {\n\n\n\n collections\n\n .lineitems()\n\n .explode(|x| {\n\n if (create_date(1994, 1, 1) <= x.ship_date) && (x.ship_date < create_date(1995, 1, 1)) && (5 <= x.discount) && (x.discount <= 7) && (x.quantity < 24) {\n\n Some(((), (x.extended_price * x.discount / 100) as isize))\n\n }\n\n else { None }\n\n })\n\n .count_total()\n\n // .inspect(|x| println!(\"{:?}\", x))\n\n .probe_with(probe);\n\n}\n\n\n", "file_path": "tpchlike/src/queries/query06.rs", "rank": 57, "score": 208213.49988246243 }, { "content": "pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)\n\nwhere G::Timestamp: Lattice+TotalOrder+Ord {\n\n\n\n let lineitems =\n\n collections\n\n .lineitems()\n\n .explode(|l|\n\n if create_date(1995,9,1) <= l.ship_date && l.ship_date < create_date(1995,10,1) {\n\n Some((l.part_key, (l.extended_price * (100 - l.discount) / 100) as isize ))\n\n }\n\n else { None }\n\n )\n\n .arrange_by_self();\n\n\n\n collections\n\n .parts()\n\n .explode(|p| Some((p.part_key, DiffPair::new(1, if starts_with(&p.typ.as_bytes(), b\"PROMO\") { 1 } else { 0 }))))\n\n .arrange_by_self()\n\n .join_core(&lineitems, |&_part_key, _, _| Some(()))\n\n .count_total()\n\n // .inspect(|x| println!(\"{:?}\", x))\n\n .probe_with(probe);\n\n}\n\n\n", "file_path": "tpchlike/src/queries/query14.rs", "rank": 58, "score": 208213.49988246243 }, { "content": "pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)\n\nwhere\n\n G::Timestamp: Lattice+TotalOrder+Ord\n\n{\n\n collections\n\n .lineitems()\n\n .explode(|item|\n\n if item.ship_date <= ::types::create_date(1998, 9, 2) {\n\n Some(((item.return_flag[0], item.line_status[0]),\n\n DiffPair::new(item.quantity as isize,\n\n DiffPair::new(item.extended_price as isize,\n\n DiffPair::new((item.extended_price * (100 - item.discount) / 100) as isize,\n\n DiffPair::new((item.extended_price * (100 - item.discount) * (100 + item.tax) / 10000) as isize,\n\n DiffPair::new(item.discount as isize, 1)))))))\n\n }\n\n else {\n\n None\n\n }\n\n )\n\n .count_total()\n\n // .inspect(|x| println!(\"{:?}\", x))\n\n .probe_with(probe);\n\n}\n\n\n", "file_path": "tpchlike/src/queries/query01.rs", "rank": 59, "score": 208213.49988246243 }, { "content": "pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)\n\nwhere G::Timestamp: Lattice+TotalOrder+Ord {\n\n\n\n let lineitems =\n\n collections\n\n .lineitems()\n\n .explode(|x|\n\n if starts_with(&x.return_flag, b\"R\") {\n\n Some((x.order_key, (x.extended_price * (100 - x.discount)) as isize))\n\n }\n\n else { None }\n\n );\n\n\n\n let orders =\n\n collections\n\n .orders()\n\n .flat_map(|o|\n\n if create_date(1993,10,1) < o.order_date && o.order_date <= create_date(1994,1,1) {\n\n Some((o.order_key, o.cust_key))\n\n }\n", "file_path": "tpchlike/src/queries/query10.rs", "rank": 60, "score": 208213.49988246243 }, { "content": "pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)\n\nwhere G::Timestamp: Lattice+TotalOrder+Ord {\n\n\n\n println!(\"TODO: Q12 does contortions because isize doesn't implement Mul<DiffPair<isize, isize>>.\");\n\n\n\n let orders =\n\n collections\n\n .orders()\n\n .explode(|o|\n\n if starts_with(&o.order_priority, b\"1-URGENT\") || starts_with(&o.order_priority, b\"2-HIGH\") {\n\n Some((o.order_key, DiffPair::new(1, 0)))\n\n }\n\n else {\n\n Some((o.order_key, DiffPair::new(0, 1)))\n\n }\n\n )\n\n .arrange_by_self();\n\n\n\n let lineitems =\n\n collections\n", "file_path": "tpchlike/src/queries/query12.rs", "rank": 61, "score": 208213.49988246243 }, { "content": "pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)\n\nwhere G::Timestamp: Lattice+TotalOrder+Ord {\n\n\n\n println!(\"TODO: Q09 join order may be pessimal; could pivot to put lineitems last\");\n\n\n\n let parts =\n\n collections\n\n .parts()\n\n .flat_map(|x| if substring(&x.name.as_bytes(), b\"green\") { Some(x.part_key) } else { None } );\n\n\n\n collections\n\n .lineitems()\n\n .map(|l| (l.part_key, (l.supp_key, l.order_key, l.extended_price * (100 - l.discount) / 100, l.quantity)))\n\n .semijoin(&parts)\n\n .map(|(part_key, (supp_key, order_key, revenue, quantity))| ((part_key, supp_key), (order_key, revenue, quantity)))\n\n .join(&collections.partsupps().map(|ps| ((ps.part_key, ps.supp_key), ps.supplycost)))\n\n .explode(|((_part_key, supp_key), ((order_key, revenue, quantity), supplycost))|\n\n Some(((order_key, supp_key), ((revenue - supplycost * quantity) as isize)))\n\n )\n\n .join_map(&collections.orders().map(|o| (o.order_key, o.order_date >> 16)), |_, &supp_key, &order_year| (supp_key, order_year))\n\n .join_map(&collections.suppliers().map(|s| (s.supp_key, s.nation_key)), |_, &order_year, &nation_key| (nation_key, order_year))\n\n .join(&collections.nations().map(|n| (n.nation_key, n.name)))\n\n .count_total()\n\n .probe_with(probe);\n\n}\n\n\n", "file_path": "tpchlike/src/queries/query09.rs", "rank": 62, "score": 208213.49988246243 }, { "content": "pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)\n\nwhere G::Timestamp: Lattice+TotalOrder+Ord {\n\n\n\n let lineitems =\n\n collections\n\n .lineitems()\n\n .flat_map(|l| if l.commit_date < l.receipt_date { Some(l.order_key) } else { None })\n\n .distinct_total();\n\n\n\n collections\n\n .orders()\n\n .flat_map(|o|\n\n if o.order_date >= ::types::create_date(1993, 7, 1) && o.order_date < ::types::create_date(1993, 10, 1) {\n\n Some((o.order_key, o.order_priority))\n\n }\n\n else { None }\n\n )\n\n .semijoin(&lineitems)\n\n .map(|(_k,v)| v)\n\n .count_total()\n\n // .inspect(|x| println!(\"{:?}\", x))\n\n .probe_with(probe);\n\n}\n\n\n", "file_path": "tpchlike/src/queries/query04.rs", "rank": 63, "score": 208213.4998824624 }, { "content": "pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)\n\nwhere G::Timestamp: Lattice+TotalOrder+Ord {\n\n\n\n let parts =\n\n collections\n\n .parts() // We fluff out search strings to have the right lengths. \\\\\n\n .flat_map(|x| {\n\n if &x.brand[..8] == b\"Brand#23\" && &x.container[..7] == b\"MED BOX\" {\n\n Some(x.part_key)\n\n }\n\n else { None }\n\n });\n\n\n\n collections\n\n .lineitems()\n\n .map(|x| (x.part_key, (x.quantity, x.extended_price)))\n\n .semijoin(&parts)\n\n .reduce(|_k, s, t| {\n\n\n\n // determine the total and count of quantity.\n", "file_path": "tpchlike/src/queries/query17.rs", "rank": 64, "score": 208213.49988246243 }, { "content": "pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)\n\nwhere G::Timestamp: Lattice+TotalOrder+Ord {\n\n\n\n let orders =\n\n collections\n\n .orders()\n\n .flat_map(|o|\n\n if starts_with(&o.order_status, b\"F\") { Some(o.order_key) }\n\n else { None }\n\n );\n\n\n\n // lineitems relevant to \"F\" orders.\n\n let lineitems =\n\n collections\n\n .lineitems()\n\n .map(|l| (l.order_key, (l.supp_key, l.receipt_date > l.commit_date)))\n\n .semijoin(&orders);\n\n\n\n let lateitems = lineitems.filter(|l| (l.1).1);\n\n let lateorders = lateitems.map(|l| l.0).distinct_total();\n", "file_path": "tpchlike/src/queries/query21.rs", "rank": 65, "score": 208213.49988246243 }, { "content": "/// Propagates labels forward, retaining the minimum label.\n\n///\n\n/// This variant takes a pre-arranged edge collection, to facilitate re-use, and allows\n\n/// a method `logic` to specify the rounds in which we introduce various labels. The output\n\n/// of `logic should be a number in the interval [0,64],\n\npub fn propagate_core<G, N, L, Tr, F, R>(edges: &Arranged<G,Tr>, nodes: &Collection<G,(N,L),R>, logic: F) -> Collection<G,(N,L),R>\n\nwhere\n\n G: Scope,\n\n G::Timestamp: Lattice+Ord,\n\n N: ExchangeData+Hash,\n\n R: ExchangeData+Abelian,\n\n R: Mul<R, Output=R>,\n\n R: From<i8>,\n\n L: ExchangeData,\n\n Tr: TraceReader<Key=N, Val=N, Time=G::Timestamp, R=R>+Clone+'static,\n\n Tr::Batch: crate::trace::BatchReader<N, N, G::Timestamp, Tr::R>+'static,\n\n Tr::Cursor: crate::trace::Cursor<N, N, G::Timestamp, Tr::R>+'static,\n\n F: Fn(&L)->u64+Clone+'static,\n\n{\n\n // Morally the code performs the following iterative computation. However, in the interest of a simplified\n\n // dataflow graph and reduced memory footprint we instead have a wordier version below. The core differences\n\n // between the two are that 1. the former filters its input and pretends to perform non-monotonic computation,\n\n // whereas the latter creates an initially empty monotonic iteration variable, and 2. the latter rotates the\n\n // iterative computation so that the arrangement produced by `reduce` can be re-used.\n\n\n", "file_path": "src/algorithms/graphs/propagate.rs", "rank": 66, "score": 202952.39720036724 }, { "content": "/// Propagates labels forward, retaining the minimum label.\n\n///\n\n/// This algorithm naively propagates all labels at once, much like standard label propagation.\n\n/// To more carefully control the label propagation, consider `propagate_core` which supports a\n\n/// method to limit the introduction of labels.\n\npub fn propagate_at<G, N, L, F, R>(edges: &Collection<G, (N,N), R>, nodes: &Collection<G,(N,L),R>, logic: F) -> Collection<G,(N,L),R>\n\nwhere\n\n G: Scope,\n\n G::Timestamp: Lattice+Ord,\n\n N: ExchangeData+Hash,\n\n R: ExchangeData+Abelian,\n\n R: Mul<R, Output=R>,\n\n R: From<i8>,\n\n L: ExchangeData,\n\n F: Fn(&L)->u64+Clone+'static,\n\n{\n\n propagate_core(&edges.arrange_by_key(), nodes, logic)\n\n}\n\n\n\nuse trace::TraceReader;\n\nuse operators::arrange::arrangement::Arranged;\n\n\n", "file_path": "src/algorithms/graphs/propagate.rs", "rank": 67, "score": 202149.53089895178 }, { "content": "fn triangles<G: Scope>(edges: &Collection<G, Edge>) -> Collection<G, (Node, Node, Node)>\n\nwhere G::Timestamp: Lattice+Hash+Ord {\n\n\n\n // only use forward-pointing edges.\n\n let edges = edges.filter(|&(src, dst)| src < dst);\n\n\n\n // arrange the edge relation three ways.\n\n let as_self = edges.arrange_by_self();\n\n let forward = edges.arrange_by_key();\n\n let reverse = edges.map_in_place(|x| ::std::mem::swap(&mut x.0, &mut x.1))\n\n .arrange_by_key();\n\n\n\n // arrange the count of extensions from each source.\n\n let counts = edges.map(|(src, _dst)| src)\n\n .arrange_by_self();\n\n\n\n // extract ((src, dst), idx) tuples with weights equal to the number of extensions.\n\n let cand_count1 = forward.join_core(&counts, |&src, &dst, &()| Some(((src, dst), 1)));\n\n let cand_count2 = reverse.join_core(&counts, |&dst, &src, &()| Some(((src, dst), 2)));\n\n\n", "file_path": "dogsdogsdogs/examples/ngo.rs", "rank": 68, "score": 200829.5973722318 }, { "content": "// Generate synthetic interactions with a skewed distribution\n\nfn generate_interactions<R>(how_many: usize, rng: &mut R) -> Vec<(u32,u32)> where R: Rng {\n\n let mut interactions = Vec::with_capacity(how_many);\n\n\n\n let mut user_sampler = CRP::new(6000.0, 0.35);\n\n let mut item_sampler = CRP::new(6000.0, 0.35);\n\n\n\n for _ in 0 .. how_many {\n\n let user = user_sampler.sample(rng);\n\n let item = item_sampler.sample(rng);\n\n interactions.push((user, item));\n\n }\n\n\n\n interactions\n\n}\n", "file_path": "examples/itembased_cf.rs", "rank": 69, "score": 199960.6201059934 }, { "content": "/// Returns pairs (node, dist) indicating distance of each node from a root.\n\npub fn bfs<G, N>(edges: &Collection<G, (N,N)>, roots: &Collection<G, N>) -> Collection<G, (N,u32)>\n\nwhere\n\n G: Scope,\n\n G::Timestamp: Lattice+Ord,\n\n N: ExchangeData+Hash,\n\n{\n\n use operators::arrange::arrangement::ArrangeByKey;\n\n let edges = edges.arrange_by_key();\n\n bfs_arranged(&edges, roots)\n\n}\n\n\n\nuse crate::trace::TraceReader;\n\nuse crate::operators::arrange::Arranged;\n\n\n", "file_path": "src/algorithms/graphs/bfs.rs", "rank": 70, "score": 198264.63813113488 }, { "content": "// returns pairs (n, s) indicating node n can be reached from a root in s steps.\n\nfn bfs<G: Scope>(edges: &Collection<G, Edge, MinSum>, roots: &Collection<G, Node, MinSum>) -> Collection<G, Node, MinSum>\n\nwhere G::Timestamp: Lattice+Ord {\n\n\n\n // repeatedly update minimal distances each node can be reached from each root\n\n roots.scope().iterative::<u32,_,_>(|scope| {\n\n\n\n use differential_dataflow::operators::iterate::SemigroupVariable;\n\n use differential_dataflow::operators::reduce::ReduceCore;\n\n use differential_dataflow::trace::implementations::ord::OrdKeySpine as DefaultKeyTrace;\n\n\n\n\n\n use timely::order::Product;\n\n let variable = SemigroupVariable::new(scope, Product::new(Default::default(), 1));\n\n\n\n let edges = edges.enter(scope);\n\n let roots = roots.enter(scope);\n\n\n\n let result =\n\n variable\n\n .map(|n| (n,()))\n", "file_path": "examples/monoid-bfs.rs", "rank": 71, "score": 197548.57929519942 }, { "content": "/// Returns pairs (node, dist) indicating distance of each node from a root.\n\npub fn bfs_arranged<G, N, Tr>(edges: &Arranged<G, Tr>, roots: &Collection<G, N>) -> Collection<G, (N, u32)>\n\nwhere\n\n G: Scope,\n\n G::Timestamp: Lattice+Ord,\n\n N: ExchangeData+Hash,\n\n Tr: TraceReader<Key=N, Val=N, Time=G::Timestamp, R=isize>+Clone+'static,\n\n Tr::Batch: crate::trace::BatchReader<N, N, G::Timestamp, Tr::R>+'static,\n\n Tr::Cursor: crate::trace::Cursor<N, N, G::Timestamp, Tr::R>+'static,\n\n{\n\n // initialize roots as reaching themselves at distance 0\n\n let nodes = roots.map(|x| (x, 0));\n\n\n\n // repeatedly update minimal distances each node can be reached from each root\n\n nodes.iterate(|inner| {\n\n\n\n let edges = edges.enter(&inner.scope());\n\n let nodes = nodes.enter(&inner.scope());\n\n\n\n inner.join_core(&edges, |_k,l,d| Some((d.clone(), l+1)))\n\n .concat(&nodes)\n\n .reduce(|_, s, t| t.push((s[0].0.clone(), 1)))\n\n })\n\n}", "file_path": "src/algorithms/graphs/bfs.rs", "rank": 72, "score": 195282.3297459216 }, { "content": "fn _reachability<G: Scope>(edges: &Collection<G, Edge>, nodes: &Collection<G, (Node, Node)>) -> Collection<G, Edge>\n\nwhere G::Timestamp: Lattice+Ord+Hash {\n\n\n\n edges.filter(|_| false)\n\n .iterate(|inner| {\n\n let edges = edges.enter(&inner.scope());\n\n let nodes = nodes.enter_at(&inner.scope(), |r| 256 * (64 - (r.0 as u64).leading_zeros() as u64));\n\n\n\n inner.join_map(&edges, |_k,l,d| (*d,*l))\n\n .concat(&nodes)\n\n .reduce(|_, s, t| t.push((*s[0].0, 1)))\n\n\n\n })\n\n}\n", "file_path": "tests/scc.rs", "rank": 73, "score": 194494.05172426775 }, { "content": "type TraceBatch = OrdValBatch<usize, usize, RootTime, isize>;\n", "file_path": "server/src/lib.rs", "rank": 74, "score": 191018.3791368112 }, { "content": "fn visit(node: usize, forward: &HashMap<usize, Vec<usize>>, visited: &mut HashSet<usize>, list: &mut Vec<usize>) {\n\n if !visited.contains(&node) {\n\n visited.insert(node);\n\n if let Some(edges) = forward.get(&node) {\n\n for &edge in edges.iter() {\n\n visit(edge, forward, visited, list)\n\n }\n\n }\n\n list.push(node);\n\n }\n\n}\n\n\n", "file_path": "tests/scc.rs", "rank": 75, "score": 185770.7911406774 }, { "content": "fn _color<G, N>(edges: &Collection<G, (N,N)>) -> Collection<G,(N,Option<u32>)>\n\nwhere\n\n G: Scope,\n\n G::Timestamp: Lattice+Ord,\n\n N: ExchangeData+Hash,\n\n{\n\n // need some bogus initial values.\n\n let start = edges.map(|(x,_y)| (x,u32::max_value()))\n\n .distinct();\n\n\n\n // repeatedly apply color-picking logic.\n\n sequence(&start, &edges, |_node, vals| {\n\n\n\n // look for the first absent positive integer.\n\n // start at 1 in case we ever use NonZero<u32>.\n\n\n\n (1u32 ..)\n\n .filter(|&i| vals.get(i as usize - 1).map(|x| *x.0) != Some(i))\n\n .next()\n\n .unwrap()\n\n })\n\n}\n\n\n", "file_path": "src/algorithms/graphs/sequential.rs", "rank": 76, "score": 182900.11177307827 }, { "content": "fn get_trace() -> Spine<UnsignedWrapper<u64>, u64, usize, i64, Rc<OrdValBatch<UnsignedWrapper<u64>, u64, usize, i64>>> {\n\n let op_info = OperatorInfo::new(0, 0, &[]);\n\n let mut trace = IntegerTrace::new(op_info, None, None);\n\n {\n\n let mut batcher = <<IntegerTrace as TraceReader>::Batch as Batch<UnsignedWrapper<u64>, u64, usize, i64>>::Batcher::new();\n\n\n\n batcher.push_batch(&mut vec![\n\n ((1.into(), 2), 0, 1),\n\n ((2.into(), 3), 1, 1),\n\n ((2.into(), 3), 2, -1),\n\n ]);\n\n\n\n let batch_ts = &[1, 2, 3];\n\n let batches = batch_ts.iter().map(move |i| batcher.seal(Antichain::from_elem(*i)));\n\n for b in batches {\n\n trace.insert(b);\n\n }\n\n }\n\n trace\n\n}\n\n\n", "file_path": "tests/trace.rs", "rank": 77, "score": 182579.83456345863 }, { "content": "/// Read a command and its arguments.\n\nfn read_integers<'a>(input: impl Iterator<Item=&'a str>) -> Result<Vec<isize>, std::num::ParseIntError> {\n\n let mut integers = Vec::new();\n\n for text in input {\n\n integers.push(text.parse()?);\n\n }\n\n Ok(integers)\n\n}", "file_path": "examples/multitemporal.rs", "rank": 78, "score": 182128.3523159607 }, { "content": "fn copy_from_to(src: &[u8], dst: &mut [u8]) {\n\n let limit = if src.len() < dst.len() { src.len() } else { dst.len() };\n\n for index in 0 .. limit {\n\n dst[index] = src[index];\n\n }\n\n}\n\n\n", "file_path": "tpchlike/src/types.rs", "rank": 79, "score": 181795.43084477726 }, { "content": "fn main() {\n\n\n\n let mut args = std::env::args().skip(1);\n\n let prefix = args.next().expect(\"must supply path to facts\");\n\n let batch: Time = args.next().unwrap_or(\"1\".to_string()).parse().expect(\"batch must be an integer\");\n\n\n\n timely::execute_from_args(std::env::args(), move |worker| {\n\n\n\n let timer = ::std::time::Instant::now();\n\n let index = worker.index();\n\n\n\n let mut probe = ProbeHandle::new();\n\n\n\n // For interning strings.\n\n let interner = Rc::new(RefCell::new(StringInterner::new()));\n\n\n\n let mut inputs = (\n\n Vec::new(),\n\n Vec::new(),\n\n Vec::new(),\n", "file_path": "doop/src/main.rs", "rank": 80, "score": 177152.9297868865 }, { "content": "fn _strongly_connected<G: Scope>(graph: &Collection<G, Edge>) -> Collection<G, Edge>\n\nwhere G::Timestamp: Lattice+Ord+Hash {\n\n graph.iterate(|inner| {\n\n let edges = graph.enter(&inner.scope());\n\n let trans = edges.map_in_place(|x| mem::swap(&mut x.0, &mut x.1));\n\n _trim_edges(&_trim_edges(inner, &edges), &trans)\n\n })\n\n}\n\n\n", "file_path": "tests/scc.rs", "rank": 81, "score": 175987.9073013793 }, { "content": "fn test_sizes(nodes: usize, edges: usize, rounds: usize, config: Config) {\n\n\n\n let root_list = vec![(1, 0, 1)];\n\n let mut edge_list = Vec::new();\n\n\n\n let seed: &[_] = &[1, 2, 3, 4];\n\n let mut rng1: StdRng = SeedableRng::from_seed(seed); // rng for edge additions\n\n let mut rng2: StdRng = SeedableRng::from_seed(seed); // rng for edge deletions\n\n\n\n for _ in 0 .. edges {\n\n edge_list.push(((rng1.gen_range(0, nodes), rng1.gen_range(0, nodes)), 0, 1));\n\n }\n\n\n\n for round in 1 .. rounds {\n\n edge_list.push(((rng1.gen_range(0, nodes), rng1.gen_range(0, nodes)), round, 1));\n\n edge_list.push(((rng2.gen_range(0, nodes), rng2.gen_range(0, nodes)), round,-1));\n\n }\n\n\n\n let mut results1 = bfs_sequential(root_list.clone(), edge_list.clone());\n\n let mut results2 = bfs_differential(root_list.clone(), edge_list.clone(), config);\n", "file_path": "tests/bfs.rs", "rank": 82, "score": 175535.5698981652 }, { "content": "fn test_sizes(nodes: usize, edges: usize, rounds: usize, config: Config) {\n\n\n\n let mut edge_list = Vec::new();\n\n\n\n let seed: &[_] = &[1, 2, 3, 4];\n\n let mut rng1: StdRng = SeedableRng::from_seed(seed); // rng for edge additions\n\n let mut rng2: StdRng = SeedableRng::from_seed(seed); // rng for edge deletions\n\n\n\n for _ in 0 .. edges {\n\n edge_list.push(((rng1.gen_range(0, nodes), rng1.gen_range(0, nodes)), 0, 1));\n\n }\n\n\n\n for round in 1 .. rounds {\n\n edge_list.push(((rng1.gen_range(0, nodes), rng1.gen_range(0, nodes)), round, 1));\n\n edge_list.push(((rng2.gen_range(0, nodes), rng2.gen_range(0, nodes)), round,-1));\n\n }\n\n\n\n // for thing in edge_list.iter() {\n\n // println!(\"input: {:?}\", thing);\n\n // }\n", "file_path": "tests/scc.rs", "rank": 83, "score": 175535.5698981652 }, { "content": "/// Sorts and consolidates `vec`.\n\n///\n\n/// This method will sort `vec` and then consolidate runs of more than one entry with\n\n/// identical first elements by accumulating the second elements of the pairs. Should the final\n\n/// accumulation be zero, the element is discarded.\n\npub fn consolidate<T: Ord, R: Semigroup>(vec: &mut Vec<(T, R)>) {\n\n consolidate_from(vec, 0);\n\n}\n\n\n", "file_path": "src/consolidation.rs", "rank": 84, "score": 173834.8534790057 }, { "content": "type TraceSpine = Spine<usize, usize, RootTime, isize, Rc<TraceBatch>>;\n\npub type TraceHandle = TraceAgent<usize, usize, RootTime, isize, TraceSpine>;\n\n\n\n/// Arguments provided to each shared library to help build their dataflows and register their results.\n\npub type Environment<'a, 'b> = (\n\n &'a mut Child<'b, Worker<Allocator>,usize>,\n\n &'a mut TraceHandler,\n\n &'a mut ProbeHandle<RootTime>,\n\n &'a Instant,\n\n &'a [String]\n\n);\n\n\n\n/// A wrapper around types that keep their source libraries alive.\n\n///\n\n/// This type is meant to be a smart pointer for a type `T` that needs to keep\n\n/// a `Library` alive, perhaps because its methods would call in to the library.\n\n/// The type should have a specified drop order (viz RFC 1857) which guarentees\n\n/// that the shared library reference drops only after the element itself is\n\n/// dropped. It also implements `Deref` and `DerefMut` to provide the experience\n\n/// of a `T` itself.\n", "file_path": "server/src/lib.rs", "rank": 85, "score": 173254.92458231538 }, { "content": "/// Sorts and consolidates a slice, returning the valid prefix length.\n\npub fn consolidate_slice<T: Ord, R: Semigroup>(slice: &mut [(T, R)]) -> usize {\n\n\n\n // We could do an insertion-sort like initial scan which builds up sorted, consolidated runs.\n\n // In a world where there are not many results, we may never even need to call in to merge sort.\n\n slice.sort_by(|x,y| x.0.cmp(&y.0));\n\n\n\n // Counts the number of distinct known-non-zero accumulations. Indexes the write location.\n\n let mut offset = 0;\n\n for index in 1 .. slice.len() {\n\n\n\n // The following unsafe block elides various bounds checks, using the reasoning that `offset`\n\n // is always strictly less than `index` at the beginning of each iteration. This is initially\n\n // true, and in each iteration `offset` can increase by at most one (whereas `index` always\n\n // increases by one). As `index` is always in bounds, and `offset` starts at zero, it too is\n\n // always in bounds.\n\n //\n\n // LLVM appears to struggle to optimize out Rust's split_at_mut, which would prove disjointness\n\n // using run-time tests.\n\n unsafe {\n\n\n", "file_path": "src/consolidation.rs", "rank": 86, "score": 171403.57398581167 }, { "content": "fn main() {\n\n let rounds: u32 = std::env::args().nth(1).unwrap().parse().unwrap();\n\n\n\n let mut summaries = timely::execute_from_args(std::env::args(), move |worker| {\n\n let mut probe = Handle::new();\n\n let (mut graph, mut graph_trace) = worker.dataflow(|scope| {\n\n let (graph_input, graph) = scope.new_collection();\n\n\n\n let graph_arr = graph.map(|(x, y): Edge| (x, (x, y))).arrange_by_key();\n\n let graph_trace = graph_arr.trace.clone();\n\n\n\n /* Be sure to attach probe to arrangements we want to enumerate;\n\n * so we know when all updates for a given epoch have been added to the arrangement. */\n\n graph_arr\n\n .as_collection(|_, v| *v)\n\n .consolidate()\n\n //.inspect(move |x| println!(\"{:?}\", x))\n\n .probe_with(&mut probe);\n\n\n\n /* Return `graph_trace`, so we can obtain cursor for the arrangement at runtime. */\n", "file_path": "examples/cursors.rs", "rank": 87, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n let keys: usize = std::env::args().nth(1).unwrap().parse().unwrap();\n\n let batch: usize = 10_000;\n\n\n\n // This computation demonstrates in-place accumulation of arbitrarily large \n\n // volumes of input data, consuming space bounded by the number of distinct keys.\n\n timely::execute_from_args(std::env::args().skip(2), move |worker| {\n\n\n\n let index = worker.index();\n\n let peers = worker.peers();\n\n\n\n let mut input = worker.dataflow::<(), _, _>(|scope| {\n\n let (input, data) = scope.new_collection::<_, isize>();\n\n data.consolidate();\n\n input\n\n });\n\n\n\n let seed: &[_] = &[1, 2, 3, 4];\n\n let mut rng: StdRng = SeedableRng::from_seed(seed);\n", "file_path": "examples/accumulate.rs", "rank": 88, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n timely::execute_from_args(std::env::args(), move |worker| {\n\n\n\n let timer = worker.timer();\n\n let mut probe = Handle::new();\n\n\n\n let (mut nodes, mut edges, mut times) = worker.dataflow::<usize,_,_>(|scope| {\n\n\n\n let (node_input, nodes) = scope.new_collection();\n\n let (edge_input, edges) = scope.new_collection();\n\n let (time_input, times) = scope.new_collection();\n\n\n\n // Detect cycles that do not increment timestamps.\n\n find_cycles::<_,usize>(nodes.clone(), edges.clone())\n\n .inspect(move |x| println!(\"{:?}\\tcycles: {:?}\", timer.elapsed(), x))\n\n .probe_with(&mut probe);\n\n\n\n // Summarize all paths to inputs of operator zero.\n\n summarize::<_,usize>(nodes.clone(), edges.clone())\n", "file_path": "examples/progress.rs", "rank": 89, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n let nodes: u32 = std::env::args().nth(1).unwrap().parse().unwrap();\n\n let edges: usize = std::env::args().nth(2).unwrap().parse().unwrap();\n\n let batch: usize = std::env::args().nth(3).unwrap().parse().unwrap();\n\n let pre: usize = std::env::args().nth(4).unwrap().parse().unwrap();\n\n let inspect: bool = std::env::args().nth(5).unwrap() == \"inspect\";\n\n\n\n\n\n // define a new timely dataflow computation.\n\n timely::execute_from_args(std::env::args().skip(6), move |worker| {\n\n\n\n let timer = ::std::time::Instant::now();\n\n\n\n let index = worker.index();\n\n let peers = worker.peers();\n\n\n\n let mut probe = timely::dataflow::operators::probe::Handle::new();\n\n\n\n // create a dataflow managing an ever-changing edge collection.\n", "file_path": "examples/arrange.rs", "rank": 90, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n let nodes: u32 = std::env::args().nth(1).unwrap().parse().unwrap();\n\n let edges: usize = std::env::args().nth(2).unwrap().parse().unwrap();\n\n let batch: usize = std::env::args().nth(3).unwrap().parse().unwrap();\n\n let inspect: bool = std::env::args().nth(4).unwrap() == \"inspect\";\n\n let open_loop: bool = std::env::args().nth(5).unwrap() == \"open-loop\";\n\n\n\n // define a new computational scope, in which to run BFS\n\n timely::execute_from_args(std::env::args().skip(6), move |worker| {\n\n\n\n let timer = ::std::time::Instant::now();\n\n\n\n let index = worker.index();\n\n let peers = worker.peers();\n\n\n\n // create a degree counting differential dataflow\n\n let (mut input, probe) = worker.dataflow(|scope| {\n\n\n\n // create edge input, count a few ways.\n", "file_path": "examples/degrees.rs", "rank": 91, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n timely::execute_from_args(std::env::args(), move |worker| {\n\n\n\n // Used to determine if our output has caught up to our input.\n\n let mut probe: ProbeHandle<Pair<isize, isize>> = ProbeHandle::new();\n\n\n\n let (mut input, mut capability, mut trace) =\n\n worker.dataflow(|scope| {\n\n\n\n // Create \"unordered\" inputs which provide their capabilities to users.\n\n // Here \"capability\" is a technical term, which is \"permission to send\n\n // data or after a certain timestamp\". When this capability is dropped\n\n // or downgraded, the input communicates that its possible timestamps\n\n // have advanced, and the system can start to make progress.\n\n let ((input, capability), data) = scope.new_unordered_input();\n\n\n\n let arrangement =\n\n data.as_collection()\n\n .count()\n", "file_path": "examples/multitemporal.rs", "rank": 92, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n let large: usize = std::env::args().nth(1).unwrap().parse().unwrap();\n\n let small: usize = std::env::args().nth(2).unwrap().parse().unwrap();\n\n let batch: usize = std::env::args().nth(3).unwrap().parse().unwrap();\n\n let total: usize = std::env::args().nth(4).unwrap().parse().unwrap();\n\n\n\n // define a new timely dataflow computation.\n\n timely::execute_from_args(std::env::args().skip(3), move |worker| {\n\n\n\n let timer = ::std::time::Instant::now();\n\n\n\n let mut probe = timely::dataflow::operators::probe::Handle::new();\n\n\n\n // create a dataflow managing an ever-changing edge collection.\n\n \tlet mut handle = worker.dataflow(|scope| {\n\n let (handle, input) = scope.new_collection();\n\n input.distinct().probe_with(&mut probe);\n\n handle\n\n });\n", "file_path": "examples/compact.rs", "rank": 93, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n // snag a filename to use for the input graph.\n\n let filename = std::env::args().nth(1).unwrap();\n\n let iterations: Iter = std::env::args().nth(2).unwrap().parse().unwrap();\n\n let inspect = std::env::args().nth(3) == Some(\"inspect\".to_string());\n\n\n\n timely::execute_from_args(std::env::args().skip(2), move |worker| {\n\n\n\n let peers = worker.peers();\n\n let index = worker.index();\n\n let timer = worker.timer();\n\n\n\n let mut input = InputSession::new();\n\n let mut probe = ProbeHandle::new();\n\n\n\n worker.dataflow::<Time,_,_>(|scope| {\n\n let edges = input.to_collection(scope);\n\n pagerank(iterations, &edges)\n\n .filter(move |_| inspect)\n", "file_path": "examples/pagerank.rs", "rank": 94, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n // snag a filename to use for the input graph.\n\n let filename = std::env::args().nth(1).unwrap();\n\n\n\n timely::execute_from_args(std::env::args().skip(2), move |worker| {\n\n\n\n let peers = worker.peers();\n\n let index = worker.index();\n\n\n\n // // What you might do if you used GraphMMap:\n\n let graph = GraphMMap::new(&filename);\n\n let nodes = graph.nodes();\n\n let edges = (0..nodes).filter(move |node| node % peers == index)\n\n .flat_map(|node| graph.edges(node).iter().cloned().map(move |dst| ((node as u32, dst))))\n\n .map(|(src, dst)| ((src, dst), Default::default(), 1))\n\n .collect::<Vec<_>>();\n\n\n\n println!(\"loaded {} nodes, {} edges\", nodes, edges.len());\n\n\n\n worker.dataflow::<(),_,_>(|scope| {\n\n interpret(&Collection::new(edges.to_stream(scope)), &[(0,2), (1,2)]);\n\n });\n\n\n\n }).unwrap();\n\n}\n\n\n", "file_path": "examples/interpreted.rs", "rank": 95, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n // define a new computational scope, in which to run BFS\n\n timely::execute_from_args(std::env::args(), move |worker| {\n\n \n\n // define BFS dataflow; return handles to roots and edges inputs\n\n let mut probe = Handle::new();\n\n let (mut rules, mut graph) = worker.dataflow(|scope| {\n\n\n\n let (rule_input, rules) = scope.new_collection();\n\n let (edge_input, graph) = scope.new_collection();\n\n\n\n let result = graph.iterate(|inner| {\n\n\n\n let rules = rules.enter(&inner.scope());\n\n let arranged = inner.arrange_by_key();\n\n\n\n // rule 0: remove self-loops:\n\n let freeze0 = freeze(&arranged, |t| {\n\n if t.inner <= 0 {\n", "file_path": "examples/freeze.rs", "rank": 96, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n // define a new computational scope, in which to run BFS\n\n timely::execute_from_args(std::env::args(), move |worker| {\n\n\n\n // An input for (x,y,z) placements.\n\n let mut xyzs = InputSession::<_,_,isize>::new();\n\n\n\n // Inputs for (x,y) and (x,z) goals.\n\n let mut xy_goal = InputSession::new();\n\n let mut xz_goal = InputSession::new();\n\n\n\n let mut probe = Handle::new();\n\n\n\n // Dataflow to validate input against goals.\n\n worker.dataflow(|scope| {\n\n\n\n // Introduce inputs to the scope.\n\n let xyzs = xyzs.to_collection(scope);\n\n let xy_goal = xy_goal.to_collection(scope);\n", "file_path": "examples/projekt.rs", "rank": 97, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n let nodes: u32 = std::env::args().nth(1).unwrap().parse().unwrap();\n\n let edges: u32 = std::env::args().nth(2).unwrap().parse().unwrap();\n\n let batch: u32 = std::env::args().nth(3).unwrap().parse().unwrap();\n\n let rounds: u32 = std::env::args().nth(4).unwrap().parse().unwrap();\n\n let inspect: bool = std::env::args().nth(5).unwrap() == \"inspect\";\n\n\n\n // define a new computational scope, in which to run BFS\n\n timely::execute_from_args(std::env::args(), move |worker| {\n\n\n\n if let Ok(addr) = ::std::env::var(\"DIFFERENTIAL_LOG_ADDR\") {\n\n\n\n eprintln!(\"enabled DIFFERENTIAL logging to {}\", addr);\n\n\n\n if let Ok(stream) = ::std::net::TcpStream::connect(&addr) {\n\n let writer = ::timely::dataflow::operators::capture::EventWriter::new(stream);\n\n let mut logger = ::timely::logging::BatchLogger::new(writer);\n\n worker.log_register().insert::<DifferentialEvent,_>(\"differential/arrange\", move |time, data|\n\n logger.publish_batch(time, data)\n", "file_path": "examples/bfs.rs", "rank": 98, "score": 169558.44478451117 }, { "content": "fn main() {\n\n\n\n let mut args = std::env::args();\n\n args.next();\n\n\n\n let nodes: u32 = args.next().unwrap().parse().unwrap();\n\n let edges: usize = args.next().unwrap().parse().unwrap();\n\n let batch: u32 = args.next().unwrap().parse().unwrap();\n\n let inspect: bool = args.next().unwrap() == \"inspect\";\n\n\n\n // define a new computational scope, in which to run BFS\n\n timely::execute_from_args(std::env::args().skip(5), move |worker| {\n\n\n\n let timer = ::std::time::Instant::now();\n\n\n\n let index = worker.index();\n\n let peers = worker.peers();\n\n\n\n // create a degree counting differential dataflow\n\n let (mut input, probe) = worker.dataflow::<u32,_,_>(|scope| {\n", "file_path": "examples/hello.rs", "rank": 99, "score": 169558.44478451117 } ]
Rust
src/lib.rs
incident-recipient/gherkin-rust
7668c349ced30d5332821538ce321737cdbbacec
mod parser; pub mod tagexpr; pub use peg::error::ParseError; pub use peg::str::LineCol; use typed_builder::TypedBuilder; use std::path::{Path, PathBuf}; #[derive(Debug, Clone, TypedBuilder, PartialEq, Hash, Eq)] pub struct Background { pub steps: Vec<Step>, #[builder(default)] pub span: (usize, usize), #[builder(default)] pub position: (usize, usize), } #[derive(Debug, Clone, TypedBuilder, PartialEq, Hash, Eq)] pub struct Examples { pub table: Table, #[builder(default)] pub tags: Vec<String>, #[builder(default)] pub span: (usize, usize), #[builder(default)] pub position: (usize, usize), } #[derive(Debug, Clone, TypedBuilder, PartialEq, Hash, Eq)] pub struct Feature { pub name: String, #[builder(default)] pub description: Option<String>, #[builder(default)] pub background: Option<Background>, #[builder(default)] pub scenarios: Vec<Scenario>, #[builder(default)] pub rules: Vec<Rule>, #[builder(default)] pub tags: Vec<String>, #[builder(default)] pub span: (usize, usize), #[builder(default)] pub position: (usize, usize), #[builder(default)] pub path: Option<PathBuf>, } impl PartialOrd for Feature { fn partial_cmp(&self, other: &Feature) -> Option<std::cmp::Ordering> { Some(self.cmp(other)) } } impl Ord for Feature { fn cmp(&self, other: &Feature) -> std::cmp::Ordering { self.name.cmp(&other.name) } } #[derive(Debug, Clone, TypedBuilder, PartialEq, Hash, Eq)] pub struct Rule { pub name: String, #[builder(default)] pub background: Option<Background>, pub scenarios: Vec<Scenario>, #[builder(default)] pub tags: Vec<String>, #[builder(default)] pub span: (usize, usize), #[builder(default)] pub position: (usize, usize), } #[derive(Debug, Clone, TypedBuilder, PartialEq, Hash, Eq)] pub struct Scenario { pub name: String, pub steps: Vec<Step>, #[builder(default)] pub examples: Option<Examples>, #[builder(default)] pub tags: Vec<String>, #[builder(default)] pub span: (usize, usize), #[builder(default)] pub position: (usize, usize), } #[derive(Debug, Clone, TypedBuilder, PartialEq, Hash, Eq)] pub struct Step { pub ty: StepType, pub raw_type: String, pub value: String, #[builder(default)] pub docstring: Option<String>, #[builder(default)] pub table: Option<Table>, #[builder(default)] pub span: (usize, usize), #[builder(default)] pub position: (usize, usize), } #[derive(Debug, Clone, Copy, PartialEq, Hash, Eq)] pub enum StepType { Given, When, Then, } #[derive(Debug, Clone, TypedBuilder, PartialEq, Hash, Eq)] pub struct Table { pub rows: Vec<Vec<String>>, #[builder(default)] pub span: (usize, usize), #[builder(default)] pub position: (usize, usize), } impl Table { pub fn row_width(&self) -> usize { self.rows .iter() .next() .map(|x| x.len()) .unwrap_or_else(|| 0) } } #[derive(Debug, thiserror::Error)] pub enum ParseFileError { #[error("Could not read path: {0}")] Reading(PathBuf, #[source] std::io::Error), #[error("Could not parse feature file: {0}")] Parsing( PathBuf, #[source] peg::error::ParseError<peg::str::LineCol>, ), } impl Feature { #[inline] pub fn parse_path<P: AsRef<Path>>(path: P) -> Result<Feature, ParseFileError> { let s = std::fs::read_to_string(path.as_ref()) .map_err(|e| ParseFileError::Reading(path.as_ref().to_path_buf(), e))?; let mut feature = parser::gherkin_parser::feature(&s, &Default::default()) .map_err(|e| ParseFileError::Parsing(path.as_ref().to_path_buf(), e))?; feature.path = Some(path.as_ref().to_path_buf()); Ok(feature) } #[inline] pub fn parse<S: AsRef<str>>(input: S) -> Result<Feature, ParseError<LineCol>> { parser::gherkin_parser::feature(input.as_ref(), &Default::default()) } } impl Step { pub fn docstring(&self) -> Option<&String> { match &self.docstring { Some(v) => Some(&v), None => None, } } pub fn table(&self) -> Option<&Table> { match &self.table { Some(v) => Some(&v), None => None, } } } impl std::fmt::Display for Step { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{} {}", &self.raw_type, &self.value) } }
mod parser; pub mod tagexpr; pub use peg::error::ParseError; pub use peg::str::LineCol; use typed_builder::TypedBuilder; use std::path::{Path, PathBuf}; #[derive(Debug, Clone, TypedBuilder, P
e { Some(v) => Some(&v), None => None, } } } impl std::fmt::Display for Step { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{} {}", &self.raw_type, &self.value) } }
artialEq, Hash, Eq)] pub struct Background { pub steps: Vec<Step>, #[builder(default)] pub span: (usize, usize), #[builder(default)] pub position: (usize, usize), } #[derive(Debug, Clone, TypedBuilder, PartialEq, Hash, Eq)] pub struct Examples { pub table: Table, #[builder(default)] pub tags: Vec<String>, #[builder(default)] pub span: (usize, usize), #[builder(default)] pub position: (usize, usize), } #[derive(Debug, Clone, TypedBuilder, PartialEq, Hash, Eq)] pub struct Feature { pub name: String, #[builder(default)] pub description: Option<String>, #[builder(default)] pub background: Option<Background>, #[builder(default)] pub scenarios: Vec<Scenario>, #[builder(default)] pub rules: Vec<Rule>, #[builder(default)] pub tags: Vec<String>, #[builder(default)] pub span: (usize, usize), #[builder(default)] pub position: (usize, usize), #[builder(default)] pub path: Option<PathBuf>, } impl PartialOrd for Feature { fn partial_cmp(&self, other: &Feature) -> Option<std::cmp::Ordering> { Some(self.cmp(other)) } } impl Ord for Feature { fn cmp(&self, other: &Feature) -> std::cmp::Ordering { self.name.cmp(&other.name) } } #[derive(Debug, Clone, TypedBuilder, PartialEq, Hash, Eq)] pub struct Rule { pub name: String, #[builder(default)] pub background: Option<Background>, pub scenarios: Vec<Scenario>, #[builder(default)] pub tags: Vec<String>, #[builder(default)] pub span: (usize, usize), #[builder(default)] pub position: (usize, usize), } #[derive(Debug, Clone, TypedBuilder, PartialEq, Hash, Eq)] pub struct Scenario { pub name: String, pub steps: Vec<Step>, #[builder(default)] pub examples: Option<Examples>, #[builder(default)] pub tags: Vec<String>, #[builder(default)] pub span: (usize, usize), #[builder(default)] pub position: (usize, usize), } #[derive(Debug, Clone, TypedBuilder, PartialEq, Hash, Eq)] pub struct Step { pub ty: StepType, pub raw_type: String, pub value: String, #[builder(default)] pub docstring: Option<String>, #[builder(default)] pub table: Option<Table>, #[builder(default)] pub span: (usize, usize), #[builder(default)] pub position: (usize, usize), } #[derive(Debug, Clone, Copy, PartialEq, Hash, Eq)] pub enum StepType { Given, When, Then, } #[derive(Debug, Clone, TypedBuilder, PartialEq, Hash, Eq)] pub struct Table { pub rows: Vec<Vec<String>>, #[builder(default)] pub span: (usize, usize), #[builder(default)] pub position: (usize, usize), } impl Table { pub fn row_width(&self) -> usize { self.rows .iter() .next() .map(|x| x.len()) .unwrap_or_else(|| 0) } } #[derive(Debug, thiserror::Error)] pub enum ParseFileError { #[error("Could not read path: {0}")] Reading(PathBuf, #[source] std::io::Error), #[error("Could not parse feature file: {0}")] Parsing( PathBuf, #[source] peg::error::ParseError<peg::str::LineCol>, ), } impl Feature { #[inline] pub fn parse_path<P: AsRef<Path>>(path: P) -> Result<Feature, ParseFileError> { let s = std::fs::read_to_string(path.as_ref()) .map_err(|e| ParseFileError::Reading(path.as_ref().to_path_buf(), e))?; let mut feature = parser::gherkin_parser::feature(&s, &Default::default()) .map_err(|e| ParseFileError::Parsing(path.as_ref().to_path_buf(), e))?; feature.path = Some(path.as_ref().to_path_buf()); Ok(feature) } #[inline] pub fn parse<S: AsRef<str>>(input: S) -> Result<Feature, ParseError<LineCol>> { parser::gherkin_parser::feature(input.as_ref(), &Default::default()) } } impl Step { pub fn docstring(&self) -> Option<&String> { match &self.docstring { Some(v) => Some(&v), None => None, } } pub fn table(&self) -> Option<&Table> { match &self.tabl
random
[ { "content": "//! let op: TagOperation = \"@a and @b\".parse()?;\n\n//! # Ok(())\n\n//! # }\n\n//! ```\n\n\n\nuse std::str::FromStr;\n\n\n\nimpl FromStr for TagOperation {\n\n type Err = peg::error::ParseError<peg::str::LineCol>;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n crate::parser::gherkin_parser::tag_operation(s, &Default::default())\n\n }\n\n}\n\n\n\n/// A parsed tree of operations for Gherkin tags.\n\n#[derive(Debug, Clone)]\n\npub enum TagOperation {\n\n And(Box<TagOperation>, Box<TagOperation>),\n\n Or(Box<TagOperation>, Box<TagOperation>),\n", "file_path": "src/tagexpr.rs", "rank": 0, "score": 18712.470849082445 }, { "content": " Not(Box<TagOperation>),\n\n Tag(String),\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn parse_tag_expr1() {\n\n let foo: TagOperation = \"@foo and @bar\".parse().unwrap_or_else(|e| panic!(\"{}\", e));\n\n println!(\"{:#?}\", foo);\n\n }\n\n #[test]\n\n fn parse_tag_expr2() {\n\n let foo: TagOperation = \"@foo or @bar\".parse().unwrap_or_else(|e| panic!(\"{}\", e));\n\n println!(\"{:#?}\", foo);\n\n }\n\n\n\n #[test]\n", "file_path": "src/tagexpr.rs", "rank": 1, "score": 18710.12728539006 }, { "content": "// Copyright (c) 2020 Brendan Molloy <[email protected]>\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\n//! ### Tag expressions\n\n//!\n\n//! You can read about tag expressions in the [Cucumber documentation](https://cucumber.io/docs/cucumber/api/#tag-expressions).\n\n//!\n\n//! This implements the parsing apparatus for these expressions so that other crates like [cucumber_rust](https://github.com/bbqsrc/cucumber-rust)\n\n//! may take advantage of them.\n\n//!\n\n//! #### Usage\n\n//!\n\n//! ```\n\n//! use gherkin_rust::tagexpr::TagOperation;\n\n//! # fn main() -> Result<(), peg::error::ParseError<peg::str::LineCol>> {\n", "file_path": "src/tagexpr.rs", "rank": 2, "score": 18709.123782882394 }, { "content": " #[test]\n\n fn parse_tag_expr7() {\n\n let foo: TagOperation = \"not (@a or @b) and (@c or not @d)\"\n\n .parse()\n\n .unwrap_or_else(|e| panic!(\"{}\", e));\n\n println!(\"{:#?}\", foo);\n\n }\n\n\n\n #[test]\n\n fn parse_tag_expr8() {\n\n let foo: TagOperation = \"@a or @b and @c or not @d\"\n\n .parse()\n\n .unwrap_or_else(|e| panic!(\"{}\", e));\n\n println!(\"{:#?}\", foo);\n\n }\n\n}\n", "file_path": "src/tagexpr.rs", "rank": 3, "score": 18707.26713550207 }, { "content": " fn parse_tag_expr1b() {\n\n let foo: TagOperation = \"(@foo and @bar)\"\n\n .parse()\n\n .unwrap_or_else(|e| panic!(\"{}\", e));\n\n println!(\"{:#?}\", foo);\n\n }\n\n #[test]\n\n fn parse_tag_expr2b() {\n\n let foo: TagOperation = \"(@foo or @bar)\".parse().unwrap_or_else(|e| panic!(\"{}\", e));\n\n println!(\"{:#?}\", foo);\n\n }\n\n\n\n #[test]\n\n fn parse_tag_expr3() {\n\n let foo: TagOperation = \"not @fat\".parse().unwrap_or_else(|e| panic!(\"{}\", e));\n\n println!(\"{:#?}\", foo);\n\n }\n\n\n\n #[test]\n\n fn parse_tag_expr4() {\n", "file_path": "src/tagexpr.rs", "rank": 4, "score": 18707.26713550207 }, { "content": " let foo: Result<TagOperation, _> = \"@foo not @bar\".parse();\n\n assert!(foo.is_err());\n\n }\n\n\n\n #[test]\n\n fn parse_tag_expr5() {\n\n let foo: TagOperation = \"(not @foo) and not (@haha or @bar)\"\n\n .parse()\n\n .unwrap_or_else(|e| panic!(\"{}\", e));\n\n println!(\"{:#?}\", foo);\n\n }\n\n\n\n #[test]\n\n fn parse_tag_expr6() {\n\n let foo: TagOperation = \"not @foo and not @haha or @bar\"\n\n .parse()\n\n .unwrap_or_else(|e| panic!(\"{}\", e));\n\n println!(\"{:#?}\", foo);\n\n }\n\n\n", "file_path": "src/tagexpr.rs", "rank": 5, "score": 18707.26713550207 }, { "content": " Background:\n\n Given I have overdue tasks\n\n\n\n Example: First use of the day\n\n Given I last used the app yesterday\n\n When I use the app\n\n Then I am notified about overdue tasks\n\n\n\n Example: Already used today\n\n Given I last used the app earlier today\n\n When I use the app\n\n Then I am not notified about overdue tasks\n\n\";\n\n\n\n #[test]\n\n fn smoke() {\n\n let env = GherkinEnv::default();\n\n assert!(gherkin_parser::feature(FOO, &env).is_ok());\n\n }\n\n\n", "file_path": "src/parser.rs", "rank": 6, "score": 15198.794676032365 }, { "content": "// Copyright (c) 2020 Brendan Molloy <[email protected]>\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse std::cell::RefCell;\n\n\n\nuse crate::tagexpr::TagOperation;\n\nuse crate::{Background, Examples, Feature, Rule, Scenario, Step, StepType, Table};\n\n\n", "file_path": "src/parser.rs", "rank": 7, "score": 15197.95008653198 }, { "content": "}}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n const FOO: &str = \"# language: formal\\r\\n\n\n@hot-stuff\n\nSection: 4.2. The thing we care about\n\nA description just jammed in here for no reason\n\n@lol @a @rule @with-spaces\n\nRule: All gubbins must be placed in the airlock\n\n\n\n@bad_idea\n\nEvidence: A gubbins in an airlock\n\n Given a gubbins\n\n \\\"\\\"\\\"\n\n That's a gubbins\n\n and that is\n\n and so is that\n", "file_path": "src/parser.rs", "rank": 8, "score": 15197.656489495515 }, { "content": "}\n\n\n\npeg::parser! { pub(crate) grammar gherkin_parser(env: &GherkinEnv) for str {\n\n\n\nrule _() = quiet!{[' ' | '\\t']*}\n\nrule __() = quiet!{[' ' | '\\t']+}\n\n\n\nrule nl0() = quiet!{\"\\r\"? \"\\n\"}\n\nrule nl() = quiet!{nl0() p:position!() comment()* {\n\n env.increment_nl(p);\n\n}} \n\nrule eof() = quiet!{![_]}\n\nrule nl_eof() = quiet!{(nl() / [' ' | '\\t'])+ / eof()}\n\nrule comment() = quiet!{[' ' | '\\t']* \"#\" $((!nl0()[_])*) nl()}\n\nrule not_nl() -> &'input str = n:$((!nl0()[_])+) { n }\n\n\n\nrule match_until_starting_word_is_keyword() -> &'input str = n:$((not_nl() / \n\n (!(nl() __ (keyword((env.keywords().given)) / keyword((env.keywords().when)) / keyword((env.keywords().then))) ) nl()))+ nl()) { n }\n\nrule keyword1(list: &[&'static str]) -> &'static str\n\n = input:$([_]*<\n", "file_path": "src/parser.rs", "rank": 9, "score": 15197.035150461717 }, { "content": " #[test]\n\n fn smoke2() {\n\n let env = GherkinEnv::default();\n\n let d = env!(\"CARGO_MANIFEST_DIR\");\n\n let s = std::fs::read_to_string(format!(\"{}/tests/test.feature\", d)).unwrap();\n\n assert!(gherkin_parser::feature(&s, &env).is_ok());\n\n }\n\n\n\n #[test]\n\n fn smoke3() {\n\n let env = GherkinEnv::default();\n\n assert!(gherkin_parser::feature(RULE_WITH_BACKGROUND, &env).is_ok(),\n\n \"RULE_WITH_BACKGROUND was not parsed correctly!\");\n\n }\n\n\n\n #[test]\n\n fn smoke4() {\n\n let env = GherkinEnv::default();\n\n assert!(gherkin_parser::feature(RULE_WITH_RULE_IN_BACKGROUND, &env).is_ok(),\n\n \"RULE_WITH_RULE_IN_BACKGROUND was not parsed correctly!\");\n", "file_path": "src/parser.rs", "rank": 10, "score": 15196.660757622052 }, { "content": " .position(env.position(pa))\n\n .build()\n\n }\n\n\n\nrule tag_char() -> &'input str\n\n = s:$([_]) {?\n\n let x = s.chars().next().unwrap();\n\n if x.is_alphanumeric() || x == '_' || x == '-' {\n\n Ok(s)\n\n } else {\n\n Err(\"tag character\")\n\n }\n\n }\n\n\n\npub(crate) rule tag() -> String\n\n = \"@\" s:tag_char()+ { s.join(\"\") }\n\n\n\npub(crate) rule tags() -> Vec<String>\n\n = t:(tag() ** ([' ']+)) _ nl() { t }\n\n / { vec![] }\n", "file_path": "src/parser.rs", "rank": 11, "score": 15196.521703465454 }, { "content": " }\n\n\n\nrule table_cell() -> &'input str\n\n = \"|\" _ !(nl0() / eof()) n:$((!\"|\"[_])*) { n }\n\n\n\npub(crate) rule table_row() -> Vec<String>\n\n = n:(table_cell() ** _) _ \"|\" _ nl_eof() {\n\n n.into_iter()\n\n .map(str::trim)\n\n .map(str::to_string)\n\n .collect()\n\n }\n\n\n\npub(crate) rule table0() -> Vec<Vec<String>>\n\n = _ d:(table_row() ++ _) {\n\n if d.is_empty() {\n\n d\n\n } else {\n\n let len = d[0].len();\n\n d.into_iter().map(|mut x| { x.truncate(len); x }).collect()\n", "file_path": "src/parser.rs", "rank": 12, "score": 15196.362214270746 }, { "content": " }\n\n\n\nrule rules() -> Vec<Rule>\n\n = _ r:(rule_() ** _)? { r.unwrap_or_else(|| vec![]) }\n\n\n\npub(crate) rule scenarios() -> Vec<Scenario>\n\n = _ s:(scenario() ** _)? { s.unwrap_or_else(|| vec![]) }\n\n\n\npub rule feature() -> Feature\n\n = _ language_directive()? nl()*\n\n t:tags() nl()*\n\n pa:position!()\n\n keyword((env.keywords().feature)) \":\" _ n:$([_]+)\n\n d:description()? nl()*\n\n b:background()? nl()*\n\n s:scenarios() nl()*\n\n r:rules() pb:position!()\n\n nl()*\n\n {\n\n Feature::builder()\n", "file_path": "src/parser.rs", "rank": 13, "score": 15196.346052255052 }, { "content": " const RULE_WITH_MULTILINE_DESCRIPTION: &str = \"\n\nFeature: Everything with background inside rule\n\n\n\nRule: Be sure that I didn't started yet\n\n Background:\n\n Given I didn't started yet\n\n And I'm pretty sure about it\n\n\n\n Scenario: Long winded \n\n A long description elaborating on what is happening\n\n Given I just started\n\n\";\n\n\n\n // From Gherkin 6 documentation\n\n const RULE_WITH_RULE_IN_BACKGROUND: &str = \"\n\nFeature: Overdue tasks\n\n Let users know when tasks are overdue, even when using other\n\n features of the app\n\n\n\n Rule: Users are notified about overdue tasks on first use of the day\n", "file_path": "src/parser.rs", "rank": 14, "score": 15196.334350111629 }, { "content": " }\n\n \n\n #[test]\n\n fn multiline_desription() {\n\n let env = GherkinEnv::default();\n\n let feature_result = gherkin_parser::feature(RULE_WITH_MULTILINE_DESCRIPTION, &env);\n\n assert!(feature_result.is_ok(),\n\n \"RULE_WITH_MULTILINE_DESCRIPTION was not parsed correctly!\");\n\n }\n\n}\n", "file_path": "src/parser.rs", "rank": 15, "score": 15196.327045830527 }, { "content": " }\n\n }\n\n\n\npub(crate) rule table() -> Table\n\n = pa:position!() t:table0() pb:position!() {\n\n Table::builder()\n\n .span((pa, pb))\n\n .position(env.position(pa))\n\n .rows(t)\n\n .build()\n\n }\n\n\n\npub(crate) rule step() -> Step\n\n = pa:position!() k:keyword((env.keywords().given)) __ n:not_nl() pb:position!() _ nl_eof() _\n\n d:docstring()? t:table()?\n\n {\n\n env.set_last_step(StepType::Given);\n\n Step::builder().ty(StepType::Given)\n\n .raw_type(k.to_string())\n\n .value(n.to_string())\n", "file_path": "src/parser.rs", "rank": 16, "score": 15196.172170930116 }, { "content": " but: &[\"Men\"],\n\n};\n\n\n\npub struct GherkinEnv {\n\n keywords: RefCell<Keywords<'static>>,\n\n last_step: RefCell<Option<StepType>>,\n\n last_keyword: RefCell<Option<String>>,\n\n line_offsets: RefCell<Vec<usize>>,\n\n}\n\n\n\nimpl GherkinEnv {\n\n pub fn set_language(&self, language: &str) -> Result<(), &'static str> {\n\n let keywords = match language {\n\n \"formal\" => FORMAL_SPEC_KEYWORDS,\n\n \"sv\" => SV_KEYWORDS,\n\n \"en\" => DEFAULT_KEYWORDS,\n\n _ => return Err(\"Error: requested language not supported\"),\n\n };\n\n\n\n *self.keywords.borrow_mut() = keywords;\n", "file_path": "src/parser.rs", "rank": 17, "score": 15196.121362306085 }, { "content": " .docstring(d)\n\n .span((pa, pb))\n\n .position(env.position(pa))\n\n .build())\n\n }\n\n None => {\n\n Err(\"given, when or then\")\n\n }\n\n }\n\n }\n\n\n\npub(crate) rule steps() -> Vec<Step>\n\n = s:(step() ** _) {\n\n env.clear_last_step();\n\n s\n\n }\n\n\n\nrule background() -> Background\n\n = _ pa:position!()\n\n keyword((env.keywords().background)) \":\" _ nl_eof()\n", "file_path": "src/parser.rs", "rank": 18, "score": 15195.9464530836 }, { "content": " .tags(t)\n\n .name(n.to_string())\n\n .description(d.flatten())\n\n .background(b)\n\n .scenarios(s)\n\n .rules(r)\n\n .span((pa, pb))\n\n .position(env.position(pa))\n\n .build()\n\n }\n\n\n\npub rule tag_operation() -> TagOperation = precedence!{\n\n x:@ _ \"and\" _ y:(@) { TagOperation::And(Box::new(x), Box::new(y)) }\n\n x:@ _ \"or\" _ y:(@) { TagOperation::Or(Box::new(x), Box::new(y)) }\n\n \"not\" _ x:(@) { TagOperation::Not(Box::new(x)) }\n\n --\n\n t:tag() { TagOperation::Tag(t) }\n\n \"(\" t:tag_operation() \")\" _ { t }\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 19, "score": 15195.759907500005 }, { "content": " match env.last_keyword().as_ref() {\n\n Some(v) => Ok(v.len()),\n\n None => Err(\"no match\")\n\n }\n\n }\n\n\n\npub(crate) rule keyword(list: &[&'static str]) -> &'static str\n\n = len:keyword0(list) [_]*<{len}> {\n\n let kw = env.take_keyword();\n\n list.iter().find(|x| **x == &*kw).unwrap()\n\n }\n\n\n\nrule language_directive() -> ()\n\n = \"# language: \" l:$(['a'..='z']+) _ nl() {?\n\n env.set_language(l)\n\n }\n\n\n\nrule docstring() -> String\n\n = \"\\\"\\\"\\\"\" n:$((!\"\\\"\\\"\\\"\"[_])*) \"\\\"\\\"\\\"\" nl_eof() {\n\n textwrap::dedent(n)\n", "file_path": "src/parser.rs", "rank": 20, "score": 15195.731128353984 }, { "content": " fn take_keyword(&self) -> String {\n\n self.last_keyword.borrow_mut().take().unwrap()\n\n }\n\n\n\n fn set_last_step(&self, ty: StepType) {\n\n *self.last_step.borrow_mut() = Some(ty);\n\n }\n\n\n\n fn clear_last_step(&self) {\n\n *self.last_step.borrow_mut() = None;\n\n }\n\n\n\n fn last_step(&self) -> Option<StepType> {\n\n *self.last_step.borrow()\n\n }\n\n\n\n fn increment_nl(&self, offset: usize) {\n\n self.line_offsets.borrow_mut().push(offset);\n\n }\n\n\n", "file_path": "src/parser.rs", "rank": 21, "score": 15194.525098564987 }, { "content": " .table(t)\n\n .docstring(d)\n\n .span((pa, pb))\n\n .position(env.position(pa))\n\n .build()\n\n }\n\n / pa:position!() k:keyword((env.keywords().when)) __ n:not_nl() pb:position!() _ nl_eof() _\n\n d:docstring()? t:table()?\n\n {\n\n env.set_last_step(StepType::When);\n\n Step::builder().ty(StepType::When)\n\n .raw_type(k.to_string())\n\n .value(n.to_string())\n\n .table(t)\n\n .docstring(d)\n\n .span((pa, pb))\n\n .position(env.position(pa))\n\n .build()\n\n }\n\n / pa:position!() k:keyword((env.keywords().then)) __ n:not_nl() pb:position!() _ nl_eof() _\n", "file_path": "src/parser.rs", "rank": 22, "score": 15194.525098564987 }, { "content": "\n\nrule rule_() -> Rule\n\n = _\n\n t:tags()\n\n _\n\n pa:position!()\n\n keyword((env.keywords().rule)) \":\" _ n:not_nl() _ nl_eof()\n\n b:background()? nl()*\n\n s:scenarios()? nl()*\n\n // e:examples()?\n\n pb:position!()\n\n {\n\n Rule::builder()\n\n .name(n.to_string())\n\n .tags(t)\n\n .background(b)\n\n .scenarios(s.unwrap_or_else(|| vec![]))\n\n .span((pa, pb))\n\n .position(env.position(pa))\n\n .build()\n", "file_path": "src/parser.rs", "rank": 23, "score": 15194.525098564987 }, { "content": " .examples(e)\n\n .span((pa, pb))\n\n .position(env.position(pa))\n\n .build()\n\n }\n\n / _\n\n t:tags()\n\n _\n\n pa:position!()\n\n keyword((env.keywords().scenario_outline)) \":\" _ n:not_nl() _ nl_eof()\n\n s:steps()?\n\n e:examples()?\n\n pb:position!()\n\n {\n\n Scenario::builder()\n\n .name(n.to_string())\n\n .tags(t)\n\n .steps(s.unwrap_or_else(|| vec![]))\n\n .examples(e)\n\n .span((pa, pb))\n", "file_path": "src/parser.rs", "rank": 24, "score": 15194.525098564987 }, { "content": " .table(tb)\n\n .span((pa, pb))\n\n .position(env.position(pa))\n\n .build()\n\n }\n\n\n\nrule scenario() -> Scenario\n\n = _\n\n t:tags()\n\n _\n\n pa:position!()\n\n keyword((env.keywords().scenario)) \":\" _ n:$(match_until_starting_word_is_keyword())\n\n s:steps()?\n\n e:examples()?\n\n pb:position!()\n\n {\n\n Scenario::builder()\n\n .name(n.to_string())\n\n .tags(t)\n\n .steps(s.unwrap_or_else(|| vec![]))\n", "file_path": "src/parser.rs", "rank": 25, "score": 15194.525098564987 }, { "content": " self.background,\n\n self.rule,\n\n self.scenario,\n\n self.rule,\n\n self.scenario_outline,\n\n self.examples,\n\n self.given,\n\n self.when,\n\n self.then,\n\n self.and,\n\n self.but,\n\n ]\n\n .iter()\n\n {\n\n v.append(&mut x.to_vec());\n\n }\n\n\n\n v\n\n }\n\n}\n", "file_path": "src/parser.rs", "rank": 26, "score": 15194.525098564987 }, { "content": " = d:(description_line() ** _) {\n\n let d = d.join(\"\\n\");\n\n if d.trim() == \"\" {\n\n None\n\n } else {\n\n Some(d)\n\n }\n\n }\n\n\n\nrule examples() -> Examples\n\n = _\n\n t:tags()\n\n _\n\n pa:position!()\n\n keyword((env.keywords().examples)) \":\" _ nl_eof()\n\n tb:table()\n\n pb:position!()\n\n {\n\n Examples::builder()\n\n .tags(t)\n", "file_path": "src/parser.rs", "rank": 27, "score": 15194.525098564987 }, { "content": " {list.iter().map(|x| x.len()).min().unwrap()},\n\n {list.iter().map(|x| x.len()).max().unwrap()}\n\n >) {?\n\n // println!(\"Input: {} {:?}\", &input, &list);\n\n match list.iter().find(|x| input.starts_with(**x)) {\n\n Some(v) => {\n\n env.set_keyword((*v).to_string());\n\n // println!(\"Found: {}\", &v);\n\n Err(\"success\")\n\n },\n\n None => {\n\n // println!(\"Unfound: {}\", &input);\n\n env.clear_keyword();\n\n Err(\"unknown keyword\")\n\n }\n\n }\n\n }\n\n\n\nrule keyword0(list: &[&'static str]) -> usize\n\n = keyword1(list)? {?\n", "file_path": "src/parser.rs", "rank": 28, "score": 15194.525098564987 }, { "content": "\n\nconst DEFAULT_KEYWORDS: Keywords<'static> = Keywords {\n\n feature: &[\"Feature\"],\n\n background: &[\"Background\"],\n\n rule: &[\"Rule\"],\n\n scenario: &[\"Scenario\", \"Example\"],\n\n scenario_outline: &[\"Scenario Outline\", \"Scenario Template\"],\n\n examples: &[\"Examples\"],\n\n given: &[\"Given\"],\n\n when: &[\"When\"],\n\n then: &[\"Then\"],\n\n and: &[\"*\", \"And\"],\n\n but: &[\"But\"],\n\n};\n\n\n\nconst FORMAL_SPEC_KEYWORDS: Keywords<'static> = Keywords {\n\n feature: &[\"Section\"],\n\n background: &[\"Context\"],\n\n rule: &[\"Rule\"],\n\n scenario: &[\"Proof\", \"Evidence\"],\n", "file_path": "src/parser.rs", "rank": 29, "score": 15194.525098564987 }, { "content": "\n\n Ok(())\n\n }\n\n\n\n fn keywords(&self) -> std::cell::Ref<Keywords<'static>> {\n\n self.keywords.borrow()\n\n }\n\n\n\n fn set_keyword(&self, kw: String) {\n\n *self.last_keyword.borrow_mut() = Some(kw);\n\n }\n\n\n\n fn clear_keyword(&self) {\n\n *self.last_keyword.borrow_mut() = None;\n\n }\n\n\n\n fn last_keyword(&self) -> std::cell::Ref<Option<String>> {\n\n self.last_keyword.borrow()\n\n }\n\n\n", "file_path": "src/parser.rs", "rank": 30, "score": 15194.525098564987 }, { "content": " \\\"\\\"\\\"\n\n When a gubbins is forced into this weird corner\n\n | a | b | c |\n\n | 1 | 2 | 3 |\n\n | 4 | 5 | 6 |\n\n Then a gubbins is proven to be in an airlock\n\n\";\n\n\n\n const RULE_WITH_BACKGROUND: &str = \"\n\nFeature: Everything with background inside rule\n\n\n\nRule: Be sure that I didn't started yet\n\n Background:\n\n Given I didn't started yet\n\n And I'm pretty sure about it\n\n\n\n Scenario: Nothing\n\n Given I just started\n\n\";\n\n\n", "file_path": "src/parser.rs", "rank": 31, "score": 15194.525098564987 }, { "content": " scenario_outline: &[\"Demonstration\"],\n\n examples: &[\"Examples\"],\n\n given: &[\"Given\"],\n\n when: &[\"When\"],\n\n then: &[\"Then\"],\n\n and: &[\"*\", \"And\"],\n\n but: &[\"But\"],\n\n};\n\n\n\nconst SV_KEYWORDS: Keywords<'static> = Keywords {\n\n feature: &[\"Egenskap\"],\n\n background: &[\"Bakgrund\"],\n\n rule: &[\"Regel\"],\n\n scenario: &[\"Scenario\", \"Exempel\"],\n\n scenario_outline: &[\"Abstrakt Scenario\"],\n\n examples: &[\"Exempel\"],\n\n given: &[\"Givet\"],\n\n when: &[\"När\"],\n\n then: &[\"Så\"],\n\n and: &[\"*\", \"Och\"],\n", "file_path": "src/parser.rs", "rank": 32, "score": 15194.525098564987 }, { "content": " d:docstring()? t:table()?\n\n {\n\n env.set_last_step(StepType::Then);\n\n Step::builder().ty(StepType::Then)\n\n .raw_type(k.to_string())\n\n .value(n.to_string())\n\n .table(t)\n\n .docstring(d)\n\n .span((pa, pb))\n\n .position(env.position(pa))\n\n .build()\n\n }\n\n / pa:position!() k:keyword((env.keywords().and)) __ n:not_nl() pb:position!() _ nl_eof() _\n\n d:docstring()? t:table()?\n\n {?\n\n match env.last_step() {\n\n Some(v) => {\n\n Ok(Step::builder().ty(v)\n\n .raw_type(k.to_string())\n\n .value(n.to_string())\n", "file_path": "src/parser.rs", "rank": 33, "score": 15194.525098564987 }, { "content": " s:steps()?\n\n pb:position!()\n\n {\n\n Background::builder()\n\n .steps(s.unwrap_or_else(|| vec![]))\n\n .span((pa, pb))\n\n .position(env.position(pa))\n\n .build()\n\n }\n\n\n\nrule any_directive() -> &'static str\n\n = k:keyword((&*env.keywords().all())) {\n\n // println!(\"Found directive: {}\", &k);\n\n k\n\n }\n\n\n\nrule description_line() -> &'input str\n\n = _ !\"@\" !any_directive() _ n:not_nl() nl_eof() { n }\n\n\n\nrule description() -> Option<String>\n", "file_path": "src/parser.rs", "rank": 34, "score": 15194.525098564987 }, { "content": " .table(t)\n\n .docstring(d)\n\n .span((pa, pb))\n\n .position(env.position(pa))\n\n .build())\n\n }\n\n None => {\n\n Err(\"given, when or then\")\n\n }\n\n }\n\n }\n\n / pa:position!() k:keyword((env.keywords().but)) __ n:not_nl() pb:position!() _ nl_eof() _\n\n d:docstring()? t:table()?\n\n {?\n\n match env.last_step() {\n\n Some(v) => {\n\n Ok(Step::builder().ty(v)\n\n .raw_type(k.to_string())\n\n .value(n.to_string())\n\n .table(t)\n", "file_path": "src/parser.rs", "rank": 35, "score": 15194.525098564987 }, { "content": " fn position(&self, offset: usize) -> (usize, usize) {\n\n let line_offsets = self.line_offsets.borrow();\n\n let index = line_offsets.iter().position(|x| x > &offset);\n\n\n\n let line = index.unwrap_or(0);\n\n let col = index.map(|i| offset - line_offsets[i - 1]).unwrap_or(offset) + 1;\n\n\n\n (line, col)\n\n }\n\n}\n\n\n\nimpl Default for GherkinEnv {\n\n fn default() -> Self {\n\n GherkinEnv {\n\n keywords: RefCell::new(DEFAULT_KEYWORDS),\n\n last_step: RefCell::new(None),\n\n last_keyword: RefCell::new(None),\n\n line_offsets: RefCell::new(vec![0]),\n\n }\n\n }\n", "file_path": "src/parser.rs", "rank": 36, "score": 15194.525098564987 }, { "content": "struct Keywords<'a> {\n\n feature: &'a [&'a str],\n\n background: &'a [&'a str],\n\n rule: &'a [&'a str],\n\n scenario: &'a [&'a str],\n\n scenario_outline: &'a [&'a str],\n\n examples: &'a [&'a str],\n\n given: &'a [&'a str],\n\n when: &'a [&'a str],\n\n then: &'a [&'a str],\n\n and: &'a [&'a str],\n\n but: &'a [&'a str],\n\n}\n\n\n\nimpl<'a> Keywords<'a> {\n\n pub fn all(&self) -> Vec<&'a str> {\n\n let mut v = vec![];\n\n\n\n for x in [\n\n self.feature,\n", "file_path": "src/parser.rs", "rank": 37, "score": 13880.762666461987 }, { "content": "extern crate cucumber;\n\n\n\nuse async_trait::async_trait;\n\nuse std::convert::Infallible;\n\n\n\npub struct MyWorld {}\n\n\n\n#[async_trait(?Send)]\n\nimpl cucumber::World for MyWorld {\n\n type Error = Infallible;\n\n\n\n async fn new() -> Result<Self, Infallible> {\n\n Ok(MyWorld {})\n\n }\n\n}\n\n\n\nmod t {\n\n use cucumber::Steps;\n\n\n\n pub fn steps() -> Steps<crate::MyWorld> {\n\n let builder: Steps<crate::MyWorld> = Steps::new();\n\n builder\n\n }\n\n}\n\n\n", "file_path": "tests/cucumber.rs", "rank": 39, "score": 5.5497302077034565 } ]